def _update(self) -> None:
    """(Re)load the translated texts for this catalogue from the .mo files on disk."""
    if not self.__application:
        # No application available yet; remember that a reload is still pending.
        self.__require_update = True
        return

    # Fall back to the application's own name/language when not explicitly set.
    if not self.__name:
        self.__name = self.__application.getApplicationName()
    if self.__language == "default":
        self.__language = self.__application.getApplicationLanguage()

    catalogue_name = cast(str, self.__name)
    languages = [self.__language]
    # Ask gettext for all the translations in the .mo files.
    for search_path in Resources.getAllPathsForType(Resources.i18n):
        if not gettext.find(catalogue_name, search_path, languages = languages):
            continue
        try:
            self.__translation = gettext.translation(catalogue_name, search_path, languages = languages)
        except OSError:
            Logger.warning("Corrupt or inaccessible translation file: {fname}".format(fname = catalogue_name))
    self.__require_update = False
def _onUploadSlotCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
    """Handle the response to the request for a backup-upload slot.

    On success, the returned upload URL is used to PUT the backup archive to storage.
    On any failure, an error message is stored and the job is marked as done.

    :param reply: The network reply of the upload-slot request.
    :param error: Optional network error reported alongside the reply.
    """
    if HttpRequestManager.safeHttpStatus(reply) >= 300:
        replyText = HttpRequestManager.readText(reply)
        Logger.warning("Could not request backup upload: %s", replyText)
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE

        if HttpRequestManager.safeHttpStatus(reply) == 400:
            # A 400 may carry a structured error body telling us the backup is too large.
            # Guard the parse: a malformed/non-JSON body must not raise here, or
            # self._job_done would never be set and the job would hang.
            try:
                errors = json.loads(replyText)["errors"]
            except (ValueError, KeyError, TypeError):
                errors = []
            # Note: use `err`, not `error` — the original comprehension shadowed the parameter.
            size_error_codes = [err["code"] for err in errors
                                if err.get("meta") and err["meta"].get("field_name") == "backup_size"]
            if "moreThanMaximum" in size_error_codes:
                self.backup_upload_error_message = catalog.i18nc("@error:file_size", "The backup exceeds the maximum file size.")

        self._job_done.set()
        return

    if error is not None:
        Logger.warning("Could not request backup upload: %s", HttpRequestManager.qt_network_error_name(error))
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
        self._job_done.set()
        return

    backup_upload_url = HttpRequestManager.readJSON(reply)["data"]["upload_url"]

    # Upload the backup to storage.
    HttpRequestManager.getInstance().put(
        backup_upload_url,
        data = self._backup_zip,
        callback = self._uploadFinishedCallback,
        error_callback = self._uploadFinishedCallback
    )
def connect(self):
    """Establish the serial connection, kicking off baud-rate auto-detection when necessary."""
    self._firmware_name = None  # After each connection, the firmware name must be re-detected.

    if self._baud_rate is None:
        # Without a known baud rate we can only connect after auto-detection (if enabled).
        if self._use_auto_detect:
            detect_job = AutoDetectBaudJob(self._serial_port)
            detect_job.start()
            detect_job.finished.connect(self._autoDetectFinished)
        return

    if self._serial is None:
        try:
            self._serial = Serial(str(self._serial_port), self._baud_rate,
                                  timeout = self._timeout, writeTimeout = self._timeout)
        except SerialException:
            Logger.warning("An exception occurred while trying to create serial connection.")
            return
        except OSError as err:
            Logger.warning("The serial device is suddenly unavailable while trying to create a serial connection: {err}".format(err = str(err)))
            return

    CuraApplication.getInstance().globalContainerStackChanged.connect(self._onGlobalContainerStackChanged)
    self._onGlobalContainerStackChanged()
    self.setConnectionState(ConnectionState.Connected)
    self._update_thread.start()
def read(self, file_name: str) -> Union[SceneNode, List[SceneNode]]:
    """Read mesh data from file and return the node(s) that contain the data.

    Note that in some cases you can get an entire scene of nodes in this way (eg; 3MF).

    :return: :type{SceneNode} or :type{list(SceneNode)} The SceneNode or SceneNodes read from file.
    """
    file_name = resolveAnySymlink(file_name)
    result = self._read(file_name)
    UM.Application.Application.getInstance().getController().getScene().addWatchedFile(file_name)

    # A reader may have set a more specific MIME type than what the extension implies;
    # for those that did not, derive one from the extension via the MIME database.
    meshes = result if isinstance(result, list) else [result]
    for mesh in meshes:
        if mesh.source_mime_type is not None:
            continue
        try:
            mesh.source_mime_type = MimeTypeDatabase.getMimeTypeForFile(file_name)
        except MimeTypeNotFoundError:
            Logger.warning(f"Loaded file {file_name} has no associated MIME type.")
            # Leave MIME type at None then.
    return result
def check_user_profile(user_profile: Optional["UserProfile"]) -> None:
    """Handle the profile lookup result; refresh the access token when the profile is missing."""
    if user_profile:
        # Profile found: report it back right away.
        callback(user_profile)
        return

    # The JWT was expired or invalid and we should request a new one.
    if self._auth_data is None or self._auth_data.refresh_token is None:
        Logger.warning("There was no refresh token in the auth data.")
        callback(None)
        return

    def _on_refresh_response(auth_data: AuthenticationResponse) -> None:
        new_token = auth_data.access_token
        if new_token is None:
            Logger.warning("Unable to use the refresh token to get a new access token.")
            callback(None)
            return
        # Ensure it gets stored as otherwise we only have it in memory. The stored refresh token has been
        # deleted from the server already. Do not store the auth_data if we could not get new auth_data (e.g.
        # due to a network error), since this would cause an infinite loop trying to get new auth-data.
        if auth_data.success:
            self._storeAuthData(auth_data)
        self._auth_helpers.checkToken(new_token, callback, lambda: callback(None))

    self._auth_helpers.getAccessTokenUsingRefreshToken(self._auth_data.refresh_token, _on_refresh_response)
def _onUploadSlotCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
    """Handle the response to the request for a backup-upload slot.

    On failure an error message is stored and the job is marked done; on success
    the backup archive is PUT to the returned upload URL.

    :param reply: The network reply of the upload-slot request.
    :param error: Optional network error reported alongside the reply.
    """
    if error is not None:
        Logger.warning(str(error))
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
        self._job_done.set()
        return

    status_code = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute)
    # The attribute is None when the request never produced an HTTP status (e.g. the
    # server was unreachable); comparing None >= 300 would raise TypeError, so treat
    # a missing status as a failure too.
    if status_code is None or status_code >= 300:
        Logger.warning("Could not request backup upload: %s", HttpRequestManager.readText(reply))
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
        self._job_done.set()
        return

    backup_upload_url = HttpRequestManager.readJSON(reply)["data"]["upload_url"]

    # Upload the backup to storage.
    HttpRequestManager.getInstance().put(
        backup_upload_url,
        data = self._backup_zip,
        callback = self._uploadFinishedCallback,
        error_callback = self._uploadFinishedCallback)
def addPluginLocation(self, location: str) -> None:
    """Register an additional folder to search for plugins; non-folder paths are rejected with a warning."""
    if os.path.isdir(location):
        self._plugin_locations.append(location)
    else:
        Logger.warning("Plugin location {0} must be a folder.".format(location))
def _findInstalledPlugins(self, paths = None) -> List[str]:
    """Recursively collect the ids of all plugins found under the given paths.

    A plugin is any sub-folder containing an ``__init__.py``; other folders are
    descended into. Defaults to this registry's configured plugin locations.

    :param paths: Folders to search, or None to use self._plugin_locations.
    :return: List of discovered plugin ids (folder names).
    """
    plugin_ids = []  # type: List[str]
    search_paths = paths if paths else self._plugin_locations
    for folder in search_paths:
        try:
            if not os.path.isdir(folder):
                continue
            for entry in os.listdir(folder):
                entry_path = os.path.join(folder, entry)
                if not os.path.isdir(entry_path):
                    continue  # Plain files at this level are not plugins.
                if os.path.isfile(os.path.join(entry_path, "__init__.py")):
                    plugin_ids.append(entry)
                else:
                    plugin_ids += self._findInstalledPlugins([entry_path])
        except EnvironmentError as err:
            Logger.warning("Unable to read folder {folder}: {err}".format(folder = folder, err = err))
            continue
    return plugin_ids
def process_auth_data(response: AuthenticationResponse) -> None:
    """Store successful auth data and broadcast the resulting logged-in state."""
    if not response.success:
        Logger.warning("Failed to get a new access token from the server.")
        self.onAuthStateChanged.emit(logged_in = False)
        return
    self._storeAuthData(response)
    self.onAuthStateChanged.emit(logged_in = True)
def _updateMetadataInDatabase(self, metadata: metadata_type) -> None:
    """Update a container's row in the database, recreating the database if it turns out to be corrupt."""
    handler = self._database_handlers.get(metadata["type"])
    if handler is None:
        # No handler registered for this container type; nothing to update.
        return
    try:
        handler.update(metadata)
    except db.DatabaseError as db_error:
        Logger.warning(f"Removing corrupt database and recreating database. {db_error}")
        self._recreateCorruptDataBase(handler.cursor)
def requestHook(self, request: QNetworkRequest):
    """Attach the account's bearer token to an outgoing Cloud API request."""
    super().requestHook(request)
    token = self._account.accessToken
    if self._account.isLoggedIn and token is not None:
        self.addHeaders(request, {"Authorization": "Bearer {}".format(token)})
    else:
        Logger.warning("Cannot add authorization to Cloud Api request")
def loadScripts(self, path: str) -> None:
    """Load all post-processing scripts found directly under ``path``.

    On Enterprise builds, ``__pycache__`` folders outside the installation prefix
    are removed first, since a tampered cache could execute unsigned code. Each
    script module is then imported, instantiated once to read its setting data,
    and registered in ``self._loaded_scripts`` / ``self._script_labels``.

    :param path: Folder to scan for ``.py`` post-processing scripts.
    """
    if ApplicationMetadata.IsEnterpriseVersion:
        # Delete all __pycache__ not in installation folder, as it may present a security risk.
        # It prevents this very strange scenario (should already be prevented on enterprise because signed-fault):
        # - Copy an existing script from the postprocessing-script folder to the appdata scripts folder.
        # - Also copy the entire __pycache__ folder from the first to the last location.
        # - Leave the __pycache__ as is, but write malicious code just before the class begins.
        # - It'll execute, despite that the script has not been signed.
        # It's not known if these reproduction steps are minimal, but it does at least happen in this case.
        install_prefix = os.path.abspath(CuraApplication.getInstance().getInstallPrefix())
        try:
            is_in_installation_path = os.path.commonpath([install_prefix, path]).startswith(install_prefix)
        except ValueError:
            # commonpath raises ValueError for paths on different drives (Windows) or mixed abs/rel paths.
            is_in_installation_path = False
        if not is_in_installation_path:
            TrustBasics.removeCached(path)

    ## Load all scripts in the scripts folders
    scripts = pkgutil.iter_modules(path = [path])
    for loader, script_name, ispkg in scripts:
        # Iterate over all scripts.
        if script_name not in sys.modules:
            try:
                file_path = os.path.join(path, script_name + ".py")
                # Trust check must happen before any import/execution of the script.
                if not self._isScriptAllowed(file_path):
                    Logger.warning("Skipped loading post-processing script {}: not trusted".format(file_path))
                    continue
                spec = importlib.util.spec_from_file_location(__name__ + "." + script_name, file_path)
                loaded_script = importlib.util.module_from_spec(spec)
                if spec.loader is None:
                    continue
                spec.loader.exec_module(loaded_script)  # type: ignore
                sys.modules[script_name] = loaded_script #TODO: This could be a security risk. Overwrite any module with a user-provided name?

                # The script's class is expected to share its module's name (convention used below).
                loaded_class = getattr(loaded_script, script_name)
                temp_object = loaded_class()
                Logger.log("d", "Begin loading of script: %s", script_name)
                try:
                    setting_data = temp_object.getSettingData()
                    if "name" in setting_data and "key" in setting_data:
                        self._script_labels[setting_data["key"]] = setting_data["name"]
                        self._loaded_scripts[setting_data["key"]] = loaded_class
                    else:
                        Logger.log("w", "Script %s.py has no name or key", script_name)
                        # Fall back to the module name as both key and label.
                        self._script_labels[script_name] = script_name
                        self._loaded_scripts[script_name] = loaded_class
                except AttributeError:
                    Logger.log("e", "Script %s.py is not a recognised script type. Ensure it inherits Script", script_name)
                except NotImplementedError:
                    Logger.log("e", "Script %s.py has no implemented settings", script_name)
            except Exception as e:
                Logger.logException("e", "Exception occurred while loading post processing plugin: {error_msg}".format(error_msg = str(e)))
def parse() -> None:
    """Parse the finished network reply and dispatch it to the success or failure callback."""
    self._anti_gc_callbacks.remove(parse)
    status_code, response = parser(reply) if parser else ApiHelper.parseReplyAsJson(reply)
    # Success requires a status code below 400 and a non-empty response body.
    success = status_code and status_code < 400 and response is not None
    if not success:
        Logger.warning("API returned with status {} and body {}".format(status_code, response))
        if on_failed and isinstance(response, dict):
            on_failed(ApiError(response))
    else:
        on_finished(response)
    reply.deleteLater()
def _install(self, package_id: str, update: bool = False) -> None:
    """Install a previously downloaded package and subscribe the user to it.

    :param package_id: Id of the package to install.
    :param update: Whether this install is an update of an existing package.
    """
    package_path = self._to_install.pop(package_id)
    to_be_installed = self._package_manager.installPackage(package_path) is not None
    if not to_be_installed:
        Logger.warning(f"Could not install {package_id}")
        return
    package = self.getPackageModel(package_id)
    if package:
        self.subscribeUserToPackage(package_id, str(package.sdk_version))
    else:
        # Unified on Logger.warning for consistency with the failure branch above
        # (the original mixed Logger.warning and Logger.log("w", ...)).
        Logger.warning(f"Unable to get data on package {package_id}")
def loadScripts(self, path: str) -> None:
    """Load all post-processing scripts found directly under ``path``.

    Each script module is imported (after a trust check), instantiated once to
    read its setting data, and registered in ``self._loaded_scripts`` /
    ``self._script_labels``.

    :param path: Folder to scan for ``.py`` post-processing scripts.
    """
    ## Load all scripts in the scripts folders
    scripts = pkgutil.iter_modules(path=[path])
    for loader, script_name, ispkg in scripts:
        # Iterate over all scripts.
        if script_name not in sys.modules:
            try:
                file_path = os.path.join(path, script_name + ".py")
                # Trust check must happen before any import/execution of the script.
                if not self._isScriptAllowed(file_path):
                    Logger.warning("Skipped loading post-processing script {}: not trusted".format(file_path))
                    continue
                spec = importlib.util.spec_from_file_location(__name__ + "." + script_name, file_path)
                loaded_script = importlib.util.module_from_spec(spec)
                if spec.loader is None:
                    continue
                spec.loader.exec_module(loaded_script)  # type: ignore
                sys.modules[script_name] = loaded_script #TODO: This could be a security risk. Overwrite any module with a user-provided name?

                # The script's class is expected to share its module's name (convention used below).
                loaded_class = getattr(loaded_script, script_name)
                temp_object = loaded_class()
                Logger.log("d", "Begin loading of script: %s", script_name)
                try:
                    setting_data = temp_object.getSettingData()
                    if "name" in setting_data and "key" in setting_data:
                        self._script_labels[setting_data["key"]] = setting_data["name"]
                        self._loaded_scripts[setting_data["key"]] = loaded_class
                    else:
                        Logger.log("w", "Script %s.py has no name or key", script_name)
                        # Fall back to the module name as both key and label.
                        self._script_labels[script_name] = script_name
                        self._loaded_scripts[script_name] = loaded_class
                except AttributeError:
                    Logger.log("e", "Script %s.py is not a recognised script type. Ensure it inherits Script", script_name)
                except NotImplementedError:
                    Logger.log("e", "Script %s.py has no implemented settings", script_name)
            except Exception as e:
                Logger.logException("e", "Exception occurred while loading post processing plugin: {error_msg}".format(error_msg=str(e)))
def process_auth_data(auth_data: AuthenticationResponse) -> None:
    """Validate a token-refresh response and check the resulting access token."""
    new_token = auth_data.access_token
    if new_token is None:
        Logger.warning("Unable to use the refresh token to get a new access token.")
        callback(None)
        return
    # Ensure it gets stored as otherwise we only have it in memory. The stored refresh token has been
    # deleted from the server already. Do not store the auth_data if we could not get new auth_data (e.g.
    # due to a network error), since this would cause an infinite loop trying to get new auth-data.
    if auth_data.success:
        self._storeAuthData(auth_data)
    self._auth_helpers.checkToken(new_token, callback, lambda: callback(None))
def getProjectFilesFailed(self, reply: QNetworkReply, error: QNetworkReply.NetworkError) -> None:
    """
    Error function, called whenever the retrieval of the files in a library project fails.
    """
    try:
        # Looking up the project may itself fail when the selected index is stale.
        project = self._project_model._projects[self._selected_project_idx]
        Logger.warning(f"Failed to retrieve the list of files in project '{project}' from the Digital Library")
    except IndexError:
        Logger.warning(f"Failed to retrieve the list of files in a project from the Digital Library. And failed to get the project too.")
    self.setRetrievingFilesStatus(RetrievalStatus.Failed)
def callbackWrap(response: Optional[Any] = None, *args, **kwargs) -> None:
    """Translate a feature-budget response into a has-access boolean for the callback."""
    is_budget_response = (response is not None
                          and isinstance(response, DigitalFactoryFeatureBudgetResponse)
                          and response.library_max_private_projects is not None)
    if not is_budget_response:
        Logger.warning(f"Digital Factory: Response is not a feature budget, likely an error: {str(response)}")
        callback(False)
        return
    max_projects = response.library_max_private_projects
    # Note: -1 is unlimited
    callback(max_projects == -1 or max_projects > 0)
    self._library_max_private_projects = max_projects
def _sync(self) -> None:
    """Signal all sync services to start syncing.

    This can be considered a forced sync: even when a sync is currently running,
    a new sync will be requested.
    """
    timer = self._update_timer
    if timer.isActive():
        timer.stop()
    elif self._sync_state == SyncState.SYNCING:
        # Only warn when no timer was pending but a sync is already in flight.
        Logger.warning("Starting a new sync while previous sync was not completed\n{}", str(self._sync_services))
    self.syncRequested.emit()
def store_profile(profile: Optional["UserProfile"]) -> None:
    """Cache the fetched user profile, or clear the auth data when it could not be loaded."""
    if profile is not None:
        self._user_profile = profile
        if callback is not None:
            callback(profile)
        return
    if self._auth_data:
        # If there is no user profile from the JWT, we have to log in again.
        Logger.warning("The user profile could not be loaded. The user must log in again!")
        self.deleteAuthData()
    if callback is not None:
        callback(None)
def restoreBackup(self, backup: Dict[str, Any]) -> None:
    """Kick off a background job that restores the given cloud backup."""
    self.restoringStateChanged.emit(is_restoring = True)

    download_url = backup.get("download_url")
    if not download_url:
        # If there is no download URL, we can't restore the backup.
        Logger.warning("backup download_url is missing. Aborting backup.")
        self.restoringStateChanged.emit(
            is_restoring = False,
            error_message = catalog.i18nc("@info:backup_status", "There was an error trying to restore your backup."))
        return

    restore_job = RestoreBackupJob(backup)
    restore_job.finished.connect(self._onRestoreFinished)
    restore_job.start()
def values(self, container_id: str) -> Generator:
    """Yield all values from the container's DB row, followed by the container type and the table name.

    :param container_id: The container_id to query.
    """
    result = self._execute(self._queries.select, (container_id, )).fetchone()
    if result is None:
        Logger.warning(f"Could not retrieve metadata for: {container_id} from database")
        # A bare return ends the generator with no items. The `[]` previously
        # returned here was silently discarded (a generator's return value only
        # rides along on StopIteration), so it was dead/misleading code.
        return
    for value in result:
        yield value
    yield self._container_type
    yield self._queries.table
def _serve_forever(self) -> None:
    """
    Run the web server's serve_forever loop, if a web server exists.

    On Windows this catches any OSErrors that may occur in the thread, making the
    reported message more log-friendly. On any other platform serve_forever is
    called directly.

    :return: None
    """
    if not self._web_server:
        return
    if sys.platform != "win32":
        # Leave the default behavior in non-windows platforms.
        self._web_server.serve_forever()
        return
    try:
        self._web_server.serve_forever()
    except OSError as os_error:
        Logger.warning(str(os_error))
def callbackWrap(response: Optional[Any] = None, *args, **kwargs) -> None:
    """Derive Digital Factory access from a feature-budget response and propagate it."""
    if (response is None
            or not isinstance(response, DigitalFactoryFeatureBudgetResponse)
            or response.library_max_private_projects is None):
        Logger.warning(f"Digital Factory: Response is not a feature budget, likely an error: {str(response)}")
        callback(False)
        return
    max_private = response.library_max_private_projects
    # A user has DF access when library_max_private_projects is either -1 (unlimited) or bigger then 0
    has_access = max_private == -1 or max_private > 0
    callback(has_access)
    self._library_max_private_projects = max_private
    # update the account with the additional user rights
    self._account.updateAdditionalRight(df_access = has_access)
def _onUploadSlotCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
    """Handle the response to the backup-upload-slot request.

    On failure an error message is set (over-size backups are also reported to
    Sentry) and the job is marked done; on success the backup archive is PUT to
    the returned upload URL.

    :param reply: The network reply of the upload-slot request.
    :param error: Optional network error reported alongside the reply.
    """
    if HttpRequestManager.safeHttpStatus(reply) >= 300:
        replyText = HttpRequestManager.readText(reply)
        Logger.warning("Could not request backup upload: %s", replyText)
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE

        if HttpRequestManager.safeHttpStatus(reply) == 400:
            # Guard the parse: a malformed/non-JSON error body must not raise here,
            # or self._job_done would never be set and the job would hang.
            try:
                errors = json.loads(replyText)["errors"]
            except (ValueError, KeyError, TypeError):
                errors = []
            # Use `err`, not `error` — the original comprehension shadowed the parameter.
            size_error_codes = [err["code"] for err in errors
                                if err.get("meta") and err["meta"].get("field_name") == "backup_size"]
            if "moreThanMaximum" in size_error_codes:
                if self._backup_zip is None:  # will never happen; keep mypy happy
                    zip_error = "backup is None."
                else:
                    zip_error = "{} exceeds max size.".format(str(len(self._backup_zip)))
                sentry_sdk.capture_message("backup failed: {}".format(zip_error), level = "warning")
                self.backup_upload_error_message = catalog.i18nc("@error:file_size", "The backup exceeds the maximum file size.")
                # Removed a stray `from sentry_sdk import capture_message` that used to sit
                # here: it was dead code, placed after the (module-qualified) call above.

        self._job_done.set()
        return

    if error is not None:
        Logger.warning("Could not request backup upload: %s", HttpRequestManager.qt_network_error_name(error))
        self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
        self._job_done.set()
        return

    backup_upload_url = HttpRequestManager.readJSON(reply)["data"]["upload_url"]

    # Upload the backup to storage.
    HttpRequestManager.getInstance().put(
        backup_upload_url,
        data = self._backup_zip,
        callback = self._uploadFinishedCallback,
        error_callback = self._uploadFinishedCallback)
def getThemes(self) -> List[Dict[str, str]]:
    """Scan all theme folders and return the available themes as ``{"id", "name"}`` dicts, sorted by name.

    Folders outside the bundled installation are skipped when ``self._check_if_trusted`` is set.
    Unreadable folders and unparseable ``theme.json`` files are skipped with a warning.
    """
    install_prefix = os.path.abspath(UM.Application.Application.getInstance().getInstallPrefix())
    themes = []
    for path in Resources.getAllPathsForType(Resources.Themes):
        if self._check_if_trusted and not TrustBasics.isPathInLocation(install_prefix, path):
            # This will prevent themes to load from outside 'bundled' folders, when `check_if_trusted` is True.
            # Note that this will be a lot less useful in newer versions supporting Qt 6, due to lack of QML Styles.
            Logger.warning("Skipped indexing Theme from outside bundled folders: ", path)
            continue
        try:
            for file in os.listdir(path):
                folder = os.path.join(path, file)
                theme_file = os.path.join(folder, "theme.json")
                # A theme is a sub-folder that contains a theme.json file.
                if os.path.isdir(folder) and os.path.isfile(theme_file):
                    theme_id = os.path.basename(folder)
                    with open(theme_file, encoding="utf-8") as f:
                        try:
                            data = json.load(f)
                        except (UnicodeDecodeError, json.decoder.JSONDecodeError):
                            Logger.log("w", "Could not parse theme %s", theme_id)
                            continue  # do not add this theme to the list, but continue looking for other themes
                        try:
                            theme_name = data["metadata"]["name"]
                        except KeyError:
                            Logger.log("w", "Theme %s does not have a name; using its id instead", theme_id)
                            theme_name = theme_id  # fallback if no name is specified in json
                        themes.append({"id": theme_id, "name": theme_name})
        except EnvironmentError:
            # Unreadable theme folder; skip it silently and keep scanning the rest.
            pass
    themes.sort(key=lambda k: k["name"])
    return themes
def updateFirmware(self, firmware_file: Union[str, QUrl]) -> None:
    """Start flashing the given firmware file onto the printer.

    :param firmware_file: Path to the firmware: either a plain string (possibly a
        url-encoded ``file://`` URI) or a QUrl.
    """
    # Normalize QUrl input first: QUrl has no startswith(), so the previous
    # string-only check raised AttributeError for QUrl arguments despite the
    # Union[str, QUrl] signature.
    if isinstance(firmware_file, QUrl):
        self._firmware_file = firmware_file.toLocalFile()
    elif firmware_file.startswith("file://"):
        # the file path could be url-encoded.
        self._firmware_file = QUrl(firmware_file).toLocalFile()
    else:
        self._firmware_file = firmware_file

    if self._firmware_file == "":
        self._setFirmwareUpdateState(FirmwareUpdateState.firmware_not_found_error)
        return

    self._setFirmwareUpdateState(FirmwareUpdateState.updating)

    try:
        self._update_firmware_thread.start()
    except RuntimeError:
        Logger.warning("Could not start the update thread, since it's still running!")
def _setup(self):
    """Locate the SmartSlice scene node under the current selection and populate the boundary-condition list."""
    selected_node = Selection.getSelectedObject(0)
    Selection.setFaceSelectMode(True)
    if not selected_node:
        Logger.warning("No node selected for creating boundary conditions")
        return
    # Assign first, then validate: callers may rely on the attribute being refreshed.
    self._smart_slice_scene_node = findChildSceneNode(selected_node, SmartSliceScene.Root)
    if not self._smart_slice_scene_node:
        Logger.warning("No SmartSlice node found for creating boundary conditions")
        return
    self._smart_slice_scene_node.childrenChanged.connect(self._smartSliceSceneChanged)
    self._populateList()
def _extractArchive(self, archive: "ZipFile", target_path: str) -> bool:
    """Extract the whole archive to the given target path.

    :param archive: The archive as ZipFile.
    :param target_path: The target path.
    :return: Whether we had success or not.
    """
    # Implement security recommendations: Sanity check on zip files will make it harder to spoof.
    from cura.CuraApplication import CuraApplication
    config_filename = CuraApplication.getInstance().getApplicationName() + ".cfg"  # Should be there if valid.
    if config_filename not in [file.filename for file in archive.filelist]:
        # NOTE(review): logException is called here without an active exception, so it
        # presumably logs without a traceback — consider Logger.error; left as-is to
        # preserve the exact logging behavior.
        Logger.logException("e", "Unable to extract the backup due to corruption of compressed file(s).")
        return False

    Logger.log("d", "Removing current data in location: %s", target_path)
    Resources.factoryReset()

    Logger.log("d", "Extracting backup to location: %s", target_path)
    name_list = archive.namelist()
    ignore_string = re.compile("|".join(self.IGNORED_FILES + self.IGNORED_FOLDERS))
    for archive_filename in name_list:
        if ignore_string.search(archive_filename):
            Logger.warning(f"File ({archive_filename}) in archive that doesn't fit current backup policy; ignored.")
            continue
        try:
            archive.extract(archive_filename, target_path)
        except EnvironmentError:
            # PermissionError is a subclass of EnvironmentError (OSError), so the
            # original (PermissionError, EnvironmentError) tuple was redundant.
            Logger.logException("e", f"Unable to extract the file {archive_filename} from the backup due to permission or file system errors.")
        except UnicodeEncodeError:
            Logger.error(f"Unable to extract the file {archive_filename} because of an encoding error.")
        # Keep the UI responsive while extracting many files.
        CuraApplication.getInstance().processEvents()
    return True
def callback(profile: Optional["UserProfile"]) -> None:
    """React to the loaded user profile: mark logged-in on success, surface an error message otherwise."""
    if profile is not None:
        self.onAuthStateChanged.emit(logged_in = True)
        Logger.debug("Auth data was successfully loaded")
        return
    if self._unable_to_get_data_message is not None:
        # Re-use the previously created message instead of building a new one.
        self._unable_to_get_data_message.show()
        return
    self._unable_to_get_data_message = Message(
        i18n_catalog.i18nc("@info", "Unable to reach the Ultimaker account server."),
        title = i18n_catalog.i18nc("@info:title", "Log-in failed"),
        message_type = Message.MessageType.ERROR)
    Logger.warning("Unable to get user profile using auth data from preferences.")
    self._unable_to_get_data_message.show()