def _getUserSubscribedPackages(self) -> None:
    Logger.debug("Requesting subscribed packages metadata from server.")
    url = CloudApiModel.api_url_user_packages
    self._application.getHttpRequestManager().get(url,
                                                  callback = self._onUserPackagesRequestFinished,
                                                  error_callback = self._onUserPackagesRequestFinished,
                                                  scope = self._scope)
def refreshAccessToken(self) -> None:
    """Try to refresh the access token. This should be used when it has expired."""

    if self._auth_data is None or self._auth_data.refresh_token is None:
        Logger.log("w", "Unable to refresh access token, since there is no refresh token.")
        return

    def process_auth_data(response: AuthenticationResponse) -> None:
        if response.success:
            self._storeAuthData(response)
            self.onAuthStateChanged.emit(logged_in = True)
        else:
            Logger.warning("Failed to get a new access token from the server.")
            self.onAuthStateChanged.emit(logged_in = False)

    if self._currently_refreshing_token:
        Logger.debug("Was already busy refreshing token. Do not start a new request.")
        return
    self._currently_refreshing_token = True
    self._auth_helpers.getAccessTokenUsingRefreshToken(self._auth_data.refresh_token, process_auth_data)
def _getUserSubscribedPackages(self) -> None:
    self._application.getCuraAPI().account.setSyncState(self.SYNC_SERVICE_NAME, SyncState.SYNCING)
    Logger.debug("Requesting subscribed packages metadata from server.")
    url = CloudApiModel.api_url_user_packages
    self._application.getHttpRequestManager().get(url,
                                                  callback = self._onUserPackagesRequestFinished,
                                                  error_callback = self._onUserPackagesRequestFinished,
                                                  scope = self._scope)
def loadSettings(self):
    path = PluginRegistry.getInstance().getPluginPath(self.getPluginId())
    path = os.path.join(path, 'settings.cfg')
    if os.path.exists(path):
        # Use a context manager so the file handle is closed after reading.
        with open(path, 'r') as file:
            self.settings = json.load(file)
    else:
        with open(path, 'w') as file:
            json.dump(self.settings, file)
    Logger.debug("SettingsLoaded")
def loadTokenRegistry(self):
    path = PluginRegistry.getInstance().getPluginPath(self.getPluginId())
    path = os.path.join(path, 'tokens.cfg')
    if os.path.exists(path):
        # Use a context manager so the file handle is closed after reading.
        with open(path, 'r') as file:
            self._tokenregistry = json.load(file)
    else:
        with open(path, 'w') as file:
            json.dump(self._tokenregistry, file)
    Logger.debug("TokenRegistryLoaded")
def _progressCallback(self, bytes_sent: int, bytes_total: int) -> None:
    """Handles an update to the upload progress

    :param bytes_sent: The amount of bytes sent in the current request.
    :param bytes_total: The amount of bytes to send in the current request.
    """
    Logger.debug("Cloud upload progress %s / %s", bytes_sent, bytes_total)
    if bytes_total:
        self._on_progress(int(bytes_sent / len(self._data) * 100))
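# A minimal standalone sketch of the percentage calculation above, assuming
# self._data holds the full payload while bytes_sent/bytes_total describe only
# the current request; dividing by the full payload length reports progress
# against the whole upload. Names here are hypothetical.
def overall_progress(bytes_sent: int, full_payload_size: int) -> int:
    if full_payload_size == 0:
        return 100  # Nothing to send counts as done.
    return int(bytes_sent / full_payload_size * 100)

assert overall_progress(512, 2048) == 25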
def _subscribe(self, package_id: str) -> None:
    """You probably don't want to use this directly. All installed packages will be automatically subscribed."""

    Logger.debug("Subscribing to {}", package_id)
    data = "{\"data\": {\"package_id\": \"%s\", \"sdk_version\": \"%s\"}}" % (package_id, CloudApiModel.sdk_version)
    HttpRequestManager.getInstance().put(
        url = CloudApiModel.api_url_user_packages,
        data = data.encode(),
        scope = self._scope)
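# The payload above is assembled with %-formatting, which would break if an ID
# ever contained a quote or backslash. A sketch of the same body built with
# json.dumps instead (field names copied from _subscribe; the helper name is
# made up):
import json

def build_subscribe_payload(package_id: str, sdk_version: str) -> bytes:
    payload = {"data": {"package_id": package_id, "sdk_version": sdk_version}}
    return json.dumps(payload).encode()

assert build_subscribe_payload("CuraEngineBackend", "8.0.0") == \
    b'{"data": {"package_id": "CuraEngineBackend", "sdk_version": "8.0.0"}}'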
def requestHook(self, request: QNetworkRequest):
    super().requestHook(request)
    token = self._account.accessToken
    if not self._account.isLoggedIn or token is None:
        Logger.debug("User is not logged in for Cloud API request to {url}".format(url = request.url().toDisplayString()))
        return

    header_dict = {
        "Authorization": "Bearer {}".format(token)
    }
    self.addHeaders(request, header_dict)
def _onGetRemoteClustersFinished(self, clusters: List[CloudClusterResponse]) -> None:
    """Callback for when the request for getting the clusters is successful and finished."""

    self._um_cloud_printers = {m.getMetaDataEntry(self.META_CLUSTER_ID): m for m in
                               CuraApplication.getInstance().getContainerRegistry().findContainerStacks(
                                   type = "machine") if m.getMetaDataEntry(self.META_CLUSTER_ID, None)}
    new_clusters = []
    all_clusters = {c.cluster_id: c for c in clusters}  # type: Dict[str, CloudClusterResponse]
    online_clusters = {c.cluster_id: c for c in clusters if c.is_online}  # type: Dict[str, CloudClusterResponse]

    # Add the new printers in Cura.
    for device_id, cluster_data in all_clusters.items():
        if device_id not in self._remote_clusters:
            new_clusters.append(cluster_data)
        if device_id in self._um_cloud_printers:
            # Existing cloud printers may not have the host_guid meta-data entry. If that's the case, add it.
            if not self._um_cloud_printers[device_id].getMetaDataEntry(self.META_HOST_GUID, None):
                self._um_cloud_printers[device_id].setMetaDataEntry(self.META_HOST_GUID, cluster_data.host_guid)
            # If a printer was previously not linked to the account and is rediscovered, mark the printer as linked
            # to the current account.
            if not parseBool(self._um_cloud_printers[device_id].getMetaDataEntry(META_UM_LINKED_TO_ACCOUNT, "true")):
                self._um_cloud_printers[device_id].setMetaDataEntry(META_UM_LINKED_TO_ACCOUNT, True)
            if not self._um_cloud_printers[device_id].getMetaDataEntry(META_CAPABILITIES, None):
                self._um_cloud_printers[device_id].setMetaDataEntry(META_CAPABILITIES, ",".join(cluster_data.capabilities))
    self._onDevicesDiscovered(new_clusters)

    self._updateOnlinePrinters(all_clusters)

    # Hide the current removed_printers_message, if there is any.
    if self._removed_printers_message:
        self._removed_printers_message.actionTriggered.disconnect(self._onRemovedPrintersMessageActionTriggered)
        self._removed_printers_message.hide()

    # Remove the CloudOutput device for offline printers.
    offline_device_keys = set(self._remote_clusters.keys()) - set(online_clusters.keys())
    for device_id in offline_device_keys:
        self._onDiscoveredDeviceRemoved(device_id)

    # Handle devices that were previously added in Cura but do not exist in the account anymore (i.e. they were
    # removed from the account).
    removed_device_keys = set(self._um_cloud_printers.keys()) - set(all_clusters.keys())
    if removed_device_keys:
        self._devicesRemovedFromAccount(removed_device_keys)

    if new_clusters or offline_device_keys or removed_device_keys:
        self.discoveredDevicesChanged.emit()
    if offline_device_keys:
        # If the removed device was active we should connect to the new active device.
        self._connectToActiveMachine()

    self._syncing = False
    self._account.setSyncState(self.SYNC_SERVICE_NAME, SyncState.SUCCESS)
    Logger.debug("Synced cloud printers with account.")
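# The device bookkeeping above boils down to two set differences. A toy
# illustration with made-up cluster IDs (the real code uses the keys of
# self._remote_clusters and self._um_cloud_printers respectively):
discovered = {"c1", "c2", "c3"}
online = {"c1"}
on_account = {"c1", "c2"}

assert discovered - online == {"c2", "c3"}    # offline: remove their output devices
assert discovered - on_account == {"c3"}      # no longer on the account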
def _makeRequestByType(self, request_type: str) -> None:
    Logger.debug(f"Requesting {request_type} metadata from server.")
    url = self._request_urls[request_type]
    callback = lambda r, rt = request_type: self._onDataRequestFinished(rt, r)
    error_callback = lambda r, e, rt = request_type: self._onDataRequestError(rt, r, e)
    self._application.getHttpRequestManager().get(url,
                                                  callback = callback,
                                                  error_callback = error_callback,
                                                  scope = self._json_scope)
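# Why the lambdas above bind request_type via a default argument: a lambda
# closes over the variable, not its value, so late-bound callbacks would all
# see whatever request_type last held when they finally fire. A standalone
# illustration:
late_bound = [lambda: rt for rt in ("materials", "plugins")]
early_bound = [lambda rt=rt: rt for rt in ("materials", "plugins")]

assert [cb() for cb in late_bound] == ["plugins", "plugins"]
assert [cb() for cb in early_bound] == ["materials", "plugins"]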
def _onError(self, reply: "QNetworkReply", error: Optional[QNetworkReply.NetworkError]) -> None:
    """
    Handles networking and server errors when requesting the list of packages.
    :param reply: The reply with packages. This will most likely be incomplete and should be ignored.
    :param error: The error status of the request.
    """
    if error == QNetworkReply.NetworkError.OperationCanceledError:
        Logger.debug("Cancelled request for packages.")
        self._ongoing_requests["get_packages"] = None
        return  # Don't show an error about this to the user.
    Logger.error("Could not reach Marketplace server.")
    self.setErrorMessage(catalog.i18nc("@info:error", "Could not reach Marketplace."))
    self._ongoing_requests["get_packages"] = None
    self.setIsLoading(False)
def store(cls, file_path: str, file_id: str, version: Version = Version("1.0.0")) -> None:
    """
    Store a new file into the central file storage. This file will get moved to a storage location that is not
    specific to this version of the application.

    If the file already exists, this will check if it's the same file. If the file is not the same, it raises a
    `FileExistsError`. If the file is the same, no error is raised and the file to store is simply deleted. It is a
    duplicate of the file already stored.
    :param file_path: The path to the file to store in the central file storage.
    :param file_id: A name for the file to store.
    :param version: A version number for the file.
    :raises FileExistsError: There is already a centrally stored file with that name and version, but it's
    different.
    """
    if not os.path.exists(cls._centralStorageLocation()):
        os.makedirs(cls._centralStorageLocation())
    if not os.path.exists(file_path):
        Logger.debug(f"{file_id} {str(version)} was already stored centrally.")
        return

    storage_path = cls._getFilePath(file_id, version)

    if os.path.exists(storage_path):  # File already exists. Check if it's the same.
        if os.path.getsize(file_path) != os.path.getsize(storage_path):  # As quick check if files are the same, check their file sizes.
            raise FileExistsError(f"Central file storage already has a file with ID {file_id} and version {str(version)}, but it's different.")
        new_file_hash = cls._hashFile(file_path)
        stored_file_hash = cls._hashFile(storage_path)
        if new_file_hash != stored_file_hash:
            raise FileExistsError(f"Central file storage already has a file with ID {file_id} and version {str(version)}, but it's different.")
        os.remove(file_path)
        Logger.info(f"{file_id} {str(version)} was already stored centrally. Removing duplicate.")
    else:
        shutil.move(file_path, storage_path)
        Logger.info(f"Storing new file {file_id} {str(version)}.")
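# A hypothetical call site for the store() above, assuming it is exposed as a
# classmethod of a CentralFileStorage class and that Version is Uranium's
# UM.Version. The file path and ID are made up for illustration.
try:
    CentralFileStorage.store("/tmp/downloads/NotoSans.ttf", "noto_sans", Version("2.1.0"))
except FileExistsError:
    Logger.warning("A different file is already stored under this ID and version.")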
def _parseJWT(self, callback: Callable[[Optional["UserProfile"]], None]) -> None:
    """
    Tries to parse the JWT (JSON Web Token) data, which it does if all the needed data is there.
    :param callback: A function to call asynchronously once the user profile has been obtained. It will be called
    with `None` if it failed to obtain a user profile.
    """
    if not self._auth_data or self._auth_data.access_token is None:
        # If no auth data exists, we should always log in again.
        Logger.debug("There was no auth data or access token")
        callback(None)
        return

    # When we checked the token we may get a user profile. This callback checks if that is a valid one and tries
    # to refresh the token if it's not.
    def check_user_profile(user_profile: Optional["UserProfile"]) -> None:
        if user_profile:
            # If the profile was found, we call it back immediately.
            callback(user_profile)
            return
        # The JWT was expired or invalid and we should request a new one.
        if self._auth_data is None or self._auth_data.refresh_token is None:
            Logger.warning("There was no refresh token in the auth data.")
            callback(None)
            return

        def process_auth_data(auth_data: AuthenticationResponse) -> None:
            if auth_data.access_token is None:
                Logger.warning("Unable to use the refresh token to get a new access token.")
                callback(None)
                return
            # Ensure it gets stored as otherwise we only have it in memory. The stored refresh token has been
            # deleted from the server already. Do not store the auth_data if we could not get new auth_data (e.g.
            # due to a network error), since this would cause an infinite loop trying to get new auth-data.
            if auth_data.success:
                self._storeAuthData(auth_data)
            self._auth_helpers.checkToken(auth_data.access_token, callback, lambda: callback(None))

        self._auth_helpers.getAccessTokenUsingRefreshToken(self._auth_data.refresh_token, process_auth_data)

    self._auth_helpers.checkToken(self._auth_data.access_token, check_user_profile, lambda: check_user_profile(None))
def _sync(self) -> None:
    """Signals all sync services to start syncing

    This can be considered a forced sync: even when a sync is currently running, a sync will be requested.
    """
    self._update_packages_action = None
    self._update_packages_enabled = False
    self.updatePackagesEnabledChanged.emit(self._update_packages_enabled)

    if self._update_timer.isActive():
        self._update_timer.stop()
    elif self._sync_state == SyncState.SYNCING:
        Logger.debug("Starting a new sync while previous sync was not completed")

    self.syncRequested.emit()
def callback(profile: Optional["UserProfile"]) -> None:
    if profile is not None:
        self.onAuthStateChanged.emit(logged_in = True)
        Logger.debug("Auth data was successfully loaded")
    else:
        if self._unable_to_get_data_message is not None:
            self._unable_to_get_data_message.show()
        else:
            self._unable_to_get_data_message = Message(i18n_catalog.i18nc("@info", "Unable to reach the Ultimaker account server."),
                                                       title = i18n_catalog.i18nc("@info:title", "Log-in failed"),
                                                       message_type = Message.MessageType.ERROR)
            Logger.warning("Unable to get user profile using auth data from preferences.")
            self._unable_to_get_data_message.show()
def _checkCompatibilities(self, json_data) -> None:
    user_subscribed_packages = [plugin["package_id"] for plugin in json_data]
    user_installed_packages = self._package_manager.getUserInstalledPackages()
    user_dismissed_packages = self._package_manager.getDismissedPackages()
    if user_dismissed_packages:
        user_installed_packages += user_dismissed_packages

    # We check if there are packages installed in the Cloud Marketplace but not in the Cura marketplace.
    package_discrepancy = list(set(user_subscribed_packages).difference(user_installed_packages))
    if package_discrepancy:
        self._models["subscribed_packages"].addDiscrepancies(package_discrepancy)
        self._models["subscribed_packages"].initialize()
        Logger.debug("Discrepancy found between Cloud subscribed packages and Cura installed packages")
        sync_message = Message(i18n_catalog.i18nc("@info:generic",
                                                  "\nDo you want to sync material and software packages with your account?"),
                               lifetime = 0,
                               title = i18n_catalog.i18nc("@info:title", "Changes detected from your Ultimaker account"))
        sync_message.addAction("sync",
                               name = i18n_catalog.i18nc("@action:button", "Sync"),
                               icon = "",
                               description = "Sync your Cloud subscribed packages to your local environment.",
                               button_align = Message.ActionButtonAlignment.ALIGN_RIGHT)
        sync_message.actionTriggered.connect(self._onSyncButtonClicked)
        sync_message.show()
def _onLicenseAnswers(self, answers: List[Dict[str, Any]]) -> None:
    Logger.debug("Got license answers: {}", answers)

    has_changes = False  # True when at least one package is installed

    for item in answers:
        if item["accepted"]:
            # Install and subscribe packages.
            if not self._package_manager.installPackage(item["package_path"]):
                Logger.error("could not install {}".format(item["package_id"]))
                continue
            self._cloud_package_manager.subscribe(item["package_id"])
            has_changes = True
        else:
            pass  # TODO: unsubscribe declined packages.
        # Delete the temporary package file.
        os.remove(item["package_path"])

    if has_changes:
        self._restart_presenter.present()
def removeFromDismissedPackages(self, package: str) -> None:
    if package in self._dismissed_packages:
        self._dismissed_packages.remove(package)
        Logger.debug("Removed package [%s] from the dismissed packages list" % package)
def dismissAllIncompatiblePackages(self, incompatible_packages: List[str]) -> None:
    self._dismissed_packages.update(incompatible_packages)
    self._saveManagementData()
    Logger.debug("Dismissed incompatible package(s): {}".format(incompatible_packages))
def store(cls, path: str, path_id: str, version: Version = Version("1.0.0"), move_file: bool = True) -> None:
    """
    Store a new item (file or directory) into the central file storage. This item will get moved to a storage
    location that is not specific to this version of the application.

    If the item already exists, this will check if it's the same item. If the item is not the same, it raises a
    `FileExistsError`. If the item is the same, no error is raised and the item to store is simply deleted. It is
    a duplicate of the item already stored.

    Note that this function SHOULD NOT be called by plugins themselves. The central_storage.json should be used
    instead!
    :param path: The path to the item (file or directory) to store in the central file storage.
    :param path_id: A name for the item (file or directory) to store.
    :param version: A version number for the item (file or directory).
    :param move_file: Should the file be moved at all or just remembered for later retrieval
    :raises FileExistsError: There is already a centrally stored item (file or directory) with that name and
    version, but it's different.
    """
    if not move_file:
        full_identifier = path_id + str(version)
        if full_identifier not in cls._unmoved_files:
            cls._unmoved_files[full_identifier] = path
        return

    if not os.path.exists(cls.getCentralStorageLocation()):
        os.makedirs(cls.getCentralStorageLocation())
    if not os.path.exists(path):
        Logger.debug(f"{path_id} {str(version)} was already stored centrally or the provided path is not correct")
        return

    storage_path = cls._getItemPath(path_id, version)

    if os.path.exists(storage_path):  # File already exists. Check if it's the same.
        if os.path.getsize(path) != os.path.getsize(storage_path):  # As quick check if files are the same, check their file sizes.
            raise FileExistsError(f"Central file storage already has an item (file or directory) with ID {path_id} and version {str(version)}, but it's different.")
        new_item_hash = cls._hashItem(path)
        stored_item_hash = cls._hashItem(storage_path)
        if new_item_hash != stored_item_hash:
            raise FileExistsError(f"Central file storage already has an item (file or directory) with ID {path_id} and version {str(version)}, but it's different.")
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)
        Logger.info(f"{path_id} {str(version)} was already stored centrally. Removing duplicate.")
    else:
        shutil.move(path, storage_path)
        Logger.info(f"Storing new item {path_id}.{str(version)}.")
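# A sketch of the move_file = False path above: the item stays where it is and
# only its location is remembered under path_id + version for later retrieval.
# Paths and IDs are made up; the retrieval API is assumed to live elsewhere in
# the class.
CentralFileStorage.store("/opt/cura/shared_profiles", "shared_profiles",
                         Version("1.0.0"), move_file = False)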
def dismissPackage(self, package_id: str) -> None:
    package = self.find(key = "package_id", value = package_id)
    if package != -1:
        self.setProperty(package, property = "is_dismissed", value = True)
        Logger.debug("Package {} has been dismissed".format(package_id))
def SaveTokenRegistry(self):
    path = PluginRegistry.getInstance().getPluginPath(self.getPluginId())
    path = os.path.join(path, 'tokens.cfg')
    with open(path, 'w') as file:
        json.dump(self._tokenregistry, file)
    Logger.debug("TokensSaved")
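# A hypothetical helper showing the intended round trip for the registry:
# load from tokens.cfg, update the in-memory dict, then persist it. The mixed
# method casing (loadTokenRegistry vs. SaveTokenRegistry) follows the plugin
# as written.
def _registerToken(self, printer_id: str, token: str) -> None:
    self.loadTokenRegistry()
    self._tokenregistry[printer_id] = token
    self.SaveTokenRegistry()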