def test_versionPostfix():
    """Postfix handling: "1.2.3-alpha.4" splits into a type and a number, and orders below alpha.5."""
    alpha_4 = Version("1.2.3-alpha.4")
    alpha_5 = Version("1.2.3-alpha.5")

    # The postfix is parsed into its textual type and its numeric counter.
    assert alpha_4.getPostfixType() == "alpha"
    assert alpha_4.getPostfixVersion() == 4

    # Only versions that actually carry a postfix report having one.
    assert alpha_4.hasPostFix()
    assert not Version("").hasPostFix()

    # The postfix counter participates in ordering.
    assert alpha_4 <= alpha_5
    assert alpha_4 < alpha_5
def _loadChangeLogText(self) -> str:
    """Read change_log.txt from the application resources and render it as HTML.

    The file format (as parsed here) is line-oriented:
      - "[x.y.z]" starts a new version section,
      - "*Header" starts a new header within the current version,
      - any other non-empty line is an entry under the current header.

    :return: An HTML string with one <h1> per version (newest first) and <b> headers.
    """
    # Load change log texts and organize them with a dict
    file_path = Resources.getPath(Resources.Texts, "change_log.txt")
    change_logs_dict = {}  # type: Dict[Version, Dict[str, List[str]]]
    with open(file_path, "r", encoding = "utf-8") as f:
        open_version = None  # type: Optional[Version]
        open_header = ""  # Initialise to an empty header in case there is no "*" in the first line of the changelog
        for line in f:
            line = line.replace("\n", "")
            if "[" in line and "]" in line:
                # A "[x.y.z]" line opens a new version section.
                line = line.replace("[", "")
                line = line.replace("]", "")
                open_version = Version(line)
                if open_version > Version([14, 99, 99]):  # Bit of a hack: We released the 15.x.x versions before 2.x
                    # Map 15.x.x onto 0.x.x so it sorts below the 2.x-era versions.
                    open_version = Version([0, open_version.getMinor(), open_version.getRevision(), open_version.getPostfixVersion()])
                open_header = ""
                change_logs_dict[open_version] = collections.OrderedDict()
            elif line.startswith("*"):
                # A "*Header" line opens a new header inside the current version.
                open_header = line.replace("*", "")
                change_logs_dict[cast(Version, open_version)][open_header] = []
            elif line != "":
                # Any other non-empty line is an entry under the current header.
                if open_header not in change_logs_dict[cast(Version, open_version)]:
                    change_logs_dict[cast(Version, open_version)][open_header] = []
                change_logs_dict[cast(Version, open_version)][open_header].append(line)

    # Format changelog text: newest version first.
    content = ""
    for version in sorted(change_logs_dict.keys(), reverse = True):
        text_version = version
        if version < Version([1, 0, 0]):  # Bit of a hack: We released the 15.x.x versions before 2.x
            # Undo the 0.x.x mapping above so the displayed version reads 15.x.x again.
            text_version = Version([15, version.getMinor(), version.getRevision(), version.getPostfixVersion()])
        content += "<h1>" + str(text_version) + "</h1><br>"
        content += ""
        for change in change_logs_dict[version]:
            if str(change) != "":
                content += "<b>" + str(change) + "</b><br>"
            for line in change_logs_dict[version][change]:
                content += str(line) + "<br>"
            content += "<br>"

    return content
def __initializeStoragePaths(cls): Logger.log("d", "Initializing storage paths") # use nested structure: <app-name>/<version>/... if cls.ApplicationVersion == "master" or cls.ApplicationVersion == "unknown": storage_dir_name = os.path.join(cls.ApplicationIdentifier, cls.ApplicationVersion) else: from UM.Version import Version version = Version(cls.ApplicationVersion) storage_dir_name = os.path.join(cls.ApplicationIdentifier, "%s.%s" % (version.getMajor(), version.getMinor())) # config is saved in "<CONFIG_ROOT>/<storage_dir_name>" cls.__config_storage_path = os.path.join(Resources._getConfigStorageRootPath(), storage_dir_name) Logger.log("d", "Config storage path is %s", cls.__config_storage_path) # data is saved in # - on Linux: "<DATA_ROOT>/<storage_dir_name>" # - on other: "<CONFIG_DIR>" (in the config directory) data_root_path = Resources._getDataStorageRootPath() cls.__data_storage_path = cls.__config_storage_path if data_root_path is None else \ os.path.join(data_root_path, storage_dir_name) Logger.log("d", "Data storage path is %s", cls.__data_storage_path) # cache is saved in # - on Linux: "<CACHE_DIR>/<storage_dir_name>" # - on Windows: "<CACHE_DIR>/<storage_dir_name>/cache" # - on Mac: "<CONFIG_DIR>/cache" (in the config directory) cache_root_path = Resources._getCacheStorageRootPath() if cache_root_path is None: cls.__cache_storage_path = os.path.join(cls.__config_storage_path, "cache") else: cls.__cache_storage_path = os.path.join(cache_root_path, storage_dir_name) if Platform.isWindows(): cls.__cache_storage_path = os.path.join(cls.__cache_storage_path, "cache") Logger.log("d", "Cache storage path is %s", cls.__cache_storage_path) if not os.path.exists(cls.__config_storage_path): cls._copyLatestDirsIfPresent() cls.__paths.insert(0, cls.__data_storage_path)
def __matchVersion():
    """Check whether this plugin is compatible with the running Cura version.

    Reads "minimum_cura_version" / "maximum_cura_version" from the plugin.json
    next to this file and compares them against the Cura <major>.<minor> version.

    :return: True when running from source ("master") or when the Cura version is
        within the plugin's declared range, False otherwise (including on any
        failure to read the plugin metadata).
    """
    cura_version = Application.getInstance().getVersion()
    if cura_version == "master":
        Logger.log("d", "Running Cura from source, ignoring version of the plugin")
        return True

    # Only compare on <major>.<minor>; revision differences don't matter for compatibility.
    cura_version = Version(cura_version)
    cura_version = Version([cura_version.getMajor(), cura_version.getMinor()])

    # Get version information from plugin.json
    plugin_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugin.json")
    try:
        with open(plugin_file_path) as plugin_file:
            plugin_info = json.load(plugin_file)
            minimum_cura_version = Version(plugin_info["minimum_cura_version"])
            maximum_cura_version = Version(plugin_info["maximum_cura_version"])
    except Exception:
        # Was a bare "except:", which would also swallow SystemExit/KeyboardInterrupt.
        # Any failure (missing file, bad JSON, missing keys) means "not compatible".
        Logger.log("w", "Could not get version information for the plugin")
        return False

    if minimum_cura_version <= cura_version <= maximum_cura_version:
        return True
    else:
        Logger.log("d", "This version of the plugin is not compatible with this version of Cura. Please check for an update.")
        return False
def test_availableVersionNotInstalledOrBundled(self):
    """A package that is merely known to exist (not installed, not bundled) must not be updatable."""
    package_manager = PackageManager(MagicMock())
    package_manager.addAvailablePackageVersion("beep", Version("1.0.0"))

    # Knowing about a version is not enough: without an installed or bundled
    # baseline there is nothing to update *from*.
    assert package_manager.checkIfPackageCanUpdate("beep") is False
def installPlugin(self, plugin_path: str):
    """Install a plugin from a zip archive into the user's plugin directory.

    :param plugin_path: URL/path of the plugin zip; converted via QUrl.toLocalFile().
    :return: A dict with keys "status" ("ok" / "error" / "duplicate"), "message"
        (localized, user-facing) and "id" (the plugin id, when it could be determined).
    """
    Logger.log("d", "Install plugin got path: %s", plugin_path)
    plugin_path = QUrl(plugin_path).toLocalFile()
    Logger.log("i", "Attempting to install a new plugin %s", plugin_path)
    local_plugin_path = os.path.join(Resources.getStoragePath(Resources.Resources), "plugins")
    plugin_folder = ""
    result = {"status": "error", "message": "", "id": ""}
    success_message = i18n_catalog.i18nc(
        "@info:status",
        "The plugin has been installed.\nPlease re-start the application to activate the plugin."
    )
    try:
        with zipfile.ZipFile(plugin_path, "r") as zip_ref:
            # The plugin id is taken from the first directory entry in the archive.
            plugin_id = None
            for file in zip_ref.infolist():
                if file.filename.endswith("/"):
                    plugin_id = file.filename.strip("/")
                    break

            if plugin_id is None:
                # No top-level directory => not a valid plugin archive.
                result["message"] = i18n_catalog.i18nc(
                    "@info:status",
                    "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>",
                    plugin_path, "Invalid plugin archive.")
                return result
            result["id"] = plugin_id
            plugin_folder = os.path.join(local_plugin_path, plugin_id)

            if os.path.isdir(plugin_folder):  # Plugin is already installed by user (so not a bundled plugin)
                metadata = {}
                with zip_ref.open(plugin_id + "/plugin.json") as metadata_file:
                    metadata = json.loads(metadata_file.read().decode("utf-8"))

                if "version" in metadata:
                    new_version = Version(metadata["version"])
                    old_version = Version(self.getMetaData(plugin_id)["plugin"]["version"])
                    if new_version > old_version:
                        # Strictly newer version: extract over the existing installation.
                        for info in zip_ref.infolist():
                            extracted_path = zip_ref.extract(info.filename, path = plugin_folder)
                            permissions = os.stat(extracted_path).st_mode
                            os.chmod(extracted_path, permissions | stat.S_IEXEC)  # Make these files executable.
                        result["status"] = "ok"
                        result["message"] = success_message
                        return result

                # Same or older version (or no version in metadata): refuse to reinstall.
                Logger.log("w", "The plugin was already installed. Unable to install it again!")
                result["status"] = "duplicate"
                result["message"] = i18n_catalog.i18nc(
                    "@info:status",
                    "Failed to install the plugin;\n<message>{0}</message>",
                    "Plugin was already installed")
                return result
            elif plugin_id in self._plugins:
                # Plugin is already installed, but not by the user (eg; this is a bundled plugin)
                # TODO: Right now we don't support upgrading bundled plugins at all, but we might do so in the future.
                result["message"] = i18n_catalog.i18nc(
                    "@info:status",
                    "Failed to install the plugin;\n<message>{0}</message>",
                    "Unable to upgrade or install bundled plugins.")
                return result

            # Fresh install: extract everything and mark files executable.
            for info in zip_ref.infolist():
                extracted_path = zip_ref.extract(info.filename, path = plugin_folder)
                permissions = os.stat(extracted_path).st_mode
                os.chmod(extracted_path, permissions | stat.S_IEXEC)  # Make these files executable.
    except:  # Installing a new plugin should never crash the application.
        Logger.logException("d", "An exception occurred while installing plugin {path}".format(path = plugin_path))
        result["message"] = i18n_catalog.i18nc(
            "@info:status",
            "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>",
            plugin_folder, "Invalid plugin file")
        return result

    result["status"] = "ok"
    result["message"] = success_message
    return result
def _onNetworkRequestFinished(self, reply):
    """Handle a finished printer-discovery HTTP reply.

    Two kinds of replies are handled, distinguished by the request URL:
      - ".../system": basic device info; (re-)registers the device, and when the
        firmware supports clustering, fires a follow-up ".../printers/" request.
      - ".../printers": cluster member list; completes the device registration
        with the actual cluster size.
    """
    reply_url = reply.url().toString()
    if "system" in reply_url:
        if reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) != 200:
            # Something went wrong with checking the firmware version!
            return
        try:
            system_info = json.loads(bytes(reply.readAll()).decode("utf-8"))
        except:
            Logger.log("e", "Something went wrong converting the JSON.")
            return

        address = reply.url().host()
        has_cluster_capable_firmware = Version(system_info["firmware"]) > self._min_cluster_version
        instance_name = "manual:%s" % address
        properties = {
            b"name": system_info["name"].encode("utf-8"),
            b"address": address.encode("utf-8"),
            b"firmware_version": system_info["firmware"].encode("utf-8"),
            b"manual": b"true",
            b"machine": str(system_info['hardware']["typeid"]).encode("utf-8")
        }

        if has_cluster_capable_firmware:
            # Cluster needs an additional request, before it's completed.
            properties[b"incomplete"] = b"true"

        # Check if the device is still in the list & re-add it with the updated
        # information.
        if instance_name in self._discovered_devices:
            self._onRemoveDevice(instance_name)
            self._onAddDevice(instance_name, address, properties)

        if has_cluster_capable_firmware:
            # We need to request more info in order to figure out the size of the cluster.
            cluster_url = QUrl("http://" + address + self._cluster_api_prefix + "printers/")
            cluster_request = QNetworkRequest(cluster_url)
            self._network_manager.get(cluster_request)

    elif "printers" in reply_url:
        if reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) != 200:
            # Something went wrong with checking the amount of printers the cluster has!
            return
        # So we confirmed that the device is in fact a cluster printer, and we should now know how big it is.
        try:
            cluster_printers_list = json.loads(bytes(reply.readAll()).decode("utf-8"))
        except:
            Logger.log("e", "Something went wrong converting the JSON.")
            return

        address = reply.url().host()
        instance_name = "manual:%s" % address
        if instance_name in self._discovered_devices:
            # Re-register the device, now complete and with its real cluster size.
            device = self._discovered_devices[instance_name]
            properties = device.getProperties().copy()
            if b"incomplete" in properties:
                del properties[b"incomplete"]
            properties[b'cluster_size'] = len(cluster_printers_list)
            self._onRemoveDevice(instance_name)
            self._onAddDevice(instance_name, address, properties)
def loadPlugin(self, plugin_id: str) -> None:
    """Load and register a single plugin by id.

    Skips plugins that are already loaded, disabled, have invalid metadata, or
    whose supported SDK versions are incompatible with the current API version.

    :param plugin_id: The id of the plugin to load.
    :raises PluginNotFoundError: If no plugin with this id can be found on disk.
    """
    # If plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # Find the actual plugin on drive, do security checks if necessary:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but isn't in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # Do not load plugin that has been disabled
    if plugin_id in self._disabled_plugins:
        Logger.log("i", "Plugin [%s] has been disabled. Skip loading it.", plugin_id)
        return

    # If API version is incompatible, don't load it.
    # A plugin is loadable when at least one of its declared SDK versions is compatible.
    supported_sdk_versions = self._metadata[plugin_id].get("plugin", {}).get("supported_sdk_versions", [Version("0")])
    is_plugin_supported = False
    for supported_sdk_version in supported_sdk_versions:
        is_plugin_supported |= self.isPluginApiVersionCompatible(supported_sdk_version)
        if is_plugin_supported:
            break

    if not is_plugin_supported:
        Logger.log("w",
                   "Plugin [%s] with supported sdk versions [%s] is incompatible with the current sdk version [%s].",
                   plugin_id, [str(version) for version in supported_sdk_versions], self._api_version)
        self._outdated_plugins.append(plugin_id)
        return

    try:
        to_register = plugin.register(self._application)  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("w", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                # A list of plugin objects: pair each with the metadata entry at the same index.
                for metadata_index, nested_plugin_object in enumerate(plugin_object):
                    nested_plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                    all_metadata = self._metadata[plugin_id].get(plugin_type, [])
                    try:
                        nested_plugin_object.setMetaData(all_metadata[metadata_index])
                    except IndexError:
                        # Fewer metadata entries than objects: fall back to empty metadata.
                        nested_plugin_object.setMetaData({})
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                metadata = self._metadata[plugin_id].get(plugin_type, {})
                if type(metadata) == list:
                    # A single object with list-shaped metadata: use the first entry if present.
                    try:
                        metadata = metadata[0]
                    except IndexError:
                        metadata = {}
                plugin_object.setMetaData(metadata)
                self._addPluginObject(plugin_object, plugin_id, plugin_type)

        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.info("Loaded plugin %s", plugin_id)

    except Exception as ex:
        # Loading a plugin must never take down the application; log and continue.
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
def startSplashWindowPhase(self) -> None:
    """Run the splash-screen phase of application start-up.

    In order: initialize the package manager, load plugins, run configuration
    upgrades, (re)load preferences, record whether this run upgraded from an
    older version, restore the recent-files list, and create the tray icon.
    """
    super().startSplashWindowPhase()

    i18n_catalog = i18nCatalog("uranium")

    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Initializing package manager..."))
    self._package_manager.initialize()

    # Restore default SIGINT handling so Ctrl+C terminates the app.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    # This is done here as a lot of plugins require a correct gl context. If you want to change the framework,
    # these checks need to be done in your <framework>Application.py class __init__().
    self._configuration_error_message = ConfigurationErrorMessage(
        self,
        i18n_catalog.i18nc("@info:status", "Your configuration seems to be corrupt."),
        lifetime = 0,
        title = i18n_catalog.i18nc("@info:title", "Configuration errors")
    )

    # Remove, install, and then loading plugins
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading plugins..."))
    # Remove and install the plugins that have been scheduled
    self._plugin_registry.initializeBeforePluginsAreLoaded()
    self._loadPlugins()
    self._plugin_registry.checkRequiredPlugins(self.getRequiredPlugins())
    self.pluginsLoaded.emit()

    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Updating configuration..."))
    with self._container_registry.lockFile():
        VersionUpgradeManager.getInstance().upgrade()

    # Load preferences again because before we have loaded the plugins, we don't have the upgrade routine for
    # the preferences file. Now that we have, load the preferences file again so it can be upgraded and loaded.
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading preferences..."))
    try:
        preferences_filename = Resources.getPath(Resources.Preferences, self._app_name + ".cfg")
        with open(preferences_filename, "r", encoding = "utf-8") as f:
            serialized = f.read()
        # This performs the upgrade for Preferences
        self._preferences.deserialize(serialized)
        self._preferences.setValue("general/plugins_to_remove", "")
        self._preferences.writeToFile(preferences_filename)
    except (EnvironmentError, UnicodeDecodeError):
        Logger.log("i", "The preferences file cannot be opened or it is corrupted, so we will use default values")

    self.processEvents()
    # Force the configuration file to be written again since the list of plugins to remove maybe changed
    try:
        self._preferences_filename = Resources.getPath(Resources.Preferences, self._app_name + ".cfg")
        self._preferences.readFromFile(self._preferences_filename)
    except FileNotFoundError:
        Logger.log("i", "The preferences file '%s' cannot be found, will use default values", self._preferences_filename)
        self._preferences_filename = Resources.getStoragePath(Resources.Preferences, self._app_name + ".cfg")
    Logger.info("Completed loading preferences.")

    # FIXME: This is done here because we now use "plugins.json" to manage plugins instead of the Preferences file,
    # but the PluginRegistry will still import data from the Preferences files if present, such as disabled plugins,
    # so we need to reset those values AFTER the Preferences file is loaded.
    self._plugin_registry.initializeAfterPluginsAreLoaded()

    # Check if we have just updated from an older version
    self._preferences.addPreference("general/last_run_version", "")
    last_run_version_str = self._preferences.getValue("general/last_run_version")
    if not last_run_version_str:
        # First run (no recorded version): treat it as the current version, i.e. not an upgrade.
        last_run_version_str = self._version
    last_run_version = Version(last_run_version_str)
    current_version = Version(self._version)
    if last_run_version < current_version:
        self._just_updated_from_old_version = True
    self._preferences.setValue("general/last_run_version", str(current_version))
    self._preferences.writeToFile(self._preferences_filename)

    # Preferences: recent files
    self._preferences.addPreference("%s/recent_files" % self._app_name, "")
    file_names = self._preferences.getValue("%s/recent_files" % self._app_name).split(";")
    for file_name in file_names:
        # Silently drop entries that no longer exist on disk.
        if not os.path.isfile(file_name):
            continue
        self._recent_files.append(QUrl.fromLocalFile(file_name))

    if not self.getIsHeadLess():
        # Initialize System tray icon and make it invisible because it is used only to show pop up messages
        self._tray_icon = None
        if self._tray_icon_name:
            try:
                self._tray_icon = QIcon(Resources.getPath(Resources.Images, self._tray_icon_name))
                self._tray_icon_widget = QSystemTrayIcon(self._tray_icon)
                self._tray_icon_widget.setVisible(False)
                Logger.info("Created system tray icon.")
            except FileNotFoundError:
                Logger.log("w", "Could not find the icon %s", self._tray_icon_name)
def __init__(self, name: str, version: str, api_version: str, app_display_name: str = "", build_type: str = "", is_debug_mode: bool = False, **kwargs) -> None:
    """Init method

    :param name: :type{string} The name of the application.
    :param version: :type{string} Version, formatted as major.minor.rev
    :param api_version: :type{string} The plugin API version this application exposes.
    :param app_display_name: :type{string} Human-readable application name; defaults to ``name``.
    :param build_type: Additional version info on the type of build this is, such as "master".
    :param is_debug_mode: Whether to run in debug mode.
    :raises RuntimeError: When an Application instance already exists (singleton).
    """
    if Application.__instance is not None:
        raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
    Application.__instance = self

    super().__init__()  # Call super to make multiple inheritance work.

    self._api_version = Version(api_version)  # type: Version

    self._app_name = name  # type: str
    self._app_display_name = app_display_name if app_display_name else name  # type: str
    self._version = version  # type: str
    self._build_type = build_type  # type: str
    self._is_debug_mode = is_debug_mode  # type: bool
    self._is_headless = False  # type: bool
    self._use_external_backend = False  # type: bool

    # Set during start-up when the last-run version was older than the current one.
    self._just_updated_from_old_version = False  # type: bool

    self._config_lock_filename = "{name}.lock".format(name = self._app_name)  # type: str

    self._cli_args = None  # type: argparse.Namespace
    self._cli_parser = argparse.ArgumentParser(prog = self._app_name, add_help = False)  # type: argparse.ArgumentParser

    self._main_thread = threading.current_thread()  # type: threading.Thread

    self.default_theme = self._app_name  # type: str # Default theme is the application name
    self._default_language = "en_US"  # type: str

    self.change_log_url = "https://github.com/Ultimaker/Uranium"  # Where to find a more detailed description of the recent updates.

    # The following are filled in later during start-up; None until then.
    self._preferences_filename = None  # type: str
    self._preferences = None  # type: Preferences

    self._extensions = []  # type: List[Extension]
    self._required_plugins = []  # type: List[str]

    self._package_manager_class = PackageManager  # type: type
    self._package_manager = None  # type: PackageManager

    self._plugin_registry = None  # type: PluginRegistry
    self._container_registry_class = ContainerRegistry  # type: type
    self._container_registry = None  # type: ContainerRegistry
    self._global_container_stack = None  # type: Optional[ContainerStack]

    self._controller = None  # type: Controller
    self._backend = None  # type: Backend
    self._output_device_manager = None  # type: OutputDeviceManager
    self._operation_stack = None  # type: OperationStack

    self._visible_messages = []  # type: List[Message]
    self._message_lock = threading.Lock()  # type: threading.Lock

    self._app_install_dir = self.getInstallPrefix()  # type: str

    self._workspace_metadata_storage = WorkspaceMetadataStorage()  # type: WorkspaceMetadataStorage
def run(self):
    """Check the configured URL for a newer release and show an update message.

    Downloads a JSON document describing the latest versions per platform,
    compares against the running application version, and pops up a Message
    with download/learn-more actions when a newer version exists. Failures and
    the "no new version" case are reported via (optionally silent) Messages.
    """
    if not self._url:
        # NOTE(review): execution deliberately continues here; the subsequent
        # urlopen failure is reported through the generic error path below.
        Logger.log("e", "Can not check for a new release. URL not set!")
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion())}
        request = urllib.request.Request(self._url, headers = headers)
        latest_version_file = urllib.request.urlopen(request)
    except Exception as e:
        Logger.log("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            # BUGFIX: was `is not "master"` — an identity comparison against a string
            # literal, which is implementation-dependent (and a SyntaxWarning on
            # modern Python). Equality is what is meant here.
            if Application.getInstance().getVersion() != "master":
                local_version = Version(Application.getInstance().getVersion())
            else:
                if not self.silent:
                    Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                            title = i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w", "Could not determine application version from string %s, not checking for updates",
                       Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                        title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    # BUGFIX: the key was previously bound to a local named `os`,
                    # shadowing the `os` module inside this function.
                    os_name = key
                    if platform.system() == os_name:  # TODO: add architecture check
                        newest_version = Version([int(value["major"]), int(value["minor"]), int(value["revision"])])
                        if local_version < newest_version:
                            Logger.log("i", "Found a new version of the software. Spawning message")
                            title_message = i18n_catalog.i18nc("@info:status", "Continuum {0} is available!", newest_version)
                            content_message = i18n_catalog.i18nc("@info:status", "Continuum {0} provides better and reliable printing experience.", newest_version)
                            message = Message(text = content_message, title = title_message)
                            message.addAction("download",
                                              i18n_catalog.i18nc("@action:button", "Download"),
                                              "[no_icon]", "[no_description]")
                            message.addAction("new_features",
                                              i18n_catalog.i18nc("@action:button", "Learn more about new features"),
                                              "[no_icon]", "[no_description]",
                                              button_style = Message.ActionButtonStyle.LINK,
                                              button_align = Message.ActionButtonStyle.BUTTON_ALIGN_LEFT)
                            if self._set_download_url_callback:
                                self._set_download_url_callback(value["url"])
                            message.actionTriggered.connect(self._callback)
                            message.show()
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An error occurred while checking for updates."),
                title = i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
def __init__(self, application: "Application"):
    """Test fixture: build a PluginRegistry with a fixed API version and config filename.

    The name-mangled singleton slot is cleared first so every test starts from
    a fresh registry instance.
    """
    PluginRegistry._PluginRegistry__instance = None
    super().__init__(application)
    self._plugin_config_filename = "test_file"
    self._api_version = Version("5.5.0")
def _findLatestDirInPaths(cls, search_path_list: List[str], dir_type: str = "config") -> Optional[str]:
    """Find the directory holding the newest previous version's config or data.

    :param search_path_list: Root directories to search, in priority order.
    :param dir_type: Either "config" or "data"; selects the fallback check for
        non-versioned directories.
    :return: The path of the best candidate directory, or None if none found.
    """
    # version dir name must match: <digit(s)>.<digit(s)>
    version_regex = re.compile(r"^[0-9]+\.[0-9]+$")
    check_dir_type_func_dict = {
        "data": Resources._isNonVersionedDataDir,
        "config": Resources._isNonVersionedConfigDir
    }
    check_dir_type_func = check_dir_type_func_dict[dir_type]

    # CURA-6744
    # If the application version matches "<major>.<minor>", create a Version object for it for comparison, so we
    # can find the directory with the highest version that's below the application version.
    # An application version that doesn't match "<major>.<minor>", e.g. "master", probably indicates a temporary
    # version, and in this case, this temporary version is treated as the latest version. It will ONLY upgrade from
    # a highest "<major>.<minor>" version if it's present.
    # For app version, there can be extra version strings at the end. For comparison, we only want the
    # "<major>.<minor>.<patch>" part. Here we use a regex to find that part in the app version string.
    semantic_version_regex = re.compile(r"(^[0-9]+\.([0-9]+)*).*$")
    app_version = None  # type: Optional[Version]
    app_version_str = cls.ApplicationVersion
    if app_version_str is not None:
        result = semantic_version_regex.match(app_version_str)
        if result is not None:
            # NOTE(review): group(0) is the WHOLE match (including the trailing ".*"),
            # so any suffix like "-beta" is kept, contradicting the comment above which
            # says only the numeric part is wanted; group(1) looks intended — confirm
            # against how Version() parses suffixed strings before changing.
            app_version_str = result.group(0)
            app_version = Version(app_version_str)

    latest_config_path = None  # type: Optional[str]
    for search_path in search_path_list:
        if not os.path.exists(search_path):
            continue

        # Give priority to a folder with files with version number in it
        try:
            storage_dir_name_list = next(os.walk(search_path))[1]
        except StopIteration:  # There is no next().
            continue

        # Keep only subdirectories named like "<major>.<minor>".
        match_dir_name_list = [n for n in storage_dir_name_list if version_regex.match(n) is not None]
        match_dir_version_list = [{"dir_name": n, "version": Version(n)} for n in match_dir_name_list]  # type: List[Dict[str, Union[str, Version]]]
        # Newest first.
        match_dir_version_list = sorted(match_dir_version_list, key = lambda x: x["version"], reverse = True)
        if app_version is not None:
            # Only consider versions strictly below the running application version.
            match_dir_version_list = list(x for x in match_dir_version_list if x["version"] < app_version)
        if len(match_dir_version_list) > 0:
            if isinstance(match_dir_version_list[0]["dir_name"], str):
                latest_config_path = os.path.join(search_path, match_dir_version_list[0]["dir_name"])  # type: ignore
        if latest_config_path is not None:
            break

        # If not, check if there is a non versioned data dir
        if check_dir_type_func(search_path):
            latest_config_path = search_path
            break

    return latest_config_path
def loadPlugin(self, plugin_id: str) -> None:
    """Load and register a single plugin by id (strict-APIVersion variant).

    Skips plugins that are already loaded or disabled, whose metadata is
    invalid, or whose "api" metadata does not exactly equal the registry's
    APIVersion.

    :param plugin_id: The id of the plugin to load.
    :raises PluginNotFoundError: If no plugin with this id can be found on drive.
    """
    # If plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # If the plugin is in the list of disabled plugins, alert and return:
    if plugin_id in self._disabled_plugins:
        Logger.log("d", "Plugin %s was disabled", plugin_id)
        return

    # Find the actual plugin on drive:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but isn't in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # If API version is incompatible, don't load it. Note this is an exact-match
    # check, unlike the supported_sdk_versions range check used elsewhere.
    if self._metadata[plugin_id].get("plugin", {}).get("api", 0) != self.APIVersion:
        Logger.log("w", "Plugin %s uses an incompatible API version, ignoring", plugin_id)
        del self._metadata[plugin_id]
        return

    # HACK: For OctoPrint plug-in version 3.2.2, it broke the start-up sequence when auto-connecting.
    # Remove this hack once we've increased the API version number to something higher than 4.
    version = self._metadata[plugin_id].get("plugin", {}).get("version", "0.0.0")
    if plugin_id == "OctoPrintPlugin" and Version(version) < Version("3.3.0"):
        Logger.log("e", "Plugin OctoPrintPlugin version {version} was disabled because it was using an old API for network connection.".format(version = version))
        return

    try:
        to_register = plugin.register(self._application)  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("e", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                # A plugin may register several objects of the same type.
                for nested_plugin_object in plugin_object:
                    nested_plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                self._addPluginObject(plugin_object, plugin_id, plugin_type)

        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.log("i", "Loaded plugin %s", plugin_id)

    except KeyError as e:
        Logger.log("e", "Error loading plugin %s:", plugin_id)
        Logger.log("e", "Unknown plugin type: %s", str(e))
    except Exception as e:
        # Loading a plugin must never take down the application; log and continue.
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
def ratePackage(self, package_id: str, rating: int) -> None:
    """Submit a user rating for a package to the packages API.

    :param package_id: The id of the package being rated.
    :param rating: The rating value to submit.
    """
    import json  # Local import keeps this fix self-contained; harmless if already imported at module level.

    url = "{base_url}/packages/{package_id}/ratings".format(base_url = self._api_url, package_id = package_id)
    # Build the payload with json.dumps instead of hand-rolled %-formatting: the
    # old string template would produce invalid JSON if the version string ever
    # contained a quote or backslash, and made no guarantee rating is an int.
    data = json.dumps({"data": {"cura_version": str(Version(self._application.getVersion())), "rating": rating}})
    self._application.getHttpRequestManager().put(url, headers_dict = self._request_headers, data = data.encode())
def run(self):
    """Check the configured URL for a newer release and show an update message.

    Downloads a JSON document (over TLS, verified with certifi CA certificates)
    describing the latest versions per platform, compares against the running
    application version, and delegates to self.showUpdate() when a newer,
    not-previously-dismissed version exists.
    """
    if not self._url:
        # NOTE(review): execution continues after this log line; the urlopen
        # below will then fail and be reported via the generic error path.
        Logger.log("e", "Can not check for a new release. URL not set!")
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion())}
        # CURA-6698 Create an SSL context and use certifi CA certificates for verification.
        context = ssl.SSLContext(protocol = ssl.PROTOCOL_TLSv1_2)
        context.load_verify_locations(cafile = certifi.where())
        request = urllib.request.Request(self._url, headers = headers)
        latest_version_file = urllib.request.urlopen(request, context = context)
    except Exception as e:
        Logger.logException("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            if Application.getInstance().getVersion() != "master":
                local_version = Version(Application.getInstance().getVersion())
            else:
                # "master" (source builds) cannot be meaningfully compared to releases.
                if not self.silent:
                    Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                            title = i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w", "Could not determine application version from string %s, not checking for updates",
                       Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                        title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    os = key
                    if platform.system() == os:  # TODO: add architecture check
                        newest_version = Version([int(value["major"]), int(value["minor"]), int(value["revision"])])
                        if local_version < newest_version:
                            preferences = Application.getInstance().getPreferences()
                            latest_version_shown = preferences.getValue("info/latest_update_version_shown")
                            # NOTE(review): this compares a stored preference (string) against a
                            # Version object; it relies on Version's equality handling of strings —
                            # confirm against UM.Version before touching.
                            if latest_version_shown == newest_version and not self.display_same_version:
                                continue  # Don't show this update again. The user already clicked it away and doesn't want it again.
                            preferences.setValue("info/latest_update_version_shown", str(newest_version))
                            Logger.log("i", "Found a new version of the software. Spawning message")
                            self.showUpdate(newest_version, value["url"])
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An error occurred while checking for updates."),
                title = i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
def getSelectFaceSupported() -> bool: # Use a dummy postfix, since an equal version with a postfix is considered smaller normally. return Version(OpenGL.getInstance().getOpenGLVersion()) >= Version( "4.1 dummy-postfix")
class CloudOutputDevice(UltimakerNetworkedPrinterOutputDevice):
    """Output device that talks to a remote printer cluster through the Ultimaker cloud API."""

    # The interval with which the remote cluster is checked.
    # We can do this relatively often as this API call is quite fast.
    CHECK_CLUSTER_INTERVAL = 10.0  # seconds

    # Override the network response timeout in seconds after which we consider the device offline.
    # For cloud this needs to be higher because the interval at which we check the status is higher as well.
    NETWORK_RESPONSE_CONSIDER_OFFLINE = 15.0  # seconds

    # The minimum version of firmware that support print job actions over cloud.
    PRINT_JOB_ACTIONS_MIN_VERSION = Version("5.3.0")

    # Notify can only use signals that are defined by the class that they are in, not inherited ones.
    # Therefore we create a private signal used to trigger the printersChanged signal.
    _cloudClusterPrintersChanged = pyqtSignal()

    ## Creates a new cloud output device
    #  \param api_client: The client that will run the API calls
    #  \param cluster: The device response received from the cloud API.
    #  \param parent: The optional parent of this output device.
    def __init__(self, api_client: CloudApiClient, cluster: CloudClusterResponse, parent: Optional[QObject] = None) -> None:

        # The following properties are expected on each networked output device.
        # Because the cloud connection does not offer all of these, we manually construct this version here.
        # An example of why this is needed is the selection of the compatible file type when exporting the tool path.
        properties = {
            b"address": cluster.host_internal_ip.encode() if cluster.host_internal_ip else b"",
            b"name": cluster.friendly_name.encode() if cluster.friendly_name else b"",
            b"firmware_version": cluster.host_version.encode() if cluster.host_version else b"",
            b"printer_type": cluster.printer_type.encode() if cluster.printer_type else b"",
            b"cluster_size": b"1"  # cloud devices are always clusters of at least one
        }

        super().__init__(
            device_id=cluster.cluster_id,
            address="",
            connection_type=ConnectionType.CloudConnection,
            properties=properties,
            parent=parent
        )

        self._api = api_client
        self._account = api_client.account
        self._cluster = cluster
        self.setAuthenticationState(AuthState.NotAuthenticated)
        self._setInterfaceElements()

        # Trigger the printersChanged signal when the private signal is triggered.
        self.printersChanged.connect(self._cloudClusterPrintersChanged)

        # Keep server string of the last generated time to avoid updating models more than once for the same response
        self._received_printers = None  # type: Optional[List[ClusterPrinterStatus]]
        self._received_print_jobs = None  # type: Optional[List[ClusterPrintJobStatus]]

        # Reference to the uploaded print job / mesh
        # We do this to prevent re-uploading the same file multiple times.
        self._tool_path = None  # type: Optional[bytes]
        self._uploaded_print_job = None  # type: Optional[CloudPrintJobResponse]

    ## Connects this device.
    def connect(self) -> None:
        if self.isConnected():
            return  # Already connected; nothing to do.
        super().connect()
        Logger.log("i", "Connected to cluster %s", self.key)
        CuraApplication.getInstance().getBackend().backendStateChange.connect(self._onBackendStateChange)
        self._update()

    ## Disconnects the device
    def disconnect(self) -> None:
        if not self.isConnected():
            return  # Already disconnected; nothing to do.
        super().disconnect()
        Logger.log("i", "Disconnected from cluster %s", self.key)
        CuraApplication.getInstance().getBackend().backendStateChange.disconnect(self._onBackendStateChange)

    ## Resets the print job that was uploaded to force a new upload, runs whenever the user re-slices.
def _onBackendStateChange(self, _: BackendState) -> None: self._tool_path = None self._uploaded_print_job = None ## Checks whether the given network key is found in the cloud's host name def matchesNetworkKey(self, network_key: str) -> bool: # Typically, a network key looks like "ultimakersystem-aabbccdd0011._ultimaker._tcp.local." # the host name should then be "ultimakersystem-aabbccdd0011" if network_key.startswith(self.clusterData.host_name): return True # However, for manually added printers, the local IP address is used in lieu of a proper # network key, so check for that as well if self.clusterData.host_internal_ip is not None and network_key in self.clusterData.host_internal_ip: return True return False ## Set all the interface elements and texts for this output device. def _setInterfaceElements(self) -> None: self.setPriority(2) # Make sure we end up below the local networking and above 'save to file'. self.setShortDescription(I18N_CATALOG.i18nc("@action:button", "Print via Cloud")) self.setDescription(I18N_CATALOG.i18nc("@properties:tooltip", "Print via Cloud")) self.setConnectionText(I18N_CATALOG.i18nc("@info:status", "Connected via Cloud")) ## Called when the network data should be updated. def _update(self) -> None: super()._update() if time() - self._time_of_last_request < self.CHECK_CLUSTER_INTERVAL: return # avoid calling the cloud too often self._time_of_last_request = time() if self._account.isLoggedIn: self.setAuthenticationState(AuthState.Authenticated) self._last_request_time = time() self._api.getClusterStatus(self.key, self._onStatusCallFinished) else: self.setAuthenticationState(AuthState.NotAuthenticated) ## Method called when HTTP request to status endpoint is finished. # Contains both printers and print jobs statuses in a single response. 
def _onStatusCallFinished(self, status: CloudClusterStatus) -> None: self._responseReceived() if status.printers != self._received_printers: self._received_printers = status.printers self._updatePrinters(status.printers) if status.print_jobs != self._received_print_jobs: self._received_print_jobs = status.print_jobs self._updatePrintJobs(status.print_jobs) ## Called when Cura requests an output device to receive a (G-code) file. def requestWrite(self, nodes: List[SceneNode], file_name: Optional[str] = None, limit_mimetypes: bool = False, file_handler: Optional[FileHandler] = None, filter_by_machine: bool = False, **kwargs) -> None: # Show an error message if we're already sending a job. if self._progress.visible: PrintJobUploadBlockedMessage().show() return # Indicate we have started sending a job. self.writeStarted.emit(self) # The mesh didn't change, let's not upload it to the cloud again. # Note that self.writeFinished is called in _onPrintUploadCompleted as well. if self._uploaded_print_job: self._api.requestPrint(self.key, self._uploaded_print_job.job_id, self._onPrintUploadCompleted) return # Export the scene to the correct file type. job = ExportFileJob(file_handler=file_handler, nodes=nodes, firmware_version=self.firmwareVersion) job.finished.connect(self._onPrintJobCreated) job.start() ## Handler for when the print job was created locally. # It can now be sent over the cloud. def _onPrintJobCreated(self, job: ExportFileJob) -> None: output = job.getOutput() self._tool_path = output # store the tool path to prevent re-uploading when printing the same file again request = CloudPrintJobUploadRequest( job_name=job.getFileName(), file_size=len(output), content_type=job.getMimeType(), ) self._api.requestUpload(request, self._uploadPrintJob) ## Uploads the mesh when the print job was registered with the cloud API. # \param job_response: The response received from the cloud API. 
def _uploadPrintJob(self, job_response: CloudPrintJobResponse) -> None: if not self._tool_path: return self._onUploadError() self._progress.show() self._uploaded_print_job = job_response # store the last uploaded job to prevent re-upload of the same file self._api.uploadToolPath(job_response, self._tool_path, self._onPrintJobUploaded, self._progress.update, self._onUploadError) ## Requests the print to be sent to the printer when we finished uploading the mesh. def _onPrintJobUploaded(self) -> None: self._progress.update(100) print_job = cast(CloudPrintJobResponse, self._uploaded_print_job) self._api.requestPrint(self.key, print_job.job_id, self._onPrintUploadCompleted) ## Shows a message when the upload has succeeded # \param response: The response from the cloud API. def _onPrintUploadCompleted(self, response: CloudPrintResponse) -> None: self._progress.hide() PrintJobUploadSuccessMessage().show() self.writeFinished.emit() ## Displays the given message if uploading the mesh has failed # \param message: The message to display. def _onUploadError(self, message: str = None) -> None: self._progress.hide() self._uploaded_print_job = None PrintJobUploadErrorMessage(message).show() self.writeError.emit() ## Whether the printer that this output device represents supports print job actions via the cloud. @pyqtProperty(bool, notify=_cloudClusterPrintersChanged) def supportsPrintJobActions(self) -> bool: if not self._printers: return False version_number = self.printers[0].firmwareVersion.split(".") firmware_version = Version([version_number[0], version_number[1], version_number[2]]) return firmware_version >= self.PRINT_JOB_ACTIONS_MIN_VERSION ## Set the remote print job state. 
def setJobState(self, print_job_uuid: str, state: str) -> None: self._api.doPrintJobAction(self._cluster.cluster_id, print_job_uuid, state) @pyqtSlot(str, name="sendJobToTop") def sendJobToTop(self, print_job_uuid: str) -> None: self._api.doPrintJobAction(self._cluster.cluster_id, print_job_uuid, "move", {"list": "queued", "to_position": 0}) @pyqtSlot(str, name="deleteJobFromQueue") def deleteJobFromQueue(self, print_job_uuid: str) -> None: self._api.doPrintJobAction(self._cluster.cluster_id, print_job_uuid, "remove") @pyqtSlot(str, name="forceSendJob") def forceSendJob(self, print_job_uuid: str) -> None: self._api.doPrintJobAction(self._cluster.cluster_id, print_job_uuid, "force") @pyqtSlot(name="openPrintJobControlPanel") def openPrintJobControlPanel(self) -> None: QDesktopServices.openUrl(QUrl(self.clusterCloudUrl)) @pyqtSlot(name="openPrinterControlPanel") def openPrinterControlPanel(self) -> None: QDesktopServices.openUrl(QUrl(self.clusterCloudUrl)) ## Gets the cluster response from which this device was created. @property def clusterData(self) -> CloudClusterResponse: return self._cluster ## Updates the cluster data from the cloud. @clusterData.setter def clusterData(self, value: CloudClusterResponse) -> None: self._cluster = value ## Gets the URL on which to monitor the cluster via the cloud. @property def clusterCloudUrl(self) -> str: root_url_prefix = "-staging" if self._account.is_staging else "" return "https://mycloud{}.ultimaker.com/app/jobs/{}".format(root_url_prefix, self.clusterData.cluster_id)
def installPlugin(self, plugin_path: str): plugin_path = QUrl(plugin_path).toLocalFile() Logger.log("d", "Attempting to install a new plugin %s", plugin_path) local_plugin_path = os.path.join( Resources.getStoragePath(Resources.Resources), "plugins") plugin_folder = "" try: with zipfile.ZipFile(plugin_path, "r") as zip_ref: plugin_id = None for file in zip_ref.infolist(): if file.filename.endswith("/"): plugin_id = file.filename.strip("/") break if plugin_id is None: return { "status": "error", "message": i18n_catalog.i18nc( "@info:status", "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>", plugin_path, "Invalid plugin file") } plugin_folder = os.path.join(local_plugin_path, plugin_id) if os.path.isdir( plugin_folder ): # Plugin is already installed by user (so not a bundled plugin) metadata = {} with zip_ref.open(plugin_id + "/plugin.json") as metadata_file: metadata = json.loads( metadata_file.read().decode("utf-8")) if "version" in metadata: new_version = Version(metadata["version"]) old_version = Version( self.getMetaData(plugin_id)["plugin"]["version"]) if new_version > old_version: zip_ref.extractall(plugin_folder) return { "status": "ok", "message": i18n_catalog.i18nc( "@info:status", "The plugin has been installed.\n Please re-start the application to activate the plugin." ) } Logger.log( "w", "The plugin was already installed. Unable to install it again!" ) return { "status": "duplicate", "message": i18n_catalog.i18nc( "@info:status", "Failed to install the plugin; \n<message>{0}</message>", "Plugin was already installed") } elif plugin_id in self._plugins: # Plugin is already installed, but not by the user (eg; this is a bundled plugin) # TODO: Right now we don't support upgrading bundled plugins at all, but we might do so in the future. 
return { "status": "duplicate", "message": i18n_catalog.i18nc( "@info:status", "Failed to install the plugin; \n<message>{0}</message>", "Unable to upgrade or instal bundled plugins.") } zip_ref.extractall(plugin_folder) except: # Installing a new plugin should never crash the application. Logger.logException( "d", "An exception occurred while installing plugin ") return { "status": "error", "message": i18n_catalog.i18nc( "@info:status", "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>", plugin_folder, "Invalid plugin file") } return { "status": "ok", "message": i18n_catalog.i18nc( "@info:status", "The plugin has been installed.\n Please re-start the application to activate the plugin." ) }
def _onPutBedTemperatureCompleted(self, reply): if Version(self._preheat_printer.firmwareVersion) < Version("3.5.92"): # If it was handling a preheat, it isn't anymore. self._preheat_request_in_progress = False
    def __init__(self, name: str, version: str, api_version: str, app_display_name: str = "",
                 build_type: str = "", is_debug_mode: bool = False, **kwargs) -> None:
        """Init method

        :param name: :type{string} The name of the application.
        :param version: :type{string} Version, formatted as major.minor.rev
        :param api_version: :type{string} The (plugin) API version this application exposes.
        :param app_display_name: :type{string} Name shown to the user; falls back to ``name`` when empty.
        :param build_type: Additional version info on the type of build this is, such as "master".
        :param is_debug_mode: Whether to run in debug mode.
        """
        if Application.__instance is not None:
            # This class is a singleton; creating a second instance is a programming error.
            raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)

        super().__init__()  # Call super to make multiple inheritance work.
        Application.__instance = self

        self._api_version = Version(api_version)  # type: Version

        self._app_name = name  # type: str
        self._app_display_name = app_display_name if app_display_name else name  # type: str
        self._version = version  # type: str
        self._build_type = build_type  # type: str
        self._is_debug_mode = is_debug_mode  # type: bool
        self._is_headless = False  # type: bool
        self._use_external_backend = False  # type: bool

        self._just_updated_from_old_version = False  # type: bool

        self._config_lock_filename = "{name}.lock".format(name=self._app_name)  # type: str

        self._cli_args = None  # type: Optional[argparse.Namespace]
        self._cli_parser = argparse.ArgumentParser(prog=self._app_name, add_help=False)  # type: argparse.ArgumentParser

        self._main_thread = threading.current_thread()  # type: threading.Thread

        self.default_theme = self._app_name  # type: str # Default theme is the application name
        self._default_language = "en_US"  # type: str

        self.change_log_url: str = "https://github.com/Ultimaker/Uranium"  # Where to find a more detailed description of the recent updates.
        self.beta_change_log_url: str = "https://github.com/Ultimaker/Uranium"  # Where to find a more detailed description of proposed updates.

        # The following managers/registries are created lazily later on, so they start out as None.
        self._preferences_filename = None  # type: Optional[str]
        self._preferences = None  # type: Optional[Preferences]

        self._extensions = []  # type: List[Extension]
        self._file_providers = []  # type: List[FileProvider]
        self._required_plugins = []  # type: List[str]

        self._package_manager_class = PackageManager  # type: type
        self._package_manager = None  # type: Optional[PackageManager]

        self._plugin_registry = None  # type: Optional[PluginRegistry]
        self._container_registry_class = ContainerRegistry  # type: type
        self._container_registry = None  # type: Optional[ContainerRegistry]
        self._global_container_stack = None  # type: Optional[ContainerStack]

        self._file_provider_model = FileProviderModel(application=self)  # type: Optional[FileProviderModel]
        self._controller = None  # type: Optional[Controller]
        self._backend = None  # type: Optional[Backend]
        self._output_device_manager = None  # type: Optional[OutputDeviceManager]
        self._operation_stack = None  # type: Optional[OperationStack]

        self._visible_messages = []  # type: List[Message]
        self._message_lock = threading.Lock()  # type: threading.Lock

        self._app_install_dir = self.getInstallPrefix()  # type: str

        # Intended for keeping plugin workspace metadata that is going to be saved in and retrieved from workspace files.
        # When the workspace is stored, all workspace readers will need to ensure that the workspace metadata is correctly
        # stored to the output file. The same also holds when loading a workspace; the existing data will be cleared
        # and replaced with the data recovered from the file (if any).
        self._workspace_metadata_storage = WorkspaceMetadataStorage()  # type: WorkspaceMetadataStorage

        # Intended for keeping plugin workspace information that is only temporary. The information added in this structure
        # is NOT saved to and retrieved from workspace files.
        self._current_workspace_information = WorkspaceMetadataStorage()  # type: WorkspaceMetadataStorage
    def __init__(self):
        ## Sets up zeroconf-based network discovery, the cloud device manager and the
        #  signal plumbing that connects discovery events to the Qt/QML side.
        super().__init__()
        self._zero_conf = None
        self._zero_conf_browser = None
        self._application = CuraApplication.getInstance()

        # Create a cloud output device manager that abstracts all cloud connection logic away.
        self._cloud_output_device_manager = CloudOutputDeviceManager()

        # Because the model needs to be created in the same thread as the QMLEngine, we use a signal.
        self.addDeviceSignal.connect(self._onAddDevice)
        self.removeDeviceSignal.connect(self._onRemoveDevice)
        self._application.globalContainerStackChanged.connect(self.reCheckConnections)

        self._discovered_devices = {}

        self._network_manager = QNetworkAccessManager()
        self._network_manager.finished.connect(self._onNetworkRequestFinished)

        # Minimum firmware versions for cluster (LAN) and cloud printing support.
        self._min_cluster_version = Version("4.0.0")
        self._min_cloud_version = Version("5.2.0")

        self._api_version = "1"
        self._api_prefix = "/api/v" + self._api_version + "/"
        self._cluster_api_version = "1"
        self._cluster_api_prefix = "/cluster-api/v" + self._cluster_api_version + "/"

        # Get list of manual instances from preferences
        self._preferences = CuraApplication.getInstance().getPreferences()
        self._preferences.addPreference("um3networkprinting/manual_instances",
                                        "")  # A comma-separated list of IP addresses or hostnames
        self._manual_instances = self._preferences.getValue("um3networkprinting/manual_instances").split(",")

        # Store the last manual entry key
        self._last_manual_entry_key = ""  # type: str

        # The zero-conf service changed requests are handled in a separate thread, so we can re-schedule the requests
        # which fail to get detailed service info.
        # Any new or re-scheduled requests will be appended to the request queue, and the handling thread will pick
        # them up and process them.
        self._service_changed_request_queue = Queue()
        self._service_changed_request_event = Event()
        self._service_changed_request_thread = Thread(target=self._handleOnServiceChangedRequests, daemon=True)
        self._service_changed_request_thread.start()

        self._account = self._application.getCuraAPI().account

        # Check if cloud flow is possible when user logs in
        self._account.loginStateChanged.connect(self.checkCloudFlowIsPossible)

        # Check if cloud flow is possible when user switches machines
        self._application.globalContainerStackChanged.connect(self._onMachineSwitched)

        # Listen for when cloud flow is possible
        self.cloudFlowIsPossible.connect(self._onCloudFlowPossible)

        # Listen if cloud cluster was added
        self._cloud_output_device_manager.addedCloudCluster.connect(self._onCloudPrintingConfigured)

        # Listen if cloud cluster was removed
        self._cloud_output_device_manager.removedCloudCluster.connect(self.checkCloudFlowIsPossible)

        self._start_cloud_flow_message = None  # type: Optional[Message]
        self._cloud_flow_complete_message = None  # type: Optional[Message]
class LocalClusterOutputDeviceManager: META_NETWORK_KEY = "um_network_key" MANUAL_DEVICES_PREFERENCE_KEY = "um3networkprinting/manual_instances" MIN_SUPPORTED_CLUSTER_VERSION = Version("4.0.0") # The translation catalog for this device. I18N_CATALOG = i18nCatalog("cura") # Signal emitted when the list of discovered devices changed. discoveredDevicesChanged = Signal() def __init__(self) -> None: # Persistent dict containing the networked clusters. self._discovered_devices = { } # type: Dict[str, LocalClusterOutputDevice] self._output_device_manager = CuraApplication.getInstance( ).getOutputDeviceManager() # Hook up ZeroConf client. self._zero_conf_client = ZeroConfClient() self._zero_conf_client.addedNetworkCluster.connect( self._onDeviceDiscovered) self._zero_conf_client.removedNetworkCluster.connect( self._onDiscoveredDeviceRemoved) ## Start the network discovery. def start(self) -> None: self._zero_conf_client.start() for address in self._getStoredManualAddresses(): self.addManualDevice(address) ## Stop network discovery and clean up discovered devices. def stop(self) -> None: self._zero_conf_client.stop() for instance_name in list(self._discovered_devices): self._onDiscoveredDeviceRemoved(instance_name) ## Restart discovery on the local network. def startDiscovery(self): self.stop() self.start() ## Add a networked printer manually by address. def addManualDevice( self, address: str, callback: Optional[Callable[[bool, str], None]] = None) -> None: api_client = ClusterApiClient( address, lambda error: Logger.log("e", str(error))) api_client.getSystem(lambda status: self._onCheckManualDeviceResponse( address, status, callback)) ## Remove a manually added networked printer. 
    def removeManualDevice(self, device_id: str, address: Optional[str] = None) -> None:
        # Fall back to the synthesized "manual:<address>" id when the given id is unknown.
        if device_id not in self._discovered_devices and address is not None:
            device_id = "manual:{}".format(address)

        if device_id in self._discovered_devices:
            address = address or self._discovered_devices[device_id].ipAddress
            self._onDiscoveredDeviceRemoved(device_id)

        if address in self._getStoredManualAddresses():
            self._removeStoredManualAddress(address)

    ## Force reset all network device connections.
    def refreshConnections(self) -> None:
        self._connectToActiveMachine()

    ## Get the discovered devices.
    def getDiscoveredDevices(self) -> Dict[str, LocalClusterOutputDevice]:
        return self._discovered_devices

    ## Connect the active machine to a given device.
    def associateActiveMachineWithPrinterDevice(self, device: LocalClusterOutputDevice) -> None:
        active_machine = CuraApplication.getInstance().getGlobalContainerStack()
        if not active_machine:
            return
        self._connectToOutputDevice(device, active_machine)
        self._connectToActiveMachine()

        # Pre-select the correct machine type of the group host.
        # We first need to find the correct definition because the machine manager only takes name as input, not ID.
        definitions = CuraApplication.getInstance().getContainerRegistry().findContainers(id=device.printerType)
        if not definitions:
            return
        CuraApplication.getInstance().getMachineManager().switchPrinterType(definitions[0].getName())

    ## Callback for when the active machine was changed by the user or a new remote cluster was found.
    def _connectToActiveMachine(self) -> None:
        active_machine = CuraApplication.getInstance().getGlobalContainerStack()
        if not active_machine:
            return

        output_device_manager = CuraApplication.getInstance().getOutputDeviceManager()
        stored_device_id = active_machine.getMetaDataEntry(self.META_NETWORK_KEY)
        for device in self._discovered_devices.values():
            if device.key == stored_device_id:
                # Connect to it if the stored key matches.
                self._connectToOutputDevice(device, active_machine)
            elif device.key in output_device_manager.getOutputDeviceIds():
                # Remove device if it is not meant for the active machine.
                output_device_manager.removeOutputDevice(device.key)

    ## Callback for when a manual device check request was responded to.
    def _onCheckManualDeviceResponse(self, address: str, status: PrinterSystemStatus,
                                     callback: Optional[Callable[[bool, str], None]] = None) -> None:
        # Register the manual device as a discovered device with synthesized zeroconf-style properties.
        self._onDeviceDiscovered("manual:{}".format(address), address, {
            b"name": status.name.encode("utf-8"),
            b"address": address.encode("utf-8"),
            b"machine": str(status.hardware.get("typeid", "")).encode("utf-8"),
            b"manual": b"true",
            b"firmware_version": status.firmware.encode("utf-8"),
            b"cluster_size": b"1"
        })
        self._storeManualAddress(address)
        if callback is not None:
            CuraApplication.getInstance().callLater(callback, True, address)

    ## Returns a dict of printer BOM numbers to machine types.
    #  These numbers are available in the machine definition already so we just search for them here.
    @staticmethod
    def _getPrinterTypeIdentifiers() -> Dict[str, str]:
        container_registry = CuraApplication.getInstance().getContainerRegistry()
        ultimaker_machines = container_registry.findContainersMetadata(type="machine", manufacturer="Ultimaker B.V.")

        found_machine_type_identifiers = {}  # type: Dict[str, str]
        for machine in ultimaker_machines:
            machine_type = machine.get("id", None)
            machine_bom_numbers = machine.get("bom_numbers", [])
            if machine_type and machine_bom_numbers:
                for bom_number in machine_bom_numbers:
                    # This produces a n:1 mapping of bom numbers to machine types
                    # allowing the S5R1 and S5R2 hardware to use a single S5 definition.
                    found_machine_type_identifiers[str(bom_number)] = machine_type
        return found_machine_type_identifiers

    ## Add a new device.
    def _onDeviceDiscovered(self, key: str, address: str, properties: Dict[bytes, bytes]) -> None:
        machine_identifier = properties.get(b"machine", b"").decode("utf-8")
        printer_type_identifiers = self._getPrinterTypeIdentifiers()

        # Detect the machine type based on the BOM number that is sent over the network.
        properties[b"printer_type"] = b"Unknown"
        for bom, p_type in printer_type_identifiers.items():
            if machine_identifier.startswith(bom):
                properties[b"printer_type"] = bytes(p_type, encoding="utf8")
                break

        device = LocalClusterOutputDevice(key, address, properties)
        discovered_printers_model = CuraApplication.getInstance().getDiscoveredPrintersModel()
        if address in list(discovered_printers_model.discoveredPrintersByAddress.keys()):
            # The printer was already added, we just update the available data.
            discovered_printers_model.updateDiscoveredPrinter(
                ip_address=address,
                name=device.getName(),
                machine_type=device.printerType
            )
        else:
            # The printer was not added yet so let's do that.
            discovered_printers_model.addDiscoveredPrinter(
                ip_address=address,
                key=device.getId(),
                name=device.getName(),
                create_callback=self._createMachineFromDiscoveredDevice,
                machine_type=device.printerType,
                device=device
            )
        self._discovered_devices[device.getId()] = device
        self.discoveredDevicesChanged.emit()
        self._connectToActiveMachine()

    ## Remove a device.
    def _onDiscoveredDeviceRemoved(self, device_id: str) -> None:
        device = self._discovered_devices.pop(device_id, None)  # type: Optional[LocalClusterOutputDevice]
        if not device:
            return
        device.close()
        CuraApplication.getInstance().getDiscoveredPrintersModel().removeDiscoveredPrinter(device.address)
        self.discoveredDevicesChanged.emit()

    ## Create a machine instance based on the discovered network printer.
    def _createMachineFromDiscoveredDevice(self, device_id: str) -> None:
        device = self._discovered_devices.get(device_id)
        if device is None:
            return

        # Create a new machine and activate it.
        # We do not use MachineManager.addMachine here because we need to set the network key before activating it.
        # If we do not do this the auto-pairing with the cloud-equivalent device will not work.
        new_machine = CuraStackBuilder.createMachine(device.name, device.printerType)
        if not new_machine:
            Logger.log("e", "Failed creating a new machine")
            return
        new_machine.setMetaDataEntry(self.META_NETWORK_KEY, device.key)
        CuraApplication.getInstance().getMachineManager().setActiveMachine(new_machine.getId())
        self._connectToOutputDevice(device, new_machine)
        self._showCloudFlowMessage(device)

    ## Add an address to the stored preferences.
    def _storeManualAddress(self, address: str) -> None:
        stored_addresses = self._getStoredManualAddresses()
        if address in stored_addresses:
            return  # Prevent duplicates.
        stored_addresses.append(address)
        new_value = ",".join(stored_addresses)
        CuraApplication.getInstance().getPreferences().setValue(self.MANUAL_DEVICES_PREFERENCE_KEY, new_value)

    ## Remove an address from the stored preferences.
    def _removeStoredManualAddress(self, address: str) -> None:
        stored_addresses = self._getStoredManualAddresses()
        try:
            stored_addresses.remove(address)  # Can throw a ValueError
            new_value = ",".join(stored_addresses)
            CuraApplication.getInstance().getPreferences().setValue(self.MANUAL_DEVICES_PREFERENCE_KEY, new_value)
        except ValueError:
            Logger.log("w", "Could not remove address from stored_addresses, it was not there")

    ## Load the user-configured manual devices from Cura preferences.
    def _getStoredManualAddresses(self) -> List[str]:
        preferences = CuraApplication.getInstance().getPreferences()
        preferences.addPreference(self.MANUAL_DEVICES_PREFERENCE_KEY, "")
        manual_instances = preferences.getValue(self.MANUAL_DEVICES_PREFERENCE_KEY).split(",")
        return manual_instances

    ## Add a device to the current active machine.
def _connectToOutputDevice(self, device: UltimakerNetworkedPrinterOutputDevice, machine: GlobalStack) -> None: # Make sure users know that we no longer support legacy devices. if Version( device.firmwareVersion) < self.MIN_SUPPORTED_CLUSTER_VERSION: LegacyDeviceNoLongerSupportedMessage().show() return machine.setName(device.name) machine.setMetaDataEntry(self.META_NETWORK_KEY, device.key) machine.setMetaDataEntry("group_name", device.name) machine.addConfiguredConnectionType(device.connectionType.value) if not device.isConnected(): device.connect() output_device_manager = CuraApplication.getInstance( ).getOutputDeviceManager() if device.key not in output_device_manager.getOutputDeviceIds(): output_device_manager.addOutputDevice(device) ## Nudge the user to start using Ultimaker Cloud. @staticmethod def _showCloudFlowMessage(device: LocalClusterOutputDevice) -> None: if CuraApplication.getInstance().getMachineManager( ).activeMachineIsUsingCloudConnection: # This printer is already cloud connected, so we do not bother the user anymore. return if not CuraApplication.getInstance().getCuraAPI().account.isLoggedIn: # Do not show the message if the user is not signed in. return CloudFlowMessage(device.ipAddress).show()
def run(self): if not self._url: Logger.log("e", "Can not check for a new release. URL not set!") no_new_version = True application_name = Application.getInstance().getApplicationName() Logger.log("i", "Checking for new version of %s" % application_name) try: headers = { "User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion()) } request = urllib.request.Request(self._url, headers=headers) latest_version_file = urllib.request.urlopen(request) except Exception as e: Logger.log("w", "Failed to check for new version: %s" % e) if not self.silent: Message(i18n_catalog.i18nc( "@info", "Could not access update information."), title=i18n_catalog.i18nc("@info:title", "Version Upgrade")).show() return try: reader = codecs.getreader("utf-8") data = json.load(reader(latest_version_file)) try: if Application.getInstance().getVersion() is not "master": local_version = Version( Application.getInstance().getVersion()) else: if not self.silent: Message(i18n_catalog.i18nc( "@info", "The version you are using does not support checking for updates." ), title=i18n_catalog.i18nc( "@info:title", "Warning")).show() return except ValueError: Logger.log( "w", "Could not determine application version from string %s, not checking for updates", Application.getInstance().getVersion()) if not self.silent: Message(i18n_catalog.i18nc( "@info", "The version you are using does not support checking for updates." ), title=i18n_catalog.i18nc( "@info:title", "Version Upgrade")).show() return latest_version_array = data["tag_name"].split("v")[1].split(".") latest_version = Version(list(map(int, latest_version_array))) if local_version < latest_version: Logger.log( "i", "Found a new version of the software. 
Spawning message") message = Message( i18n_catalog.i18nc("@info", "A new version is available!"), title=i18n_catalog.i18nc("@info:title", "Version Upgrade")) message.addAction( "download", i18n_catalog.i18nc("@action:button", "Download"), "[no_icon]", "[no_description]") browser_download_url = "" if self._set_download_url_callback: for asset in data["assets"]: os = "Windows" if ".exe" in asset[ "name"] else "Darwin" if ".dmg" in asset[ "name"] else "Linux" if os == platform.system(): browser_download_url = asset[ "browser_download_url"] break self._set_download_url_callback(browser_download_url) message.actionTriggered.connect(self._callback) message.show() no_new_version = False except Exception: Logger.logException( "e", "Exception in update checker while parsing the JSON file.") Message(i18n_catalog.i18nc( "@info", "An error occurred while checking for updates."), title=i18n_catalog.i18nc("@info:title", "Error")).show() no_new_version = False # Just to suppress the message below. if no_new_version and not self.silent: Message(i18n_catalog.i18nc("@info", "No new version was found."), title=i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
class LocalClusterOutputDeviceManager:
    """Tracks networked printer clusters discovered on the local network.

    Devices are found either via ZeroConf discovery or by manually entered
    IP addresses (persisted in Cura preferences). Discovered devices are
    registered with Cura's discovered-printers model and, when they match
    the active machine's stored network key, connected as output devices.
    """

    # Metadata key on the machine stack that stores the paired device's key.
    META_NETWORK_KEY = "um_network_key"

    # Preference key holding the comma-separated list of manual addresses.
    MANUAL_DEVICES_PREFERENCE_KEY = "um3networkprinting/manual_instances"
    MIN_SUPPORTED_CLUSTER_VERSION = Version("4.0.0")

    # The translation catalog for this device.
    I18N_CATALOG = i18nCatalog("cura")

    # Signal emitted when the list of discovered devices changed.
    discoveredDevicesChanged = Signal()

    def __init__(self) -> None:
        # Persistent dict containing the networked clusters.
        self._discovered_devices = {}  # type: Dict[str, LocalClusterOutputDevice]
        self._output_device_manager = CuraApplication.getInstance().getOutputDeviceManager()

        # Hook up ZeroConf client.
        self._zero_conf_client = ZeroConfClient()
        self._zero_conf_client.addedNetworkCluster.connect(self._onDeviceDiscovered)
        self._zero_conf_client.removedNetworkCluster.connect(self._onDiscoveredDeviceRemoved)

        # Persistent dict containing manually connected clusters.
        self._manual_instances = {}  # type: Dict[str, Optional[Callable]]

    ## Start the network discovery.
    def start(self) -> None:
        self._zero_conf_client.start()
        # Load all manual devices.
        self._manual_instances = self._getStoredManualInstances()
        for address in self._manual_instances:
            self.addManualDevice(address)

    ## Stop network discovery and clean up discovered devices.
    def stop(self) -> None:
        self._zero_conf_client.stop()
        # Cleanup all manual devices.
        # Iterate over a copy of the keys since removal mutates the dict.
        for instance_name in list(self._discovered_devices):
            self._onDiscoveredDeviceRemoved(instance_name)

    ## Add a networked printer manually by address.
    #  The optional callback is invoked (via callLater) with (success, address)
    #  once the device responds to the system-status check.
    def addManualDevice(self, address: str, callback: Optional[Callable[[bool, str], None]] = None) -> None:
        self._manual_instances[address] = callback
        # Persist the updated manual-address list immediately.
        new_manual_devices = ",".join(self._manual_instances.keys())
        CuraApplication.getInstance().getPreferences().setValue(self.MANUAL_DEVICES_PREFERENCE_KEY,
                                                                new_manual_devices)
        api_client = ClusterApiClient(address, lambda error: print(error))
        api_client.getSystem(lambda status: self._onCheckManualDeviceResponse(address, status))

    ## Remove a manually added networked printer.
    def removeManualDevice(self, device_id: str, address: Optional[str] = None) -> None:
        # Manual devices are keyed as "manual:<address>"; reconstruct the key
        # if only the address was given.
        if device_id not in self._discovered_devices and address is not None:
            device_id = "manual:{}".format(address)

        if device_id in self._discovered_devices:
            address = address or self._discovered_devices[device_id].ipAddress
            self._onDiscoveredDeviceRemoved(device_id)

        if address in self._manual_instances:
            manual_instance_callback = self._manual_instances.pop(address)
            new_devices = ",".join(self._manual_instances.keys())
            CuraApplication.getInstance().getPreferences().setValue(self.MANUAL_DEVICES_PREFERENCE_KEY,
                                                                    new_devices)
            if manual_instance_callback:
                # Notify the original caller that the device is gone.
                CuraApplication.getInstance().callLater(manual_instance_callback, False, address)

    ## Force reset all network device connections.
    def refreshConnections(self):
        self._connectToActiveMachine()

    ## Callback for when the active machine was changed by the user or a new remote cluster was found.
    def _connectToActiveMachine(self):
        active_machine = CuraApplication.getInstance().getGlobalContainerStack()
        if not active_machine:
            return

        output_device_manager = CuraApplication.getInstance().getOutputDeviceManager()
        stored_device_id = active_machine.getMetaDataEntry(self.META_NETWORK_KEY)
        for device in self._discovered_devices.values():
            if device.key == stored_device_id:
                # Connect to it if the stored key matches.
                self._connectToOutputDevice(device, active_machine)
            elif device.key in output_device_manager.getOutputDeviceIds():
                # Remove device if it is not meant for the active machine.
                CuraApplication.getInstance().getOutputDeviceManager().removeOutputDevice(device.key)

    ## Callback for when a manual device check request was responded to.
    def _onCheckManualDeviceResponse(self, address: str, status: PrinterSystemStatus) -> None:
        callback = self._manual_instances.get(address, None)
        if callback is None:
            return
        # Synthesize the ZeroConf-style byte-keyed properties dict for a
        # manually added device so it flows through the same discovery path.
        self._onDeviceDiscovered("manual:{}".format(address), address, {
            b"name": status.name.encode("utf-8"),
            b"address": address.encode("utf-8"),
            b"manual": b"true",
            b"incomplete": b"true",
            b"temporary": b"true"
        })
        CuraApplication.getInstance().callLater(callback, True, address)

    ## Returns a dict of printer BOM numbers to machine types.
    #  These numbers are available in the machine definition already so we just search for them here.
    @staticmethod
    def _getPrinterTypeIdentifiers() -> Dict[str, str]:
        container_registry = CuraApplication.getInstance().getContainerRegistry()
        ultimaker_machines = container_registry.findContainersMetadata(type="machine",
                                                                       manufacturer="Ultimaker B.V.")
        found_machine_type_identifiers = {}  # type: Dict[str, str]
        for machine in ultimaker_machines:
            machine_bom_number = machine.get("firmware_update_info", {}).get("id", None)
            machine_type = machine.get("id", None)
            if machine_bom_number and machine_type:
                found_machine_type_identifiers[str(machine_bom_number)] = machine_type
        return found_machine_type_identifiers

    ## Add a new device.
    def _onDeviceDiscovered(self, key: str, address: str, properties: Dict[bytes, bytes]) -> None:
        cluster_size = int(properties.get(b"cluster_size", -1))
        machine_identifier = properties.get(b"machine", b"").decode("utf-8")
        printer_type_identifiers = self._getPrinterTypeIdentifiers()

        # Detect the machine type based on the BOM number that is sent over the network.
        properties[b"printer_type"] = b"Unknown"
        for bom, p_type in printer_type_identifiers.items():
            if machine_identifier.startswith(bom):
                properties[b"printer_type"] = bytes(p_type, encoding="utf8")
                break

        # We no longer support legacy devices, so check that here.
        # (Legacy devices do not advertise a cluster_size property.)
        if cluster_size == -1:
            return

        device = LocalClusterOutputDevice(key, address, properties)
        CuraApplication.getInstance().getDiscoveredPrintersModel().addDiscoveredPrinter(
            ip_address=address,
            key=device.getId(),
            name=device.getName(),
            create_callback=self._createMachineFromDiscoveredDevice,
            machine_type=device.printerType,
            device=device)
        self._discovered_devices[device.getId()] = device
        self.discoveredDevicesChanged.emit()
        self._connectToActiveMachine()

    ## Remove a device.
    def _onDiscoveredDeviceRemoved(self, device_id: str) -> None:
        device = self._discovered_devices.pop(device_id, None)  # type: Optional[LocalClusterOutputDevice]
        if not device:
            return
        device.close()
        CuraApplication.getInstance().getDiscoveredPrintersModel().removeDiscoveredPrinter(device.address)
        self.discoveredDevicesChanged.emit()

    ## Create a machine instance based on the discovered network printer.
    def _createMachineFromDiscoveredDevice(self, device_id: str) -> None:
        device = self._discovered_devices.get(device_id)
        if device is None:
            return

        # The newly added machine is automatically activated.
        CuraApplication.getInstance().getMachineManager().addMachine(device.printerType, device.name)
        active_machine = CuraApplication.getInstance().getGlobalContainerStack()
        if not active_machine:
            return
        active_machine.setMetaDataEntry(self.META_NETWORK_KEY, device.key)
        active_machine.setMetaDataEntry("group_name", device.name)
        self._connectToOutputDevice(device, active_machine)
        CloudFlowMessage(device.ipAddress).show()  # Nudge the user to start using Ultimaker Cloud.

    ## Load the user-configured manual devices from Cura preferences.
    def _getStoredManualInstances(self) -> Dict[str, Optional[Callable]]:
        preferences = CuraApplication.getInstance().getPreferences()
        preferences.addPreference(self.MANUAL_DEVICES_PREFERENCE_KEY, "")
        manual_instances = preferences.getValue(self.MANUAL_DEVICES_PREFERENCE_KEY).split(",")
        return {address: None for address in manual_instances}

    ## Add a device to the current active machine.
    @staticmethod
    def _connectToOutputDevice(device: PrinterOutputDevice, active_machine: GlobalStack) -> None:
        device.connect()
        active_machine.addConfiguredConnectionType(device.connectionType.value)
        CuraApplication.getInstance().getOutputDeviceManager().addOutputDevice(device)
def ratePackage(self, package_id: str, rating: int) -> None: url = "{base_url}/packages/{package_id}/ratings".format(base_url = CloudApiModel.api_url, package_id = package_id) data = "{\"data\": {\"cura_version\": \"%s\", \"rating\": %i}}" % (Version(self._application.getVersion()), rating) self._application.getHttpRequestManager().put(url, data = data.encode(), scope = self._json_scope)
def _loadChangeLogText(self) -> str:
    """Load the change log file and render it as a single HTML string.

    The change log file format: a "[x.y.z]" line opens a version section,
    a "*" line opens a header within that section, and any other non-empty
    line is an entry under the current header.

    :return: The formatted HTML, or a localized error message if the file
        could not be found or read.
    """
    try:
        file_path = Resources.getPath(Resources.Texts, "change_log.txt")
    except FileNotFoundError as e:
        # I have no idea how / when this happens, but we're getting crash reports about it.
        return catalog.i18nc("@text:window", "The release notes could not be opened.") + "<br>" + str(e)

    change_logs_dict = {}  # type: Dict[Version, Dict[str, List[str]]]
    try:
        with open(file_path, "r", encoding="utf-8") as f:
            open_version = None  # type: Optional[Version]
            open_header = ""  # Initialise to an empty header in case there is no "*" in the first line of the changelog
            for line in f:
                line = line.replace("\n", "")
                if "[" in line and "]" in line:
                    line = line.replace("[", "")
                    line = line.replace("]", "")
                    open_version = Version(line)
                    if open_version > Version([14, 99, 99]):  # Bit of a hack: We released the 15.x.x versions before 2.x
                        open_version = Version([0, open_version.getMinor(), open_version.getRevision(),
                                                open_version.getPostfixVersion()])
                    open_header = ""
                    change_logs_dict[open_version] = collections.OrderedDict()
                elif line.startswith("*"):
                    open_header = line.replace("*", "")
                    change_logs_dict[cast(Version, open_version)][open_header] = []
                elif line != "":
                    if open_header not in change_logs_dict[cast(Version, open_version)]:
                        change_logs_dict[cast(Version, open_version)][open_header] = []
                    change_logs_dict[cast(Version, open_version)][open_header].append(line)
    except EnvironmentError as e:
        return catalog.i18nc("@text:window", "The release notes could not be opened.") + "<br>" + str(e)

    # Format changelog text. Collect the pieces in a list and join once:
    # the previous version used quadratic string += and contained a dead
    # `content += ""` statement.
    content_parts = []  # type: List[str]
    for version in sorted(change_logs_dict.keys(), reverse=True):
        text_version = version
        if version < Version([1, 0, 0]):  # Bit of a hack: We released the 15.x.x versions before 2.x
            text_version = Version([15, version.getMinor(), version.getRevision(),
                                    version.getPostfixVersion()])
        content_parts.append("<h1>" + str(text_version) + "</h1><br>")
        for change in change_logs_dict[version]:
            if str(change) != "":
                content_parts.append("<b>" + str(change) + "</b><br>")
            for line in change_logs_dict[version][change]:
                content_parts.append(str(line) + "<br>")
            content_parts.append("<br>")
    return "".join(content_parts)
def test_addNewVersionThatCantUpdate(self): manager = PackageManager(MagicMock()) manager.checkIfPackageCanUpdate = MagicMock(return_value=False) manager.addAvailablePackageVersion("beep", Version("1.0.0")) assert manager.packagesWithUpdate == set()
class FirmwareUpdateCheckerJob(Job):
    """Background job that checks whether newer firmware is available for a machine.

    It fetches the latest firmware version from per-machine check URLs,
    compares it against the version last seen for that machine (stored in
    preferences) and shows a notification when a new version appears.
    """

    STRING_ZERO_VERSION = "0.0.0"
    STRING_EPSILON_VERSION = "0.0.1"
    ZERO_VERSION = Version(STRING_ZERO_VERSION)      # Sentinel for "no version found / check failed".
    EPSILON_VERSION = Version(STRING_EPSILON_VERSION)  # Smallest version considered a real result.

    def __init__(self, silent, machine_name, metadata, callback) -> None:
        super().__init__()
        self.silent = silent
        self._callback = callback

        self._machine_name = machine_name
        self._metadata = metadata
        self._lookups = FirmwareUpdateCheckerLookup(self._machine_name, self._metadata)
        self._headers = {}  # type: Dict[str, str]  # Don't set headers yet.

    def getUrlResponse(self, url: str) -> str:
        """Fetch the body at ``url``; return STRING_ZERO_VERSION on failure."""
        result = self.STRING_ZERO_VERSION
        try:
            request = urllib.request.Request(url, headers=self._headers)
            response = urllib.request.urlopen(request)
            result = response.read().decode("utf-8")
        except URLError:
            Logger.log("w", "Could not reach '{0}', if this URL is old, consider removal.".format(url))
        return result

    def parseVersionResponse(self, response: str) -> Version:
        """Parse a Version from the first line of a raw response body."""
        raw_str = response.split("\n", 1)[0].rstrip()
        return Version(raw_str)

    def getCurrentVersion(self) -> Version:
        """Return the highest firmware version advertised by any check URL."""
        max_version = self.ZERO_VERSION
        if self._lookups is None:
            return max_version

        machine_urls = self._lookups.getCheckUrls()
        if machine_urls is not None:
            for url in machine_urls:
                version = self.parseVersionResponse(self.getUrlResponse(url))
                if version > max_version:
                    max_version = version

        if max_version < self.EPSILON_VERSION:
            Logger.log("w", "MachineID {0} not handled!".format(self._lookups.getMachineName()))
        return max_version

    def run(self):
        try:
            # Initialize a Preference that stores the last version checked for this printer.
            Application.getInstance().getPreferences().addPreference(
                getSettingsKeyForMachine(self._lookups.getMachineId()), "")

            # Get headers
            application_name = Application.getInstance().getApplicationName()
            application_version = Application.getInstance().getVersion()
            self._headers = {"User-Agent": "%s - %s" % (application_name, application_version)}

            # If it is not None, then we compare between the checked_version and the current_version
            machine_id = self._lookups.getMachineId()
            if machine_id is not None:
                Logger.log("i", "You have a(n) {0} in the printer list. Do firmware-check.".format(self._machine_name))

                current_version = self.getCurrentVersion()
                # This case indicates that was an error checking the version.
                # It happens for instance when not connected to internet.
                if current_version == self.ZERO_VERSION:
                    return

                # If it is the first time the version is checked, the checked_version is ""
                setting_key_str = getSettingsKeyForMachine(machine_id)
                checked_version = Version(Application.getInstance().getPreferences().getValue(setting_key_str))

                # If the checked_version is "", it's because is the first time we check firmware and in this case
                # we will not show the notification, but we will store it for the next time
                Application.getInstance().getPreferences().setValue(setting_key_str, current_version)
                Logger.log("i", "Reading firmware version of %s: checked = %s - latest = %s",
                           self._machine_name, checked_version, current_version)

                # The first time we want to store the current version, the notification will not be shown,
                # because the new version of Cura will be release before the firmware and we don't want to
                # notify the user when no new firmware version is available.
                if (checked_version != "") and (checked_version != current_version):
                    # Fixed: the named placeholder {version} was previously fed a
                    # positional argument, which raised KeyError at runtime.
                    Logger.log("i", "Showing firmware update message for new version: {version}".format(
                        version=current_version))
                    message = FirmwareUpdateCheckerMessage(machine_id, self._machine_name,
                                                           self._lookups.getRedirectUserUrl())
                    message.actionTriggered.connect(self._callback)
                    message.show()
            else:
                Logger.log("i", "No machine with name {0} in list of firmware to check.".format(self._machine_name))

        except Exception as e:
            Logger.log("w", "Failed to check for new version: %s", e)
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "Could not access update information.")).show()
            return
def requestWrite(self, nodes: List[SceneNode], file_name: Optional[str] = None, limit_mimetypes: bool = False,
                 file_handler: Optional[FileHandler] = None, **kwargs: str) -> None:
    """Send the given scene nodes to the networked printer.

    Picks the preferred file format supported by both the application and
    the machine, then starts the (generator-based) print-job send flow,
    asking the user to choose a printer when the cluster has more than one.

    :raises OutputDeviceError.WriteRequestFailedError: if no mutually
        supported file format exists.
    """
    self.writeStarted.emit(self)

    # Formats supported by this application (file types that we can actually write).
    if file_handler:
        file_formats = file_handler.getSupportedFileTypesWrite()
    else:
        file_formats = CuraApplication.getInstance().getMeshFileHandler().getSupportedFileTypesWrite()

    # Create a list from the supported file formats string.
    machine_file_formats = CuraApplication.getInstance().getGlobalContainerStack().getMetaDataEntry(
        "file_formats").split(";")
    machine_file_formats = [file_type.strip() for file_type in machine_file_formats]

    # Exception for UM3 firmware version >=4.4: UFP is now supported and should be the preferred file format.
    if "application/x-ufp" not in machine_file_formats and self.printerType == "ultimaker3" \
            and Version(self.firmwareVersion) >= Version("4.4"):
        machine_file_formats = ["application/x-ufp"] + machine_file_formats

    # Take the intersection between file_formats and machine_file_formats.
    # (Loop variable renamed so it no longer shadows the builtin `format`.)
    format_by_mimetype = {file_format["mime_type"]: file_format for file_format in file_formats}
    # Keep them ordered according to the preference in machine_file_formats.
    # Skip mimetypes the machine advertises but we cannot write — previously
    # an unknown mimetype raised an uncaught KeyError here.
    file_formats = [format_by_mimetype[mimetype] for mimetype in machine_file_formats
                    if mimetype in format_by_mimetype]

    if len(file_formats) == 0:
        Logger.log("e", "There are no file formats available to write with!")
        raise OutputDeviceError.WriteRequestFailedError(
            i18n_catalog.i18nc("@info:status", "There are no file formats available to write with!"))
    preferred_format = file_formats[0]  # Just take the first file format available.

    if file_handler is not None:
        writer = file_handler.getWriterByMimeType(cast(str, preferred_format["mime_type"]))
    else:
        writer = CuraApplication.getInstance().getMeshFileHandler().getWriterByMimeType(
            cast(str, preferred_format["mime_type"]))

    # This function pauses with the yield, waiting on instructions on which printer it needs to print with.
    self._sending_job = self._sendPrintJob(writer, preferred_format, nodes)
    self._sending_job.send(None)  # Start the generator.

    if len(self._printers) > 1:  # We need to ask the user.
        self._spawnPrinterSelectionDialog()
        is_job_sent = True
    else:  # Just immediately continue.
        self._sending_job.send("")  # No specifically selected printer.
        is_job_sent = self._sending_job.send(None)
def parseVersionResponse(self, response: str) -> Version: raw_str = response.split("\n", 1)[0].rstrip() return Version(raw_str)
def _notifyPackageManager(self): for package in self._server_response_data["packages"]: self._package_manager.addAvailablePackageVersion(package["package_id"], Version(package["package_version"]))
def run(self):
    """Perform the firmware version check for this machine and notify the user.

    Stores the latest seen firmware version per machine in preferences and
    only shows the update message once a *change* from a previously stored
    version is observed (never on the very first check).
    """
    try:
        # Initialize a Preference that stores the last version checked for this printer.
        Application.getInstance().getPreferences().addPreference(
            getSettingsKeyForMachine(self._lookups.getMachineId()), "")

        # Get headers
        application_name = Application.getInstance().getApplicationName()
        application_version = Application.getInstance().getVersion()
        self._headers = {"User-Agent": "%s - %s" % (application_name, application_version)}

        # If it is not None, then we compare between the checked_version and the current_version
        machine_id = self._lookups.getMachineId()
        if machine_id is not None:
            Logger.log("i", "You have a(n) {0} in the printer list. Do firmware-check.".format(self._machine_name))

            current_version = self.getCurrentVersion()
            # This case indicates that was an error checking the version.
            # It happens for instance when not connected to internet.
            if current_version == self.ZERO_VERSION:
                return

            # If it is the first time the version is checked, the checked_version is ""
            setting_key_str = getSettingsKeyForMachine(machine_id)
            checked_version = Version(Application.getInstance().getPreferences().getValue(setting_key_str))

            # If the checked_version is "", it's because is the first time we check firmware and in this case
            # we will not show the notification, but we will store it for the next time
            Application.getInstance().getPreferences().setValue(setting_key_str, current_version)
            Logger.log("i", "Reading firmware version of %s: checked = %s - latest = %s",
                       self._machine_name, checked_version, current_version)

            # The first time we want to store the current version, the notification will not be shown,
            # because the new version of Cura will be release before the firmware and we don't want to
            # notify the user when no new firmware version is available.
            if (checked_version != "") and (checked_version != current_version):
                # Fixed: the named placeholder {version} was previously given a
                # positional argument, raising KeyError whenever this branch ran.
                Logger.log("i", "Showing firmware update message for new version: {version}".format(
                    version=current_version))
                message = FirmwareUpdateCheckerMessage(machine_id, self._machine_name,
                                                       self._lookups.getRedirectUserUrl())
                message.actionTriggered.connect(self._callback)
                message.show()
        else:
            Logger.log("i", "No machine with name {0} in list of firmware to check.".format(self._machine_name))

    except Exception as e:
        Logger.log("w", "Failed to check for new version: %s", e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information.")).show()
        return
def supportsPrintJobActions(self) -> bool: if not self._printers: return False version_number = self.printers[0].firmwareVersion.split(".") firmware_version = Version([version_number[0], version_number[1], version_number[2]]) return firmware_version >= self.PRINT_JOB_ACTIONS_MIN_VERSION
def check_version_equals(first_version: Version, second_version: Version): assert first_version == second_version assert first_version.getMajor() == second_version.getMajor() assert first_version.getMinor() == second_version.getMinor() assert first_version.getRevision() == second_version.getRevision()