def _findPlugin(self, plugin_id: str) -> Optional[types.ModuleType]:  # Returns None on failure, so the annotation must be Optional.
    location = None
    for folder in self._plugin_locations:
        location = self._locatePlugin(plugin_id, folder)
        if location:
            break

    if not location:
        return None

    try:
        file, path, desc = imp.find_module(plugin_id, [location])
    except Exception:
        Logger.logException("e", "Import error when importing %s", plugin_id)
        return None

    try:
        module = imp.load_module(plugin_id, file, path, desc)
    except Exception:
        Logger.logException("e", "Import error loading module %s", plugin_id)
        return None
    finally:
        if file:
            file.close()  # imp.find_module returns an open file object (None for packages); os.close() would require a file descriptor.

    return module
def loadJsonFromReply(reply):
    try:
        result = json.loads(bytes(reply.readAll()).decode("utf-8"))
    except json.decoder.JSONDecodeError:
        Logger.logException("w", "Unable to decode JSON from reply.")
        return
    return result
def execute(self, output_device) -> None:
    scene = Application.getInstance().getController().getScene()
    # If the scene does not have a gcode, do nothing
    if not hasattr(scene, "gcode_dict"):
        return
    gcode_dict = getattr(scene, "gcode_dict")
    if not gcode_dict:
        return

    # Get the gcode list for the active build plate
    active_build_plate_id = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
    gcode_list = gcode_dict[active_build_plate_id]
    if not gcode_list:
        return

    if ";POSTPROCESSED" not in gcode_list[0]:
        for script in self._script_list:
            try:
                gcode_list = script.execute(gcode_list)
            except Exception:
                Logger.logException("e", "Exception in post-processing script.")
        if self._script_list:  # Add comment to g-code if any changes were made.
            gcode_list[0] += ";POSTPROCESSED\n"
        gcode_dict[active_build_plate_id] = gcode_list
        setattr(scene, "gcode_dict", gcode_dict)
    else:
        Logger.log("e", "Already post processed")
def _onFinished(self, job):
    if self._stream:
        # Explicitly closing the stream flushes the write-buffer
        try:
            self._stream.close()
            self._stream = None
        except Exception:
            Logger.logException("w", "An exception occurred while trying to write to removable drive.")
            message = Message(catalog.i18nc("@info:status", "Could not save to removable drive {0}: {1}").format(self.getName(), str(job.getError())),
                              title = catalog.i18nc("@info:title", "Error"))
            message.show()
            self.writeError.emit(self)
            return

    self._writing = False
    self.writeFinished.emit(self)
    if job.getResult():
        message = Message(catalog.i18nc("@info:status", "Saved to Removable Drive {0} as {1}").format(self.getName(), os.path.basename(job.getFileName())),
                          title = catalog.i18nc("@info:title", "File Saved"))
        message.addAction("eject", catalog.i18nc("@action:button", "Eject"), "eject", catalog.i18nc("@action", "Eject removable device {0}").format(self.getName()))
        message.actionTriggered.connect(self._onActionTriggered)
        message.show()
        self.writeSuccess.emit(self)
    else:
        message = Message(catalog.i18nc("@info:status", "Could not save to removable drive {0}: {1}").format(self.getName(), str(job.getError())),
                          title = catalog.i18nc("@info:title", "Warning"))
        message.show()
        self.writeError.emit(self)
    job.getStream().close()
def __call__(self, value_provider, *args, **kwargs):
    if not value_provider:
        return

    state = ValidatorState.Unknown
    try:
        minimum = value_provider.getProperty(self._key, "minimum_value")
        maximum = value_provider.getProperty(self._key, "maximum_value")
        minimum_warning = value_provider.getProperty(self._key, "minimum_value_warning")
        maximum_warning = value_provider.getProperty(self._key, "maximum_value_warning")

        if minimum is not None and maximum is not None and minimum > maximum:
            raise ValueError("Cannot validate a state of setting {0} with minimum > maximum".format(self._key))

        value = value_provider.getProperty(self._key, "value")
        if value is None or value != value:  # value != value is only true for NaN.
            raise ValueError("Cannot validate None, NaN or similar values in setting {0}, actual value: {1}".format(self._key, value))

        if minimum is not None and value < minimum:
            state = ValidatorState.MinimumError
        elif maximum is not None and value > maximum:
            state = ValidatorState.MaximumError
        elif minimum_warning is not None and value < minimum_warning:
            state = ValidatorState.MinimumWarning
        elif maximum_warning is not None and value > maximum_warning:
            state = ValidatorState.MaximumWarning
        else:
            state = ValidatorState.Valid
    except Exception:
        Logger.logException("w", "Could not validate setting %s, an exception was raised", self._key)
        state = ValidatorState.Exception

    return state
def _findPlugin(self, plugin_id: str) -> Optional[types.ModuleType]:
    location = None
    for folder in self._plugin_locations:
        location = self._locatePlugin(plugin_id, folder)
        if location:
            break

    if not location:
        return None

    try:
        file, path, desc = imp.find_module(plugin_id, [location])
    except Exception:
        Logger.logException("e", "Import error when importing %s", plugin_id)
        return None

    try:
        module = imp.load_module(plugin_id, file, path, desc)  # type: ignore  # MyPy gets the wrong output type from imp.find_module for some reason.
    except Exception:
        Logger.logException("e", "Import error loading module %s", plugin_id)
        return None
    finally:
        if file:
            file.close()  # imp.find_module returns an open file object (None for packages); os.close() would require a file descriptor.

    return module
def _installPlugin(self, plugin_id: str, plugin_path: str) -> None:
    Logger.log("i", "Attempting to install a new plugin %s from file '%s'", plugin_id, plugin_path)

    local_plugin_path = os.path.join(Resources.getStoragePath(Resources.Resources), "plugins")

    if plugin_id in self._bundled_plugin_cache:
        del self._bundled_plugin_cache[plugin_id]

    try:
        with zipfile.ZipFile(plugin_path, "r") as zip_ref:
            plugin_folder = os.path.join(local_plugin_path, plugin_id)

            # Overwrite the existing plugin if already installed
            if os.path.isdir(plugin_folder):
                shutil.rmtree(plugin_folder, ignore_errors = True)
            os.makedirs(plugin_folder, exist_ok = True)

            # Extract all files
            for info in zip_ref.infolist():
                extracted_path = zip_ref.extract(info.filename, path = plugin_folder)
                permissions = os.stat(extracted_path).st_mode
                os.chmod(extracted_path, permissions | stat.S_IEXEC)  # Make these files executable.
    except Exception:  # Installing a new plugin should never crash the application.
        Logger.logException("e", "An exception occurred while installing plugin {path}".format(path = plugin_path))

    if plugin_id in self._disabled_plugins:
        self._disabled_plugins.remove(plugin_id)
def _addPluginObject(self, plugin_object: PluginObject, plugin_id: str, plugin_type: str) -> None:
    plugin_object.setPluginId(plugin_id)
    self._plugin_objects[plugin_id] = plugin_object
    try:
        self._type_register_map[plugin_type](plugin_object)
    except Exception:
        Logger.logException("e", "Unable to add plugin %s", plugin_id)
def _parsePluginInfo(self, plugin_id, file_data, meta_data):
    try:
        meta_data["plugin"] = json.loads(file_data)
    except json.decoder.JSONDecodeError:
        Logger.logException("e", "Failed to parse plugin.json for plugin %s", plugin_id)
        raise InvalidMetaDataError(plugin_id)

    # Check if metadata is valid
    if "version" not in meta_data["plugin"]:
        Logger.log("e", "Version must be set!")
        raise InvalidMetaDataError(plugin_id)

    # Check if the plugin states what API version it needs.
    if "api" not in meta_data["plugin"] and "supported_sdk_versions" not in meta_data["plugin"]:
        Logger.log("e", "The API or the supported_sdk_versions must be set!")
        raise InvalidMetaDataError(plugin_id)
    else:
        # Store the api_version as a Version object.
        all_supported_sdk_versions = []  # type: List[Version]
        if "supported_sdk_versions" in meta_data["plugin"]:
            all_supported_sdk_versions += [Version(supported_version) for supported_version in meta_data["plugin"]["supported_sdk_versions"]]
        if "api" in meta_data["plugin"]:
            all_supported_sdk_versions += [Version(meta_data["plugin"]["api"])]
        meta_data["plugin"]["supported_sdk_versions"] = all_supported_sdk_versions

    if "i18n-catalog" in meta_data["plugin"]:
        # A catalog was set, try to translate a few strings
        i18n_catalog = i18nCatalog(meta_data["plugin"]["i18n-catalog"])
        if "name" in meta_data["plugin"]:
            meta_data["plugin"]["name"] = i18n_catalog.i18n(meta_data["plugin"]["name"])
        if "description" in meta_data["plugin"]:
            meta_data["plugin"]["description"] = i18n_catalog.i18n(meta_data["plugin"]["description"])
def _sendCrashReport(self):
    # Before sending data, the user comments are stored
    self.data["user_info"] = self.user_description_text_area.toPlainText()

    # Convert data to bytes
    binary_data = json.dumps(self.data).encode("utf-8")

    # Submit data
    kwoptions = {"data": binary_data, "timeout": 5}

    if Platform.isOSX():
        kwoptions["context"] = ssl._create_unverified_context()

    Logger.log("i", "Sending crash report info to [%s]...", self.crash_url)
    if not self.has_started:
        print("Sending crash report info to [%s]...\n" % self.crash_url)

    try:
        f = urllib.request.urlopen(self.crash_url, **kwoptions)
        Logger.log("i", "Sent crash report info.")
        if not self.has_started:
            print("Sent crash report info.\n")
        f.close()
    except urllib.error.HTTPError as e:
        Logger.logException("e", "An HTTP error occurred while trying to send crash report")
        if not self.has_started:
            print("An HTTP error occurred while trying to send crash report: %s" % e)
    except Exception as e:  # We don't want any exception to cause problems
        Logger.logException("e", "An exception occurred while trying to send crash report")
        if not self.has_started:
            print("An exception occurred while trying to send crash report: %s" % e)

    os._exit(1)
def _loadCachedDefinition(self, definition_id: str, path: str) -> Optional[DefinitionContainer.DefinitionContainer]:  # Returns the cached definition, so the annotation cannot be None.
    try:
        cache_path = Resources.getPath(Resources.Cache, "definitions", self._application.getVersion(), definition_id)
        cache_mtime = os.path.getmtime(cache_path)
        definition_mtime = os.path.getmtime(path)

        if definition_mtime > cache_mtime:
            # The definition is newer than the cached version, so ignore the cached version.
            Logger.log("d", "Definition file %s is newer than cache, ignoring cached version", path)
            return None

        definition = None
        with open(cache_path, "rb") as f:
            definition = pickle.load(f)

        for file_path in definition.getInheritedFiles():
            if os.path.getmtime(file_path) > cache_mtime:
                return None

        return definition
    except FileNotFoundError:
        return None
    except Exception:
        # We could not load a cached version for some reason. Ignore it.
        Logger.logException("d", "Could not load cached definition for %s", path)
        return None
def loadJsonFromReply(reply: QNetworkReply) -> Optional[List[Dict[str, Any]]]:
    try:
        result = json.loads(bytes(reply.readAll()).decode("utf-8"))
    except json.decoder.JSONDecodeError:
        Logger.logException("w", "Unable to decode JSON from reply.")
        return None
    return result
def updateFilesData(self, configuration_type: str, version, files_data, file_names_without_extension) -> Optional[FilesDataUpdateResult]:
    old_configuration_type = configuration_type

    # Keep converting the file until it's at one of the current versions.
    while (configuration_type, version) not in self._current_versions:
        if (configuration_type, version) not in self._upgrade_routes:
            # No version upgrade plug-in claims to be able to upgrade this file.
            return None
        new_type, new_version, upgrade_step = self._upgrade_routes[(configuration_type, version)]

        new_file_names_without_extension = []  # type: List[str]
        new_files_data = []  # type: List[str]
        for file_idx, file_data in enumerate(files_data):
            try:
                upgrade_step_result = upgrade_step(file_data, file_names_without_extension[file_idx])
            except Exception:
                # Upgrade failed due to a coding error in the plug-in.
                Logger.logException("w", "Exception in %s upgrade with %s: %s", old_configuration_type, upgrade_step.__module__, traceback.format_exc())
                return None

            if upgrade_step_result:
                this_file_names_without_extension, this_files_data = upgrade_step_result
            else:
                # Upgrade failed.
                Logger.log("w", "Unable to upgrade the file %s with %s.%s. Skipping it.", file_names_without_extension[file_idx], upgrade_step.__module__, upgrade_step.__name__)
                return None

            new_file_names_without_extension += this_file_names_without_extension
            new_files_data += this_files_data

        file_names_without_extension = new_file_names_without_extension
        files_data = new_files_data
        version = new_version
        configuration_type = new_type

    return FilesDataUpdateResult(configuration_type = configuration_type,
                                 version = version,
                                 files_data = files_data,
                                 file_names_without_extension = file_names_without_extension)
def loadScripts(self, path: str) -> None:
    ## Load all scripts in the scripts folders
    scripts = pkgutil.iter_modules(path = [path])
    for loader, script_name, ispkg in scripts:
        # Iterate over all scripts.
        if script_name not in sys.modules:
            try:
                spec = importlib.util.spec_from_file_location(__name__ + "." + script_name, os.path.join(path, script_name + ".py"))
                loaded_script = importlib.util.module_from_spec(spec)
                if spec.loader is None:
                    continue
                spec.loader.exec_module(loaded_script)
                sys.modules[script_name] = loaded_script  # TODO: This could be a security risk. Overwrite any module with a user-provided name?

                loaded_class = getattr(loaded_script, script_name)
                temp_object = loaded_class()
                Logger.log("d", "Begin loading of script: %s", script_name)
                try:
                    setting_data = temp_object.getSettingData()
                    if "name" in setting_data and "key" in setting_data:
                        self._script_labels[setting_data["key"]] = setting_data["name"]
                        self._loaded_scripts[setting_data["key"]] = loaded_class
                    else:
                        Logger.log("w", "Script %s.py has no name or key", script_name)
                        self._script_labels[script_name] = script_name
                        self._loaded_scripts[script_name] = loaded_class
                except AttributeError:
                    Logger.log("e", "Script %s.py is not a recognised script type. Ensure it inherits Script", script_name)
                except NotImplementedError:
                    Logger.log("e", "Script %s.py has no implemented settings", script_name)
            except Exception as e:
                Logger.logException("e", "Exception occurred while loading post processing plugin: {error_msg}".format(error_msg = str(e)))
def loadMetadata(self, container_id: str) -> Dict[str, Any]:
    registry = ContainerRegistry.getInstance()
    if container_id in registry.metadata:
        return registry.metadata[container_id]

    filename = self._id_to_path[container_id]  # Raises KeyError if container ID does not exist in the (cache of the) files!
    clazz = ContainerRegistry.mime_type_map[self._id_to_mime[container_id].name]

    requested_metadata = {}  # type: Dict[str, Any]
    try:
        with open(filename, "r", encoding = "utf-8") as f:
            result_metadatas = clazz.deserializeMetadata(f.read(), container_id)  # pylint: disable=no-member
    except IOError as e:
        Logger.log("e", "Unable to load metadata from file {filename}: {error_msg}".format(filename = filename, error_msg = str(e)))
        ConfigurationErrorMessage.getInstance().addFaultyContainers(container_id)
        return {}
    except Exception as e:
        Logger.logException("e", "Unable to deserialize metadata for container {filename}: {container_id}: {error_msg}".format(filename = filename, container_id = container_id, error_msg = str(e)))
        ConfigurationErrorMessage.getInstance().addFaultyContainers(container_id)
        return {}

    for metadata in result_metadatas:
        if "id" not in metadata:
            Logger.log("w", "Metadata obtained from deserializeMetadata of {class_name} didn't contain an ID.".format(class_name = clazz.__name__))
            continue
        if metadata["id"] == container_id:
            requested_metadata = metadata
        # Side-load the metadata into the registry if we get multiple containers.
        if metadata["id"] not in registry.metadata:  # This wouldn't get loaded normally.
            self._id_to_path[metadata["id"]] = filename
            self._id_to_mime[metadata["id"]] = self._id_to_mime[container_id]  # Assume that they only return one MIME type.
            registry.metadata[metadata["id"]] = metadata
            registry.source_provider[metadata["id"]] = self
    return requested_metadata
def _handleOnServiceChangedRequests(self):
    while True:
        # Wait for the event to be set
        self._service_changed_request_event.wait(timeout = 5.0)

        # Stop if the application is shutting down
        if Application.getInstance().isShuttingDown():
            return

        self._service_changed_request_event.clear()

        # Handle all pending requests
        reschedule_requests = []  # A list of requests that have failed so later they will get re-scheduled
        while not self._service_changed_request_queue.empty():
            request = self._service_changed_request_queue.get()
            zeroconf, service_type, name, state_change = request
            try:
                result = self._onServiceChanged(zeroconf, service_type, name, state_change)
                if not result:
                    reschedule_requests.append(request)
            except Exception:
                Logger.logException("e", "Failed to get service info for [%s] [%s], the request will be rescheduled", service_type, name)
                reschedule_requests.append(request)

        # Re-schedule the failed requests if any
        if reschedule_requests:
            for request in reschedule_requests:
                self._service_changed_request_queue.put(request)
def __call__(self, value_provider: ContainerInterface, context: Optional[PropertyEvaluationContext] = None) -> Any:
    if not value_provider:
        return None

    if not self._valid:
        return None

    locals = {}  # type: Dict[str, Any]
    # If there is a context, evaluate the values from the perspective of the original caller
    if context is not None:
        value_provider = context.rootStack()
    for name in self._used_values:
        value = value_provider.getProperty(name, "value", context)
        if value is None:
            continue

        locals[name] = value

    g = {}  # type: Dict[str, Any]
    g.update(globals())
    g.update(self.__operators)
    # Override operators if there are any in the context
    if context is not None:
        g.update(context.context.get("override_operators", {}))

    try:
        if self._compiled:
            return eval(self._compiled, g, locals)
        Logger.log("e", "An error occurred evaluating the function {0}.".format(self))
        return 0
    except Exception as e:
        Logger.logException("d", "An exception occurred in inherit function {0}: {1}".format(self, str(e)))
        return 0  # Settings may be used in calculations and they need a value
def load_file(self, file_name, mesh_builder, _use_numpystl = False):
    file_read = False
    if _use_numpystl:
        Logger.log("i", "Using NumPy-STL to load STL data.")
        try:
            self._loadWithNumpySTL(file_name, mesh_builder)
            file_read = True
        except Exception:
            Logger.logException("e", "Reading file failed with Numpy-STL!")

    if not file_read:
        Logger.log("i", "Using legacy code to load STL data.")
        f = open(file_name, "rb")
        if not self._loadBinary(mesh_builder, f):
            f.close()
            f = open(file_name, "rt", encoding = "utf-8")
            try:
                self._loadAscii(mesh_builder, f)
            except UnicodeDecodeError:
                return None
            f.close()

        Job.yieldThread()  # Yield somewhat to ensure the GUI has time to update a bit.

    mesh_builder.calculateNormals(fast = True)
    mesh_builder.setFileName(file_name)
def load(self):
    files = []
    for resource_type in self._resource_types:
        resources = Resources.getAllResourcesOfType(resource_type)

        try:
            resource_storage_path = Resources.getStoragePathForType(resource_type)
        except UnsupportedStorageTypeError:
            resource_storage_path = ""

        # Pre-process the list of files to insert relevant data
        # Most importantly, we need to ensure the loading order is DefinitionContainer, InstanceContainer, ContainerStack
        for path in resources:
            mime = MimeTypeDatabase.getMimeTypeForFile(path)
            container_type = self.__mime_type_map.get(mime.name)
            if not container_type:
                Logger.log("w", "Could not determine container type for file %s, ignoring", path)
                continue

            type_priority = 2
            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                type_priority = 0
            if issubclass(container_type, InstanceContainer.InstanceContainer):
                type_priority = 1

            # Since we have the mime type and resource type here, process these two properties so we do not
            # need to look up mime types etc. again.
            container_id = urllib.parse.unquote_plus(mime.stripExtension(os.path.basename(path)))
            read_only = os.path.dirname(path) != resource_storage_path

            files.append((type_priority, container_id, path, read_only, container_type))

    # Sort the list of files by type_priority so we can ensure correct loading order.
    files = sorted(files, key = lambda i: i[0])

    for _, container_id, file_path, read_only, container_type in files:
        if container_id in self._id_container_cache:
            continue

        try:
            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                definition = self._loadCachedDefinition(container_id, file_path)
                if definition:
                    self.addContainer(definition)
                    continue

            new_container = container_type(container_id)
            with open(file_path, encoding = "utf-8") as f:
                new_container.deserialize(f.read())
            new_container.setReadOnly(read_only)

            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                self._saveCachedDefinition(new_container)

            self.addContainer(new_container)
        except Exception:
            Logger.logException("e", "Could not deserialize container %s", container_id)
def loadPlugin(self, plugin_id: str) -> None:
    # If plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # Find the actual plugin on drive:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but isn't in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # Do not load plugin that has been disabled
    if plugin_id in self._disabled_plugins:
        Logger.log("i", "Plugin [%s] has been disabled. Skip loading it.", plugin_id)
        return

    # If API version is incompatible, don't load it.
    supported_sdk_versions = self._metadata[plugin_id].get("plugin", {}).get("supported_sdk_versions", [Version("0")])
    is_plugin_supported = False
    for supported_sdk_version in supported_sdk_versions:
        is_plugin_supported |= self.isPluginApiVersionCompatible(supported_sdk_version)
        if is_plugin_supported:
            break

    if not is_plugin_supported:
        Logger.log("w", "Plugin [%s] with supported sdk versions [%s] is incompatible with the current sdk version [%s].",
                   plugin_id, [str(version) for version in supported_sdk_versions], self._api_version)
        self._outdated_plugins.append(plugin_id)
        return

    try:
        to_register = plugin.register(self._application)  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("e", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                for nested_plugin_object in plugin_object:
                    nested_plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                self._addPluginObject(plugin_object, plugin_id, plugin_type)

        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.log("i", "Loaded plugin %s", plugin_id)
    except Exception:
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
def read(self, file_name):
    result = []
    # The base object of 3mf is a zipped archive.
    try:
        archive = zipfile.ZipFile(file_name, "r")
        self._base_name = os.path.basename(file_name)
        parser = Savitar.ThreeMFParser()
        scene_3mf = parser.parse(archive.open("3D/3dmodel.model").read())
        self._unit = scene_3mf.getUnit()
        for node in scene_3mf.getSceneNodes():
            um_node = self._convertSavitarNodeToUMNode(node)
            if um_node is None:
                continue
            # Compensate for original center position, if object(s) is/are not around its zero position
            transform_matrix = Matrix()
            mesh_data = um_node.getMeshData()
            if mesh_data is not None:
                extents = mesh_data.getExtents()
                center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
                transform_matrix.setByTranslation(center_vector)
            transform_matrix.multiply(um_node.getLocalTransformation())
            um_node.setTransformation(transform_matrix)

            global_container_stack = Application.getInstance().getGlobalContainerStack()

            # Create a transformation Matrix to convert from 3mf worldspace into ours.
            # First step: flip the y and z axis.
            transformation_matrix = Matrix()
            transformation_matrix._data[1, 1] = 0
            transformation_matrix._data[1, 2] = 1
            transformation_matrix._data[2, 1] = -1
            transformation_matrix._data[2, 2] = 0

            # Second step: 3MF defines the left corner of the machine as center, whereas Cura uses the center of the
            # build volume.
            if global_container_stack:
                translation_vector = Vector(x = -global_container_stack.getProperty("machine_width", "value") / 2,
                                            y = -global_container_stack.getProperty("machine_depth", "value") / 2,
                                            z = 0)
                translation_matrix = Matrix()
                translation_matrix.setByTranslation(translation_vector)
                transformation_matrix.multiply(translation_matrix)

            # Third step: 3MF also defines a unit, whereas Cura always assumes mm.
            scale_matrix = Matrix()
            scale_matrix.setByScaleVector(self._getScaleFromUnit(self._unit))
            transformation_matrix.multiply(scale_matrix)

            # Pre multiply the transformation with the loaded transformation, so the data is handled correctly.
            um_node.setTransformation(um_node.getLocalTransformation().preMultiply(transformation_matrix))

            result.append(um_node)
    except Exception:
        Logger.logException("e", "An exception occurred in 3mf reader.")
        return []

    return result
def __getitem__(self, container_id: str) -> ContainerInterface:
    if container_id not in self._containers:
        try:
            self._containers[container_id] = self.loadContainer(container_id)
        except Exception:
            Logger.logException("e", "Failed to load container %s", container_id)
            raise
    return self._containers[container_id]
def _expandGcodeTokens(self, key, value, settings):
    try:
        # Any setting can be used as a token
        fmt = GcodeStartEndFormatter()
        return str(fmt.format(value, **settings)).encode("utf-8")
    except Exception:
        Logger.logException("w", "Unable to do token replacement on start/end gcode")
        return str(value).encode("utf-8")
def getFileVersion(self, configuration_type: str, file_data: str) -> Optional[int]:
    if configuration_type not in self._get_version_functions:
        return None
    try:
        return self._get_version_functions[configuration_type](file_data)
    except Exception:
        Logger.logException("w", "Unable to get version from file.")
        return None
def callDecoration(self, function: str, *args, **kwargs) -> Any:
    for decorator in self._decorators:
        if hasattr(decorator, function):
            try:
                return getattr(decorator, function)(*args, **kwargs)
            except Exception as e:
                Logger.logException("e", "Exception calling decoration %s: %s", str(function), str(e))
                return None
def saveSettings(self):
    if not self._started:  # Do not do saving during application start
        return

    for instance in ContainerRegistry.getInstance().findInstanceContainers():
        if not instance.isDirty():
            continue

        try:
            data = instance.serialize()
        except NotImplementedError:
            continue
        except Exception:
            Logger.logException("e", "An exception occurred when serializing container %s", instance.getId())
            continue

        mime_type = ContainerRegistry.getMimeTypeForContainer(type(instance))
        file_name = urllib.parse.quote_plus(instance.getId()) + "." + mime_type.preferredSuffix
        instance_type = instance.getMetaDataEntry("type")
        path = None
        if instance_type == "material":
            path = Resources.getStoragePath(self.ResourceTypes.MaterialInstanceContainer, file_name)
        elif instance_type == "quality" or instance_type == "quality_changes":
            path = Resources.getStoragePath(self.ResourceTypes.QualityInstanceContainer, file_name)
        elif instance_type == "user":
            path = Resources.getStoragePath(self.ResourceTypes.UserInstanceContainer, file_name)
        elif instance_type == "variant":
            path = Resources.getStoragePath(self.ResourceTypes.VariantInstanceContainer, file_name)
        if path:
            instance.setPath(path)
            with SaveFile(path, "wt", -1, "utf-8") as f:
                f.write(data)

    for stack in ContainerRegistry.getInstance().findContainerStacks():
        if not stack.isDirty():
            continue

        try:
            data = stack.serialize()
        except NotImplementedError:
            continue
        except Exception:
            Logger.logException("e", "An exception occurred when serializing container %s", stack.getId())  # Log the stack's own ID, not the last instance's.
            continue

        mime_type = ContainerRegistry.getMimeTypeForContainer(type(stack))
        file_name = urllib.parse.quote_plus(stack.getId()) + "." + mime_type.preferredSuffix
        stack_type = stack.getMetaDataEntry("type", None)
        path = None
        if not stack_type or stack_type == "machine":
            path = Resources.getStoragePath(self.ResourceTypes.MachineStack, file_name)
        elif stack_type == "extruder_train":
            path = Resources.getStoragePath(self.ResourceTypes.ExtruderStack, file_name)
        if path:
            stack.setPath(path)
            with SaveFile(path, "wt", -1, "utf-8") as f:
                f.write(data)
def readerRead(self, reader, file_name, **kwargs):
    try:
        results = reader.read(file_name)
        return results
    except Exception:
        Logger.logException("e", "An exception occurred while loading workspace.")
        Logger.log("w", "Unable to load workspace %s", file_name)
    return None
def run(self):
    loading_message = Message(i18n_catalog.i18nc("@info:status", "Loading <filename>{0}</filename>", self._filename),
                              lifetime = 0,
                              dismissable = False)
    loading_message.setProgress(-1)
    loading_message.show()

    Job.yieldThread()  # Yield to any other thread that might want to do something else.

    node = None
    try:
        begin_time = time.time()
        node = self._handler.read(self._filename)
        end_time = time.time()
        Logger.log("d", "Loading mesh took %s seconds", end_time - begin_time)
    except Exception:
        Logger.logException("e", "Exception in mesh loader")

    if not node:
        loading_message.hide()
        result_message = Message(i18n_catalog.i18nc("@info:status", "Failed to load <filename>{0}</filename>", self._filename))
        result_message.show()
        return

    if node.getMeshData():
        node.getMeshData().setFileName(self._filename)

    # Scale down to maximum bounds size if that is available
    if hasattr(Application.getInstance().getController().getScene(), "_maximum_bounds"):
        max_bounds = Application.getInstance().getController().getScene()._maximum_bounds
        node._resetAABB()
        bounding_box = node.getBoundingBox()
        timeout_counter = 0
        # As the calculation of the bounding box is in a separate thread it might be that it's not done yet.
        while bounding_box.width == 0 or bounding_box.height == 0 or bounding_box.depth == 0:
            bounding_box = node.getBoundingBox()
            time.sleep(0.1)
            timeout_counter += 1
            if timeout_counter > 10:
                break

        if max_bounds.width < bounding_box.width or max_bounds.height < bounding_box.height or max_bounds.depth < bounding_box.depth:
            scale_factor_width = max_bounds.width / bounding_box.width
            scale_factor_height = max_bounds.height / bounding_box.height
            scale_factor_depth = max_bounds.depth / bounding_box.depth
            scale_factor = min(scale_factor_width, scale_factor_height, scale_factor_depth)
            scale_vector = Vector(scale_factor, scale_factor, scale_factor)
            display_scale_factor = scale_factor * 100

            if Preferences.getInstance().getValue("mesh/scale_to_fit") == True:
                scale_message = Message(i18n_catalog.i18nc("@info:status", "Auto scaled object to {0}% of original size", ("%i" % display_scale_factor)))
                try:
                    node.scale(scale_vector)
                    scale_message.show()
                except Exception as e:
                    print(e)

    self.setResult(node)
    loading_message.hide()
def installPlugin(self, plugin_path: str):
    plugin_path = QUrl(plugin_path).toLocalFile()

    Logger.log("d", "Attempting to install a new plugin %s", plugin_path)
    local_plugin_path = os.path.join(Resources.getStoragePath(Resources.Resources), "plugins")

    plugin_folder = ""
    result = {"status": "error", "message": "", "id": ""}
    success_message = i18n_catalog.i18nc("@info:status", "The plugin has been installed.\n Please re-start the application to activate the plugin.")

    try:
        with zipfile.ZipFile(plugin_path, "r") as zip_ref:
            plugin_id = None
            for file in zip_ref.infolist():
                if file.filename.endswith("/"):
                    plugin_id = file.filename.strip("/")
                    break

            if plugin_id is None:
                result["message"] = i18n_catalog.i18nc("@info:status", "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>", plugin_path, "Invalid plugin file")
                return result
            result["id"] = plugin_id

            plugin_folder = os.path.join(local_plugin_path, plugin_id)

            if os.path.isdir(plugin_folder):  # Plugin is already installed by user (so not a bundled plugin)
                metadata = {}
                with zip_ref.open(plugin_id + "/plugin.json") as metadata_file:
                    metadata = json.loads(metadata_file.read().decode("utf-8"))

                if "version" in metadata:
                    new_version = Version(metadata["version"])
                    old_version = Version(self.getMetaData(plugin_id)["plugin"]["version"])
                    if new_version > old_version:
                        zip_ref.extractall(plugin_folder)
                        result["status"] = "ok"
                        result["message"] = success_message
                        return result

                Logger.log("w", "The plugin was already installed. Unable to install it again!")
                result["status"] = "duplicate"
                result["message"] = i18n_catalog.i18nc("@info:status", "Failed to install the plugin; \n<message>{0}</message>", "Plugin was already installed")
                return result
            elif plugin_id in self._plugins:
                # Plugin is already installed, but not by the user (eg; this is a bundled plugin)
                # TODO: Right now we don't support upgrading bundled plugins at all, but we might do so in the future.
                result["message"] = i18n_catalog.i18nc("@info:status", "Failed to install the plugin; \n<message>{0}</message>", "Unable to upgrade or install bundled plugins.")
                return result

            zip_ref.extractall(plugin_folder)
    except Exception:  # Installing a new plugin should never crash the application.
        Logger.logException("d", "An exception occurred while installing plugin ")
        result["message"] = i18n_catalog.i18nc("@info:status", "Failed to install plugin from <filename>{0}</filename>:\n<message>{1}</message>", plugin_folder, "Invalid plugin file")
        return result

    result["status"] = "ok"
    result["message"] = success_message
    return result
def read(self, file_name):
    try:
        self.defs = {}
        self.shapes = []

        tree = ET.parse(file_name)
        xml_root = tree.getroot()

        if xml_root.tag != "X3D":
            return None

        scale = 1000  # The default X3D unit is one meter, while Cura's is one millimeter
        if xml_root[0].tag == "head":
            for head_node in xml_root[0]:
                if head_node.tag == "unit" and head_node.attrib.get("category") == "length":
                    scale *= float(head_node.attrib["conversionFactor"])
                    break
            xml_scene = xml_root[1]
        else:
            xml_scene = xml_root[0]

        if xml_scene.tag != "Scene":
            return None

        self.transform = Matrix()
        self.transform.setByScaleFactor(scale)
        self.index_base = 0

        # Traverse the scene tree, populate the shapes list
        self.processChildNodes(xml_scene)

        if self.shapes:
            builder = MeshBuilder()
            builder.setVertices(numpy.concatenate([shape.verts for shape in self.shapes]))
            builder.setIndices(numpy.concatenate([shape.faces for shape in self.shapes]))
            builder.calculateNormals()
            builder.setFileName(file_name)
            mesh_data = builder.build()

            # Manually try and get the extents of the mesh_data. This should prevent nasty NaN issues from
            # leaving the reader.
            mesh_data.getExtents()

            node = SceneNode()
            node.setMeshData(mesh_data)
            node.setSelectable(True)
            node.setName(file_name)
        else:
            return None
    except Exception:
        Logger.logException("e", "Exception in X3D reader")
        return None

    return node
def _exceptionInfoWidget(self):
    group = QGroupBox()
    group.setTitle(catalog.i18nc("@title:groupbox", "Error traceback"))
    layout = QVBoxLayout()

    text_area = QTextEdit()
    trace_list = traceback.format_exception(self.exception_type, self.value, self.traceback)
    trace = "".join(trace_list)
    text_area.setText(trace)
    text_area.setReadOnly(True)

    layout.addWidget(text_area)
    group.setLayout(layout)

    # Parsing all the information to fill the dictionary
    summary = ""
    if len(trace_list) >= 1:
        summary = trace_list[len(trace_list) - 1].rstrip("\n")
    module = [""]
    if len(trace_list) >= 2:
        module = trace_list[len(trace_list) - 2].rstrip("\n").split("\n")
    module_split = module[0].split(", ")

    filepath_directory_split = module_split[0].split("\"")
    filepath = ""
    if len(filepath_directory_split) > 1:
        filepath = filepath_directory_split[1]
    directory, filename = os.path.split(filepath)
    line = ""
    if len(module_split) > 1:
        line = int(module_split[1].lstrip("line "))
    function = ""
    if len(module_split) > 2:
        function = module_split[2].lstrip("in ")
    code = ""
    if len(module) > 1:
        code = module[1].lstrip(" ")

    # Using this workaround for cross-platform path splitting
    split_path = []
    folder_name = ""
    # Split until we reach the folder "cura"
    while folder_name != "cura":
        directory, folder_name = os.path.split(directory)
        if not folder_name:
            break
        split_path.append(folder_name)

    # Look for plugins. If it's not a plugin, the current Cura version is set
    isPlugin = False
    module_version = self.cura_version
    module_name = "Cura"
    if "plugins" in split_path:
        isPlugin = True
        # Look backwards until plugin.json is found
        directory, name = os.path.split(filepath)
        while "plugin.json" not in os.listdir(directory):
            directory, name = os.path.split(directory)

        json_metadata_file = os.path.join(directory, "plugin.json")
        try:
            with open(json_metadata_file, "r", encoding = "utf-8") as f:
                try:
                    metadata = json.loads(f.read())
                    module_version = metadata["version"]
                    module_name = metadata["name"]
                except json.decoder.JSONDecodeError:
                    # Do not throw new exceptions while handling a crash
                    Logger.logException("e", "Failed to parse plugin.json for plugin %s", name)
        except Exception:
            # Do not throw new exceptions while handling a crash
            pass

    exception_dict = dict()
    exception_dict["traceback"] = {"summary": summary, "full_trace": trace}
    exception_dict["location"] = {"path": filepath, "file": filename, "function": function, "code": code, "line": line,
                                  "module_name": module_name, "version": module_version, "is_plugin": isPlugin}
    self.data["exception"] = exception_dict

    if with_sentry_sdk:
        with configure_scope() as scope:
            scope.set_tag("is_plugin", isPlugin)
            scope.set_tag("module", module_name)

    return group
def read(self, file_name):
    result = []
    self._object_count = 0  # Used to name objects as there is no node name yet.
    # The base object of 3mf is a zipped archive.
    try:
        archive = zipfile.ZipFile(file_name, "r")
        self._base_name = os.path.basename(file_name)
        parser = Savitar.ThreeMFParser()
        scene_3mf = parser.parse(archive.open("3D/3dmodel.model").read())
        self._unit = scene_3mf.getUnit()
        for node in scene_3mf.getSceneNodes():
            um_node = self._convertSavitarNodeToUMNode(node)
            if um_node is None:
                continue
            # Compensate for original center position, if object(s) is/are not around its zero position
            transform_matrix = Matrix()
            mesh_data = um_node.getMeshData()
            if mesh_data is not None:
                extents = mesh_data.getExtents()
                center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
                transform_matrix.setByTranslation(center_vector)
            transform_matrix.multiply(um_node.getLocalTransformation())
            um_node.setTransformation(transform_matrix)

            global_container_stack = Application.getInstance().getGlobalContainerStack()

            # Create a transformation Matrix to convert from 3mf worldspace into ours.
            # First step: flip the y and z axis.
            transformation_matrix = Matrix()
            transformation_matrix._data[1, 1] = 0
            transformation_matrix._data[1, 2] = 1
            transformation_matrix._data[2, 1] = -1
            transformation_matrix._data[2, 2] = 0

            # Second step: 3MF defines the left corner of the machine as center, whereas Cura uses the center of the
            # build volume.
            if global_container_stack:
                translation_vector = Vector(x = -global_container_stack.getProperty("machine_width", "value") / 2,
                                            y = -global_container_stack.getProperty("machine_depth", "value") / 2,
                                            z = 0)
                translation_matrix = Matrix()
                translation_matrix.setByTranslation(translation_vector)
                transformation_matrix.multiply(translation_matrix)

            # Third step: 3MF also defines a unit, whereas Cura always assumes mm.
            scale_matrix = Matrix()
            scale_matrix.setByScaleVector(self._getScaleFromUnit(self._unit))
            transformation_matrix.multiply(scale_matrix)

            # Pre multiply the transformation with the loaded transformation, so the data is handled correctly.
            um_node.setTransformation(um_node.getLocalTransformation().preMultiply(transformation_matrix))

            # Check if the model is positioned below the build plate and honor that when loading project files.
            if um_node.getMeshData() is not None:
                minimum_z_value = um_node.getMeshData().getExtents(um_node.getWorldTransformation()).minimum.y  # y is z in transformation coordinates
                if minimum_z_value < 0:
                    um_node.addDecorator(ZOffsetDecorator())
                    um_node.callDecoration("setZOffset", minimum_z_value)

            result.append(um_node)
    except Exception:
        Logger.logException("e", "An exception occurred in 3mf reader.")
        return []

    return result
def _onWriteStarted(self, output_device):
    try:
        if not Preferences.getInstance().getValue("info/send_slice_info"):
            Logger.log("d", "'info/send_slice_info' is turned off.")
            return  # Do nothing, user does not want to send data

        global_container_stack = Application.getInstance().getGlobalContainerStack()
        print_information = Application.getInstance().getPrintInformation()

        data = dict()  # The data that we're going to submit.
        data["time_stamp"] = time.time()
        data["schema_version"] = 0
        data["cura_version"] = Application.getInstance().getVersion()

        active_mode = Preferences.getInstance().getValue("cura/active_mode")
        if active_mode == 0:
            data["active_mode"] = "recommended"
        else:
            data["active_mode"] = "custom"

        definition_changes = global_container_stack.definitionChanges
        machine_settings_changed_by_user = False
        if definition_changes.getId() != "empty":
            # Now a definition_changes container will always be created for a stack,
            # so we also need to check if there is any instance in the definition_changes container
            if definition_changes.getAllKeys():
                machine_settings_changed_by_user = True
        data["machine_settings_changed_by_user"] = machine_settings_changed_by_user

        data["language"] = Preferences.getInstance().getValue("general/language")
        data["os"] = {"type": platform.system(), "version": platform.version()}

        data["active_machine"] = {"definition_id": global_container_stack.definition.getId(),
                                  "manufacturer": global_container_stack.definition.getMetaData().get("manufacturer", "")}

        # Add extruder specific data to slice info
        data["extruders"] = []
        extruders = list(ExtruderManager.getInstance().getMachineExtruders(global_container_stack.getId()))
        extruders = sorted(extruders, key = lambda extruder: extruder.getMetaDataEntry("position"))

        for extruder in extruders:
            extruder_dict = dict()
            extruder_dict["active"] = ExtruderManager.getInstance().getActiveExtruderStack() == extruder
            extruder_dict["material"] = {"GUID": extruder.material.getMetaData().get("GUID", ""),
                                         "type": extruder.material.getMetaData().get("material", ""),
                                         "brand": extruder.material.getMetaData().get("brand", "")}
            extruder_position = int(extruder.getMetaDataEntry("position", "0"))
            if len(print_information.materialLengths) > extruder_position:
                extruder_dict["material_used"] = print_information.materialLengths[extruder_position]
            extruder_dict["variant"] = extruder.variant.getName()
            extruder_dict["nozzle_size"] = extruder.getProperty("machine_nozzle_size", "value")

            extruder_settings = dict()
            extruder_settings["wall_line_count"] = extruder.getProperty("wall_line_count", "value")
            extruder_settings["retraction_enable"] = extruder.getProperty("retraction_enable", "value")
            extruder_settings["infill_sparse_density"] = extruder.getProperty("infill_sparse_density", "value")
            extruder_settings["infill_pattern"] = extruder.getProperty("infill_pattern", "value")
            extruder_settings["gradual_infill_steps"] = extruder.getProperty("gradual_infill_steps", "value")
            extruder_settings["default_material_print_temperature"] = extruder.getProperty("default_material_print_temperature", "value")
            extruder_settings["material_print_temperature"] = extruder.getProperty("material_print_temperature", "value")
            extruder_dict["extruder_settings"] = extruder_settings
            data["extruders"].append(extruder_dict)

        data["quality_profile"] = global_container_stack.quality.getMetaData().get("quality_type")

        data["models"] = []
        # Listing all files placed on the build plate
        for node in DepthFirstIterator(CuraApplication.getInstance().getController().getScene().getRoot()):
            if node.callDecoration("isSliceable"):
                model = dict()
                model["hash"] = node.getMeshData().getHash()
                bounding_box = node.getBoundingBox()
                model["bounding_box"] = {"minimum": {"x": bounding_box.minimum.x,
                                                     "y": bounding_box.minimum.y,
                                                     "z": bounding_box.minimum.z},
                                         "maximum": {"x": bounding_box.maximum.x,
                                                     "y": bounding_box.maximum.y,
                                                     "z": bounding_box.maximum.z}}
                model["transformation"] = {"data": str(node.getWorldTransformation().getData()).replace("\n", "")}
                extruder_position = node.callDecoration("getActiveExtruderPosition")
                model["extruder"] = 0 if extruder_position is None else int(extruder_position)

                model_settings = dict()
                model_stack = node.callDecoration("getStack")
                if model_stack:
                    model_settings["support_enabled"] = model_stack.getProperty("support_enable", "value")
                    model_settings["support_extruder_nr"] = int(model_stack.getProperty("support_extruder_nr", "value"))

                    # Mesh modifiers
                    model_settings["infill_mesh"] = model_stack.getProperty("infill_mesh", "value")
                    model_settings["cutting_mesh"] = model_stack.getProperty("cutting_mesh", "value")
                    model_settings["support_mesh"] = model_stack.getProperty("support_mesh", "value")
                    model_settings["anti_overhang_mesh"] = model_stack.getProperty("anti_overhang_mesh", "value")

                    model_settings["wall_line_count"] = model_stack.getProperty("wall_line_count", "value")
                    model_settings["retraction_enable"] = model_stack.getProperty("retraction_enable", "value")

                    # Infill settings
                    model_settings["infill_sparse_density"] = model_stack.getProperty("infill_sparse_density", "value")
                    model_settings["infill_pattern"] = model_stack.getProperty("infill_pattern", "value")
                    model_settings["gradual_infill_steps"] = model_stack.getProperty("gradual_infill_steps", "value")

                model["model_settings"] = model_settings
                data["models"].append(model)

        print_times = print_information.printTimes()
        data["print_times"] = {"travel": int(print_times["travel"].getDisplayString(DurationFormat.Format.Seconds)),
                               "support": int(print_times["support"].getDisplayString(DurationFormat.Format.Seconds)),
                               "infill": int(print_times["infill"].getDisplayString(DurationFormat.Format.Seconds)),
                               "total": int(print_information.currentPrintTime.getDisplayString(DurationFormat.Format.Seconds))}

        print_settings = dict()
        print_settings["layer_height"] = global_container_stack.getProperty("layer_height", "value")

        # Support settings
        print_settings["support_enabled"] = global_container_stack.getProperty("support_enable", "value")
        print_settings["support_extruder_nr"] = int(global_container_stack.getProperty("support_extruder_nr", "value"))

        # Platform adhesion settings
        print_settings["adhesion_type"] = global_container_stack.getProperty("adhesion_type", "value")

        # Shell settings
        print_settings["wall_line_count"] = global_container_stack.getProperty("wall_line_count", "value")
        print_settings["retraction_enable"] = global_container_stack.getProperty("retraction_enable", "value")

        # Prime tower settings
        print_settings["prime_tower_enable"] = global_container_stack.getProperty("prime_tower_enable", "value")

        # Infill settings
        print_settings["infill_sparse_density"] = global_container_stack.getProperty("infill_sparse_density", "value")
        print_settings["infill_pattern"] = global_container_stack.getProperty("infill_pattern", "value")
        print_settings["gradual_infill_steps"] = global_container_stack.getProperty("gradual_infill_steps", "value")

        print_settings["print_sequence"] = global_container_stack.getProperty("print_sequence", "value")

        data["print_settings"] = print_settings

        # Send the name of the output device type that is used.
        data["output_to"] = type(output_device).__name__

        # Convert data to bytes
        binary_data = json.dumps(data).encode("utf-8")

        # Sending slice info non-blocking
        reportJob = SliceInfoJob(self.info_url, binary_data)
        reportJob.start()
    except Exception:
        # We really can't afford to have a mistake here, as this would break the sending of g-code to a device
        # (either saving or directly to a printer). The functionality of the slice data is not *that* important.
        Logger.logException("e", "Exception raised while sending slice info.")  # But we should be notified about these problems of course.
def deserialize(self, serialized, file_name = None):
    containers_to_add = []
    # Update the serialized data first
    from UM.Settings.Interfaces import ContainerInterface
    serialized = ContainerInterface.deserialize(self, serialized, file_name)

    try:
        data = ET.fromstring(serialized)
    except Exception:
        Logger.logException("e", "An exception occurred while parsing the material profile")
        return

    # Reset previous metadata
    old_id = self.getId()
    self.clearData()  # Ensure any previous data is gone.
    meta_data = {}
    meta_data["type"] = "material"
    meta_data["base_file"] = self.getId()
    meta_data["status"] = "unknown"  # TODO: Add material verification
    meta_data["id"] = old_id
    meta_data["container_type"] = XmlMaterialProfile

    common_setting_values = {}

    inherits = data.find("./um:inherits", self.__namespaces)
    if inherits is not None:
        inherited = self._resolveInheritance(inherits.text)
        data = self._mergeXML(inherited, data)

    # Set setting_version in metadata
    if "version" in data.attrib:
        meta_data["setting_version"] = self.xmlVersionToSettingVersion(data.attrib["version"])
    else:
        meta_data["setting_version"] = self.xmlVersionToSettingVersion("1.2")  # 1.2 and lower didn't have that version number there yet.

    meta_data["name"] = "Unknown Material"  # In case the name tag is missing.
    for entry in data.iterfind("./um:metadata/*", self.__namespaces):
        tag_name = _tag_without_namespace(entry)

        if tag_name == "name":
            brand = entry.find("./um:brand", self.__namespaces)
            material = entry.find("./um:material", self.__namespaces)
            color = entry.find("./um:color", self.__namespaces)
            label = entry.find("./um:label", self.__namespaces)

            if label is not None:
                meta_data["name"] = label.text
            else:
                meta_data["name"] = self._profile_name(material.text, color.text)
            meta_data["brand"] = brand.text
            meta_data["material"] = material.text
            meta_data["color_name"] = color.text
            continue

        # setting_version is derived from the "version" tag in the schema earlier, so don't set it here
        if tag_name == "setting_version":
            continue

        meta_data[tag_name] = entry.text

        if tag_name in self.__material_metadata_setting_map:
            common_setting_values[self.__material_metadata_setting_map[tag_name]] = entry.text

    if "description" not in meta_data:
        meta_data["description"] = ""

    if "adhesion_info" not in meta_data:
        meta_data["adhesion_info"] = ""

    validation_message = XmlMaterialValidator.validateMaterialMetaData(meta_data)
    if validation_message is not None:
        raise Exception("Not valid material profile: %s" % (validation_message))

    property_values = {}
    properties = data.iterfind("./um:properties/*", self.__namespaces)
    for entry in properties:
        tag_name = _tag_without_namespace(entry)
        property_values[tag_name] = entry.text

        if tag_name in self.__material_properties_setting_map:
            common_setting_values[self.__material_properties_setting_map[tag_name]] = entry.text

    meta_data["approximate_diameter"] = str(round(float(property_values.get("diameter", 2.85))))  # In mm
    meta_data["properties"] = property_values
    meta_data["definition"] = "fdmprinter"

    common_compatibility = True
    settings = data.iterfind("./um:settings/um:setting", self.__namespaces)
    for entry in settings:
        key = entry.get("key")
        if key in self.__material_settings_setting_map:
            common_setting_values[self.__material_settings_setting_map[key]] = entry.text
        elif key in self.__unmapped_settings:
            if key == "hardware compatible":
                common_compatibility = self._parseCompatibleValue(entry.text)

    self._cached_values = common_setting_values  # From InstanceContainer ancestor

    meta_data["compatible"] = common_compatibility
    self.setMetaData(meta_data)
    self._dirty = False

    # Map machine human-readable names to IDs
    product_id_map = self.getProductIdMap()

    machines = data.iterfind("./um:settings/um:machine", self.__namespaces)
    for machine in machines:
        machine_compatibility = common_compatibility
        machine_setting_values = {}
        settings = machine.iterfind("./um:setting", self.__namespaces)
        for entry in settings:
            key = entry.get("key")
            if key in self.__material_settings_setting_map:
                machine_setting_values[self.__material_settings_setting_map[key]] = entry.text
            elif key in self.__unmapped_settings:
                if key == "hardware compatible":
                    machine_compatibility = self._parseCompatibleValue(entry.text)
            else:
                Logger.log("d", "Unsupported material setting %s", key)

        cached_machine_setting_properties = common_setting_values.copy()
        cached_machine_setting_properties.update(machine_setting_values)

        identifiers = machine.iterfind("./um:machine_identifier", self.__namespaces)
        for identifier in identifiers:
            machine_id_list = product_id_map.get(identifier.get("product"), [])
            if not machine_id_list:
                machine_id_list = self.getPossibleDefinitionIDsFromName(identifier.get("product"))

            for machine_id in machine_id_list:
                definitions = ContainerRegistry.getInstance().findDefinitionContainersMetadata(id = machine_id)
                if not definitions:
                    continue

                definition = definitions[0]

                machine_manufacturer = identifier.get("manufacturer", definition.get("manufacturer", "Unknown"))  # If the XML material doesn't specify a manufacturer, use the one in the actual printer definition.

                if machine_compatibility:
                    new_material_id = self.getId() + "_" + machine_id

                    # The child or derived material container may already exist. This can happen when a material in a
                    # project file and a material in Cura have the same ID.
                    # If a derived material already exists, override that material container, because if the data in
                    # the parent material has been changed, the derived ones should be updated too.
                    if ContainerRegistry.getInstance().isLoaded(new_material_id):
                        new_material = ContainerRegistry.getInstance().findContainers(id = new_material_id)[0]
                        is_new_material = False
                    else:
                        new_material = XmlMaterialProfile(new_material_id)
                        is_new_material = True

                    new_material.setMetaData(copy.deepcopy(self.getMetaData()))
                    new_material.getMetaData()["id"] = new_material_id
                    new_material.getMetaData()["name"] = self.getName()
                    new_material.setDefinition(machine_id)
                    # Don't use setMetadata, as that overrides it for all materials with same base file
                    new_material.getMetaData()["compatible"] = machine_compatibility
                    new_material.getMetaData()["machine_manufacturer"] = machine_manufacturer
                    new_material.getMetaData()["definition"] = machine_id

                    new_material.setCachedValues(cached_machine_setting_properties)

                    new_material._dirty = False

                    if is_new_material:
                        containers_to_add.append(new_material)

                # Find the buildplates compatibility
                buildplates = machine.iterfind("./um:buildplate", self.__namespaces)
                buildplate_map = {}
                buildplate_map["buildplate_compatible"] = {}
                buildplate_map["buildplate_recommended"] = {}
                for buildplate in buildplates:
                    buildplate_id = buildplate.get("id")
                    if buildplate_id is None:
                        continue

                    from cura.Machines.VariantManager import VariantType
                    variant_manager = CuraApplication.getInstance().getVariantManager()
                    variant_node = variant_manager.getVariantNode(machine_id, buildplate_id)
                    if not variant_node:
                        continue

                    buildplate_compatibility = machine_compatibility
                    buildplate_recommended = machine_compatibility
                    settings = buildplate.iterfind("./um:setting", self.__namespaces)
                    for entry in settings:
                        key = entry.get("key")
                        if key in self.__unmapped_settings:
                            if key == "hardware compatible":
                                buildplate_compatibility = self._parseCompatibleValue(entry.text)
                            elif key == "hardware recommended":
                                buildplate_recommended = self._parseCompatibleValue(entry.text)
                        else:
                            Logger.log("d", "Unsupported material setting %s", key)
                    buildplate_map["buildplate_compatible"][buildplate_id] = buildplate_compatibility
                    buildplate_map["buildplate_recommended"][buildplate_id] = buildplate_recommended

                hotends = machine.iterfind("./um:hotend", self.__namespaces)
                for hotend in hotends:
                    # The "id" field for hotends in material profiles is actually the hotend name
                    hotend_name = hotend.get("id")
                    if hotend_name is None:
                        continue

                    variant_manager = CuraApplication.getInstance().getVariantManager()
                    variant_node = variant_manager.getVariantNode(machine_id, hotend_name)
                    if not variant_node:
                        continue

                    hotend_compatibility = machine_compatibility
                    hotend_setting_values = {}
                    settings = hotend.iterfind("./um:setting", self.__namespaces)
                    for entry in settings:
                        key = entry.get("key")
                        if key in self.__material_settings_setting_map:
                            hotend_setting_values[self.__material_settings_setting_map[key]] = entry.text
                        elif key in self.__unmapped_settings:
                            if key == "hardware compatible":
                                hotend_compatibility = self._parseCompatibleValue(entry.text)
                        else:
                            Logger.log("d", "Unsupported material setting %s", key)

                    new_hotend_specific_material_id = self.getId() + "_" + machine_id + "_" + hotend_name.replace(" ", "_")

                    # Same as machine compatibility, keep the derived material containers consistent with the parent material
                    if ContainerRegistry.getInstance().isLoaded(new_hotend_specific_material_id):
                        new_hotend_material = ContainerRegistry.getInstance().findContainers(id = new_hotend_specific_material_id)[0]
                        is_new_material = False
                    else:
                        new_hotend_material = XmlMaterialProfile(new_hotend_specific_material_id)
                        is_new_material = True

                    new_hotend_material.setMetaData(copy.deepcopy(self.getMetaData()))
                    new_hotend_material.getMetaData()["id"] = new_hotend_specific_material_id
                    new_hotend_material.getMetaData()["name"] = self.getName()
                    new_hotend_material.getMetaData()["variant_name"] = hotend_name
                    new_hotend_material.setDefinition(machine_id)
                    # Don't use setMetadata, as that overrides it for all materials with same base file
                    new_hotend_material.getMetaData()["compatible"] = hotend_compatibility
                    new_hotend_material.getMetaData()["machine_manufacturer"] = machine_manufacturer
                    new_hotend_material.getMetaData()["definition"] = machine_id
                    if buildplate_map["buildplate_compatible"]:
                        new_hotend_material.getMetaData()["buildplate_compatible"] = buildplate_map["buildplate_compatible"]
                        new_hotend_material.getMetaData()["buildplate_recommended"] = buildplate_map["buildplate_recommended"]

                    cached_hotend_setting_properties = cached_machine_setting_properties.copy()
                    cached_hotend_setting_properties.update(hotend_setting_values)

                    new_hotend_material.setCachedValues(cached_hotend_setting_properties)

                    new_hotend_material._dirty = False

                    if is_new_material:
                        containers_to_add.append(new_hotend_material)

                # There is only one ID for a machine. Once we have reached here, it means we have already found
                # a workable ID for that machine, so there is no need to continue
                break

    for container_to_add in containers_to_add:
        ContainerRegistry.getInstance().addContainer(container_to_add)
def _backupAndStartClean(self):
    # Back up the current Cura directories and create clean ones.
    from cura.CuraVersion import CuraVersion
    from UM.Resources import Resources

    # The early crash may happen before this information is set in Resources, so we need to set it here to
    # make sure that Resources can find the correct place.
    Resources.ApplicationIdentifier = "cura"
    Resources.ApplicationVersion = CuraVersion

    config_path = Resources.getConfigStoragePath()
    data_path = Resources.getDataStoragePath()
    cache_path = Resources.getCacheStoragePath()

    folders_to_backup = []
    folders_to_remove = []  # Only the cache folder needs to be removed.

    folders_to_backup.append(config_path)
    if data_path != config_path:
        folders_to_backup.append(data_path)

    # Only remove the cache folder if it's not the same as data or config.
    if cache_path not in (config_path, data_path):
        folders_to_remove.append(cache_path)

    for folder in folders_to_remove:
        shutil.rmtree(folder, ignore_errors = True)

    for folder in folders_to_backup:
        base_name = os.path.basename(folder)
        root_dir = os.path.dirname(folder)

        import datetime
        date_now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        idx = 0
        file_name = base_name + "_" + date_now
        zip_file_path = os.path.join(root_dir, file_name + ".zip")
        while os.path.exists(zip_file_path):
            idx += 1
            file_name = base_name + "_" + date_now + "_" + str(idx)  # idx is an int; convert before concatenating.
            zip_file_path = os.path.join(root_dir, file_name + ".zip")
        try:
            # Only create the zip backup when the folder exists.
            if os.path.exists(folder):
                # Remove the .zip extension because make_archive() adds it.
                zip_file_path = zip_file_path[:-4]
                shutil.make_archive(zip_file_path, "zip", root_dir = root_dir, base_dir = base_name)

                # Remove the folder only when the backup is successful.
                shutil.rmtree(folder, ignore_errors = True)
            # Create an empty folder so Resources will not try to copy the old ones.
            os.makedirs(folder, 0o0755, exist_ok = True)
        except Exception as e:
            Logger.logException("e", "Failed to backup [%s] to file [%s]", folder, zip_file_path)
            if not self.has_started:
                print("Failed to backup [%s] to file [%s]: %s" % (folder, zip_file_path, e))

    self.early_crash_dialog.close()
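
# A minimal, standalone sketch of the timestamped-backup pattern used above, assuming
# nothing about Cura: archive a folder next to itself as <name>_<timestamp>[_<n>].zip,
# appending a counter only if a backup with the same timestamp already exists. The
# function name and argument below are hypothetical, for illustration only.
import datetime
import os
import shutil


def backup_folder_as_zip(folder: str) -> str:
    base_name = os.path.basename(folder)
    root_dir = os.path.dirname(folder)
    stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    candidate = os.path.join(root_dir, "%s_%s" % (base_name, stamp))
    idx = 0
    while os.path.exists(candidate + ".zip"):  # Avoid overwriting an earlier backup.
        idx += 1
        candidate = os.path.join(root_dir, "%s_%s_%d" % (base_name, stamp, idx))
    # make_archive() appends ".zip" itself, so pass the path without the extension.
    return shutil.make_archive(candidate, "zip", root_dir = root_dir, base_dir = base_name)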
def run(self):
    if not self._url:
        Logger.log("e", "Cannot check for a new release. URL not set!")
        return  # Without a URL there is nothing to check.
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion())}
        # CURA-6698: Create an SSL context and use certifi CA certificates for verification.
        context = ssl.SSLContext(protocol = ssl.PROTOCOL_TLSv1_2)
        context.verify_mode = ssl.CERT_REQUIRED
        context.load_verify_locations(cafile = certifi.where())

        request = urllib.request.Request(self._url, headers = headers)
        latest_version_file = urllib.request.urlopen(request, context = context)
    except Exception as e:
        Logger.logException("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            if Application.getInstance().getVersion() != "master":
                local_version = Version(Application.getInstance().getVersion())
            else:
                if not self.silent:
                    Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                            title = i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w", "Could not determine application version from string %s, not checking for updates", Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                        title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    os_name = key
                    if platform.system().lower() == os_name.lower():  # TODO: add architecture check
                        newest_version = Version([int(value["major"]), int(value["minor"]), int(value["revision"])])
                        if local_version < newest_version:
                            preferences = Application.getInstance().getPreferences()
                            latest_version_shown = preferences.getValue("info/latest_update_version_shown")
                            # The preference stores a string, so compare against the string form of the version.
                            if latest_version_shown == str(newest_version) and not self.display_same_version:
                                continue  # Don't show this update again. The user already clicked it away and doesn't want it again.
                            preferences.setValue("info/latest_update_version_shown", str(newest_version))
                            Logger.log("i", "Found a new version of the software. Spawning message.")
                            self.showUpdate(newest_version, value["url"])
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An error occurred while checking for updates."),
                title = i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
def run(self) -> None:
    Logger.log("d", "Auto detect baud rate started.")
    wait_response_timeouts = [3, 15, 30]
    wait_bootloader_times = [1.5, 5, 15]
    write_timeout = 3
    read_timeout = 3
    tries = 2

    programmer = Stk500v2()
    serial = None
    try:
        programmer.connect(self._serial_port)
        serial = programmer.leaveISP()
    except ispBase.IspError:
        programmer.close()

    for retry in range(tries):
        for baud_rate in self._all_baud_rates:
            if retry < len(wait_response_timeouts):
                wait_response_timeout = wait_response_timeouts[retry]
            else:
                wait_response_timeout = wait_response_timeouts[-1]
            if retry < len(wait_bootloader_times):
                wait_bootloader = wait_bootloader_times[retry]
            else:
                wait_bootloader = wait_bootloader_times[-1]
            Logger.log("d", "Checking if baud rate {baud_rate} works on serial {serial}. Retry nr: {retry}. Wait timeout: {timeout}".format(
                serial = self._serial_port, baud_rate = baud_rate, retry = retry, timeout = wait_response_timeout))

            if serial is None:
                try:
                    serial = Serial(str(self._serial_port), baud_rate, timeout = read_timeout, writeTimeout = write_timeout)
                except SerialException:
                    Logger.logException("w", "Unable to create serial")
                    continue
            else:
                # We already have a serial connection, just change the baud rate.
                try:
                    serial.baudrate = baud_rate
                except ValueError:
                    continue
            sleep(wait_bootloader)  # Ensure that we are not talking to the boot loader. 1.5 seconds seems to be the magic number.
            serial.write(b"\n")  # Ensure we clear out previous responses.
            serial.write(b"M105\n")

            start_timeout_time = time()
            timeout_time = time() + wait_response_timeout

            while timeout_time > time():
                line = serial.readline()
                if b"ok" in line and b"T:" in line:
                    self.setResult(baud_rate)
                    Logger.log("d", "Detected baud rate {baud_rate} on serial {serial} on retry {retry} after {time_elapsed:0.2f} seconds.".format(
                        serial = self._serial_port, baud_rate = baud_rate, retry = retry, time_elapsed = time() - start_timeout_time))
                    serial.close()  # Close the serial port so it can be opened by the USBPrinterOutputDevice.
                    return

                serial.write(b"M105\n")
        sleep(15)  # Give the printer some time to init and try again.
    self.setResult(None)  # Unable to detect the correct baud rate.
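
# A minimal sketch of the same probing idea in isolation, assuming pyserial is available
# and that the firmware answers "M105" (a temperature report) with a line containing
# "ok" and "T:". The function name and port argument are placeholders, not Cura API.
from time import time, sleep

from serial import Serial, SerialException  # pyserial


def probe_baud_rate(port: str, baud_rates = (250000, 115200), response_timeout = 3.0):
    for baud_rate in baud_rates:
        try:
            with Serial(port, baud_rate, timeout = 1) as connection:
                sleep(1.5)  # Give the bootloader time to get out of the way.
                connection.write(b"\nM105\n")
                deadline = time() + response_timeout
                while time() < deadline:
                    line = connection.readline()
                    if b"ok" in line and b"T:" in line:  # Plausible Marlin-style reply.
                        return baud_rate
                    connection.write(b"M105\n")
        except SerialException:
            continue  # Port busy or rate not supported; try the next rate.
    return None  # No baud rate produced a sensible reply.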
def _update(self):
    Logger.log("d", "Updating {model_class_name}.".format(model_class_name = self.__class__.__name__))

    if not self._selected_quality_item:
        self.setItems([])
        return

    items = []

    global_container_stack = self._application.getGlobalContainerStack()
    definition_container = global_container_stack.definition

    quality_group = self._selected_quality_item["quality_group"]
    quality_changes_group = self._selected_quality_item["quality_changes_group"]

    if self._selected_position == self.GLOBAL_STACK_POSITION:
        quality_node = quality_group.node_for_global
    else:
        quality_node = quality_group.nodes_for_extruders.get(str(self._selected_position))
    settings_keys = quality_group.getAllKeys()
    quality_containers = []
    if quality_node is not None:
        quality_containers.append(quality_node.getContainer())

    # If the user has selected a quality changes profile, "quality_changes_group" is not None, and we fetch
    # the settings in that quality_changes_group.
    if quality_changes_group is not None:
        if self._selected_position == self.GLOBAL_STACK_POSITION:
            quality_changes_node = quality_changes_group.node_for_global
        else:
            quality_changes_node = quality_changes_group.nodes_for_extruders.get(str(self._selected_position))
        if quality_changes_node is not None:  # It can be None if the number of extruders is changed during runtime.
            try:
                quality_containers.insert(0, quality_changes_node.getContainer())
            except RuntimeError:
                # FIXME: This is to prevent an incomplete update of QualityManager.
                Logger.logException("d", "Failed to get container for quality changes node %s", quality_changes_node)
                return
        settings_keys.update(quality_changes_group.getAllKeys())

    # We iterate over all definitions instead of over the settings in a quality/quality_changes group because, in
    # the GUI, the settings are grouped by category, and we have to go over all the definitions to figure out
    # which setting belongs in which category.
    current_category = ""
    for definition in definition_container.findDefinitions():
        if definition.type == "category":
            current_category = definition.label
            if self._i18n_catalog:
                current_category = self._i18n_catalog.i18nc(definition.key + " label", definition.label)
            continue

        profile_value = None
        profile_value_source = ""
        for quality_container in quality_containers:
            new_value = quality_container.getProperty(definition.key, "value")
            if new_value is not None:
                profile_value_source = quality_container.getMetaDataEntry("type")
                profile_value = new_value

            # The global tab should use resolve (if there is one).
            if self._selected_position == self.GLOBAL_STACK_POSITION:
                resolve_value = global_container_stack.getProperty(definition.key, "resolve")
                if resolve_value is not None and definition.key in settings_keys:
                    profile_value = resolve_value

            if profile_value is not None:
                break

        if self._selected_position == self.GLOBAL_STACK_POSITION:
            user_value = global_container_stack.userChanges.getProperty(definition.key, "value")
        else:
            extruder_stack = global_container_stack.extruders[str(self._selected_position)]
            user_value = extruder_stack.userChanges.getProperty(definition.key, "value")

        if profile_value is None and user_value is None:
            continue

        label = definition.label
        if self._i18n_catalog:
            label = self._i18n_catalog.i18nc(definition.key + " label", label)

        items.append({
            "key": definition.key,
            "label": label,
            "unit": definition.unit,
            "profile_value": "" if profile_value is None else str(profile_value),  # For display only.
            "profile_value_source": profile_value_source,
            "user_value": "" if user_value is None else str(user_value),
            "category": current_category
        })
    self.setItems(items)
def _upgradeFile(self, storage_path_absolute, configuration_file, old_configuration_type):
    configuration_file_absolute = os.path.join(storage_path_absolute, configuration_file)

    # Read the old file.
    try:
        with open(configuration_file_absolute, encoding = "utf-8", errors = "ignore") as file_handle:
            files_data = [file_handle.read()]
    except MemoryError:  # File is too big. It might be the log.
        return False
    except FileNotFoundError:  # File was already moved to an /old directory.
        return False
    except IOError:
        Logger.log("w", "Can't open configuration file %s for reading.", configuration_file_absolute)
        return False

    # Get the version number of the old file.
    try:
        old_version = self._get_version_functions[old_configuration_type](files_data[0])
    except Exception:  # Version getter gives an exception. Not a valid file. Can't upgrade it then.
        return False
    version = old_version
    configuration_type = old_configuration_type

    try:
        mime_type = UM.MimeTypeDatabase.getMimeTypeForFile(configuration_file)  # Get the actual MIME type object, from the name.
    except UM.MimeTypeDatabase.MimeTypeNotFoundError:
        return False

    filenames_without_extension = [self._stripMimeTypeExtension(mime_type, configuration_file)]

    # Keep converting the file until it's at one of the current versions.
    while (configuration_type, version) not in self._current_versions:
        if (configuration_type, version) not in self._upgrade_routes:
            # No version upgrade plug-in claims to be able to upgrade this file.
            return False
        new_type, new_version, upgrade_step = self._upgrade_routes[(configuration_type, version)]

        new_filenames_without_extension = []
        new_files_data = []
        for file_idx, file_data in enumerate(files_data):
            try:
                upgrade_step_result = upgrade_step(file_data, filenames_without_extension[file_idx])
            except Exception:  # Upgrade failed due to a coding error in the plug-in.
                Logger.logException("w", "Exception in %s upgrade with %s: %s", configuration_type, upgrade_step.__module__, traceback.format_exc())
                return False
            if upgrade_step_result:
                this_filenames_without_extension, this_files_data = upgrade_step_result
            else:  # Upgrade failed.
                Logger.log("w", "Unable to upgrade the file %s with %s.%s. Skipping it.", filenames_without_extension[file_idx], upgrade_step.__module__, upgrade_step.__name__)
                return False
            new_filenames_without_extension += this_filenames_without_extension
            new_files_data += this_files_data
        filenames_without_extension = new_filenames_without_extension
        files_data = new_files_data

        version = new_version
        configuration_type = new_type

    # If the version changed, save the new files.
    if version != old_version or configuration_type != old_configuration_type:
        self._storeOldFile(storage_path_absolute, configuration_file, old_version)

        # Find out where to store these files.
        resource_type, mime_type = self._current_versions[(configuration_type, version)]
        storage_path = Resources.getStoragePathForType(resource_type)
        mime_type = UM.MimeTypeDatabase.getMimeType(mime_type)  # Get the actual MIME type object, from the name.
        if mime_type.preferredSuffix:
            extension = "." + mime_type.preferredSuffix
        elif mime_type.suffixes:
            extension = "." + mime_type.suffixes[0]
        else:
            extension = ""  # No known suffix. Put no extension behind it.

        new_filenames = [filename + extension for filename in filenames_without_extension]
        configuration_files_absolute = [os.path.join(storage_path, filename) for filename in new_filenames]

        for file_idx, configuration_file_absolute in enumerate(configuration_files_absolute):
            try:
                with open(configuration_file_absolute, "w", encoding = "utf-8") as file_handle:
                    file_handle.write(files_data[file_idx])  # Save the new file.
            except IOError:
                Logger.log("w", "Couldn't write new configuration file to %s.", configuration_file_absolute)
                return False
        Logger.log("i", "Upgraded %s to version %s.", configuration_file, str(version))
        return True
    return False  # Version didn't change. Was already current.
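
# A minimal sketch of the routing idea behind _upgradeFile(), under the assumption that
# upgrade plug-ins register themselves in a table keyed on (configuration_type, version).
# All names and the toy upgrade function below are hypothetical, for illustration only.
def upgrade_preferences_2_to_3(data: str, filename: str):
    return [filename], [data.replace("version = 2", "version = 3")]

upgrade_routes = {
    # (type, from_version) -> (new_type, new_version, upgrade_function)
    ("preferences", 2): ("preferences", 3, upgrade_preferences_2_to_3),
}
current_versions = {("preferences", 3)}

def upgrade(configuration_type: str, version: int, data: str, filename: str):
    # Keep hopping through the table until we land on a current version.
    while (configuration_type, version) not in current_versions:
        route = upgrade_routes.get((configuration_type, version))
        if route is None:
            return None  # No plug-in can upgrade this file.
        configuration_type, version, step = route
        filenames, datas = step(data, filename)
        filename, data = filenames[0], datas[0]  # Simplified: one file in, one file out.
    return configuration_type, version, data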
def createExtruderStackWithDefaultSetup(cls, global_stack: "GlobalStack", extruder_position: int) -> None: """Create a default Extruder Stack :param global_stack: The global stack this extruder refers to. :param extruder_position: The position of the current extruder. """ from cura.CuraApplication import CuraApplication application = CuraApplication.getInstance() registry = application.getContainerRegistry() # Get the extruder definition. extruder_definition_dict = global_stack.getMetaDataEntry( "machine_extruder_trains") extruder_definition_id = extruder_definition_dict[str( extruder_position)] try: extruder_definition = registry.findDefinitionContainers( id=extruder_definition_id)[0] except IndexError: # It still needs to break, but we want to know what extruder ID made it break. msg = "Unable to find extruder definition with the id [%s]" % extruder_definition_id Logger.logException("e", msg) raise IndexError(msg) # Find out what filament diameter we need. approximate_diameter = round( extruder_definition.getProperty("material_diameter", "value") ) # Can't be modified by definition changes since we are just initialising the stack here. # Find the preferred containers. machine_node = ContainerTree.getInstance().machines[ global_stack.definition.getId()] extruder_variant_node = machine_node.variants.get( machine_node.preferred_variant_name) if not extruder_variant_node: Logger.log( "w", "Could not find preferred nozzle {nozzle_name}. Falling back to {fallback}." .format(nozzle_name=machine_node.preferred_variant_name, fallback=next(iter(machine_node.variants)))) extruder_variant_node = next(iter(machine_node.variants.values())) extruder_variant_container = extruder_variant_node.container material_node = extruder_variant_node.preferredMaterial( approximate_diameter) material_container = material_node.container quality_node = material_node.preferredQuality() new_extruder_id = registry.uniqueName(extruder_definition_id) new_extruder = cls.createExtruderStack( new_extruder_id, extruder_definition=extruder_definition, machine_definition_id=global_stack.definition.getId(), position=extruder_position, variant_container=extruder_variant_container, material_container=material_container, quality_container=quality_node.container) new_extruder.setNextStack(global_stack) registry.addContainer(new_extruder)
def createMachine(cls, name: str, definition_id: str, machine_extruder_count: Optional[int] = None) -> Optional[GlobalStack]:
    """Create a new instance of a machine.

    :param name: The name of the new machine.
    :param definition_id: The ID of the machine definition to use.
    :param machine_extruder_count: The number of extruders in the machine.
    :return: The new global stack or None if an error occurred.
    """
    from cura.CuraApplication import CuraApplication
    application = CuraApplication.getInstance()
    registry = application.getContainerRegistry()
    container_tree = ContainerTree.getInstance()

    definitions = registry.findDefinitionContainers(id = definition_id)
    if not definitions:
        ConfigurationErrorMessage.getInstance().addFaultyContainers(definition_id)
        Logger.log("w", "Definition {definition} was not found!", definition = definition_id)
        return None

    machine_definition = definitions[0]
    machine_node = container_tree.machines[machine_definition.getId()]

    generated_name = registry.createUniqueName("machine", "", name, machine_definition.getName())
    # Make sure the new name does not collide with any definition or (quality) profile.
    # createUniqueName() only looks at other stacks, not at definitions or quality profiles.
    # Note that we don't go for uniqueName() immediately because that function matches with ignore_case set to true.
    if registry.findContainersMetadata(id = generated_name):
        generated_name = registry.uniqueName(generated_name)

    new_global_stack = cls.createGlobalStack(
        new_stack_id = generated_name,
        definition = machine_definition,
        variant_container = application.empty_variant_container,
        material_container = application.empty_material_container,
        quality_container = machine_node.preferredGlobalQuality().container,
    )
    new_global_stack.setName(generated_name)

    # Create the ExtruderStacks.
    extruder_dict = machine_definition.getMetaDataEntry("machine_extruder_trains")
    for position in extruder_dict:
        try:
            cls.createExtruderStackWithDefaultSetup(new_global_stack, position)
        except IndexError as e:
            Logger.logException("e", "Failed to create an extruder stack for position {pos}: {err}".format(pos = position, err = str(e)))
            return None

    # If given, set machine_extruder_count when creating the machine, or else the extruderList used below will
    # not return the correct extruder list (since by default, machine_extruder_count is 1) on machines with a
    # settable number of extruders.
    if machine_extruder_count and 0 <= machine_extruder_count <= len(extruder_dict):
        new_global_stack.setProperty("machine_extruder_count", "value", machine_extruder_count)

    # Only register the extruders if we're sure that all of them are correct.
    for new_extruder in new_global_stack.extruderList:
        registry.addContainer(new_extruder)

    # Register the global stack after the extruder stacks are created. This prevents the registry from adding another
    # extruder stack because the global stack didn't have one yet (which is enforced since Cura 3.1).
    registry.addContainer(new_global_stack)

    return new_global_stack
def _installPackage(self, installation_package_data: Dict[str, Any]) -> None: package_info = installation_package_data["package_info"] filename = installation_package_data["filename"] package_id = package_info["package_id"] Logger.log("i", "Installing package [%s] from file [%s]", package_id, filename) # Load the cached package file and extract all contents to a temporary directory if not os.path.exists(filename): Logger.log( "w", "Package [%s] file '%s' is missing, cannot install this package", package_id, filename) return try: with zipfile.ZipFile(filename, "r") as archive: temp_dir = tempfile.TemporaryDirectory() archive.extractall(temp_dir.name) except Exception: Logger.logException("e", "Failed to install package from file [%s]", filename) return # Remove it first and then install try: self._purgePackage(package_id) except Exception as e: message = Message(catalog.i18nc( "@error:update", "There was an error uninstalling the package {package} before installing " "new version:\n{error}.\nPlease try to upgrade again later.". format(package=package_id, error=str(e))), title=catalog.i18nc("@info:title", "Updating error")) message.show() return # Copy the folders there for sub_dir_name, installation_root_dir in self._installation_dirs_dict.items( ): src_dir_path = os.path.join(temp_dir.name, "files", sub_dir_name) dst_dir_path = os.path.join(installation_root_dir, package_id) if not os.path.exists(src_dir_path): Logger.log( "w", "The path %s does not exist, so not installing the files", src_dir_path) continue self.__installPackageFiles(package_id, src_dir_path, dst_dir_path) # Remove the file try: os.remove(filename) except Exception: Logger.log("w", "Tried to delete file [%s], but it failed", filename) # Move the info to the installed list of packages only when it succeeds self._installed_package_dict[ package_id] = self._to_install_package_dict[package_id] self._installed_package_dict[package_id]["package_info"][ "is_installed"] = True
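
# A minimal sketch of the extract-then-copy pattern used by _installPackage() above,
# using only the standard library. The paths and destination layout are placeholders;
# the real package manager maps several sub-directories to separate installation roots.
import os
import shutil
import tempfile
import zipfile


def extract_package(curapackage_path: str, destination_root: str) -> bool:
    if not os.path.exists(curapackage_path):
        return False  # Nothing to install.
    with tempfile.TemporaryDirectory() as temp_dir:  # Cleaned up automatically.
        try:
            with zipfile.ZipFile(curapackage_path, "r") as archive:
                archive.extractall(temp_dir)
        except zipfile.BadZipFile:
            return False  # Corrupt download; leave the destination untouched.
        src = os.path.join(temp_dir, "files")
        if os.path.exists(src):
            shutil.copytree(src, destination_root, dirs_exist_ok = True)  # dirs_exist_ok needs Python 3.8+.
    return True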
def installPackage(self, filename: str) -> Optional[str]:
    """Schedules the given package file to be installed upon the next start.

    :return: The to-be-installed package_id or None if something went wrong
    """
    has_changes = False
    package_id = ""
    try:
        # Get the package information.
        package_info = self.getPackageInfo(filename)
        if not package_info:
            return None
        package_id = package_info["package_id"]

        # If the package being installed is also on the list of packages to remove, delete it from that list.
        if package_id in self._to_remove_package_set:
            self._to_remove_package_set.remove(package_id)

        # We do not check here whether the same package has already been installed because, for example, Cura
        # may need to install a package with the same package-version but with a higher SDK version. So,
        # the package-version is not the only version that can be in play here.

        # Need to use the lock file to prevent concurrent I/O issues.
        with self._container_registry.lockFile():
            Logger.log("i", "Package [%s] version [%s] is scheduled to be installed.", package_id, package_info["package_version"])
            # Copy the file to the cache dir so we don't need to rely on the original file being present.
            package_cache_dir = os.path.join(os.path.abspath(Resources.getCacheStoragePath()), "cura_packages")
            if not os.path.exists(package_cache_dir):
                os.makedirs(package_cache_dir, exist_ok = True)

            target_file_path = os.path.join(package_cache_dir, package_id + ".curapackage")
            shutil.copy2(filename, target_file_path)

            self._to_install_package_dict[package_id] = {"package_info": package_info, "filename": target_file_path}
            has_changes = True
    except Exception:
        Logger.logException("c", "Failed to install package file '%s'", filename)
    finally:
        self._saveManagementData()
        if has_changes:
            self.installedPackagesChanged.emit()

            if package_id in self._packages_with_update_available:
                # After installing the update, the check will return that no other updates are available.
                # In that case we remove it from the list. This is actually a safe check (could be removed).
                if not self.checkIfPackageCanUpdate(package_id):
                    # The install ensured that the package no longer has a valid update option.
                    self._packages_with_update_available.remove(package_id)
                    self.packagesWithUpdateChanged.emit()

    if has_changes:
        self.packageInstalled.emit(package_id)
        return package_id
    else:
        return None
def _loadManagementData(self) -> None:
    # The bundled package management file should always be there.
    if len(self._bundled_package_management_file_paths) == 0:
        Logger.log("w", "Bundled package management files could not be found!")
        return
    # Load the bundled packages:
    self._bundled_package_dict = {}
    for search_path in self._bundled_package_management_file_paths:
        try:
            with open(search_path, "r", encoding = "utf-8") as f:
                self._bundled_package_dict.update(json.load(f))
                Logger.log("i", "Loaded bundled packages data from %s", search_path)
        except UnicodeDecodeError:
            Logger.logException("e", "Can't decode package management files. File is corrupt.")
            return

    # Need to use the file lock here to prevent concurrent I/O from other processes/threads.
    container_registry = self._application.getContainerRegistry()
    with container_registry.lockFile():
        try:
            # Load the user packages:
            with open(cast(str, self._user_package_management_file_path), "r", encoding = "utf-8") as f:
                try:
                    management_dict = json.load(f)
                except JSONDecodeError:
                    # The file got corrupted; ignore it. This happens extremely infrequently.
                    # The file will get overridden once a user downloads something.
                    return
                self._installed_package_dict = management_dict.get("installed", {})
                self._to_remove_package_set = set(management_dict.get("to_remove", []))
                self._to_install_package_dict = management_dict.get("to_install", {})
                self._dismissed_packages = set(management_dict.get("dismissed", []))
                Logger.log("i", "Loaded user packages management file from %s", self._user_package_management_file_path)
        except FileNotFoundError:
            Logger.log("i", "User package management file %s doesn't exist; do nothing", self._user_package_management_file_path)
            return

    # For packages that became bundled in new releases while a lower version was installed previously, we need
    # to remove the old lower version that's installed in the user's folder.
    for package_id, installed_package_dict in self._installed_package_dict.items():
        bundled_package_dict = self._bundled_package_dict.get(package_id)
        if bundled_package_dict is None:
            continue

        should_install = self._shouldInstallCandidate(installed_package_dict["package_info"],
                                                      bundled_package_dict["package_info"])
        # The bundled package is newer.
        if not should_install:
            self._to_remove_package_set.add(package_id)
            continue

    # Also check the to-install packages to avoid installing packages that have a lower version than the bundled
    # ones.
    to_remove_package_ids = set()
    for package_id, to_install_package_dict in self._to_install_package_dict.items():
        bundled_package_dict = self._bundled_package_dict.get(package_id)
        if bundled_package_dict is None:
            continue

        should_install = self._shouldInstallCandidate(to_install_package_dict["package_info"],
                                                      bundled_package_dict["package_info"])
        # The bundled package is newer.
        if not should_install:
            Logger.info("Ignoring package {} since its SDK or package version is lower than the bundled package's", package_id)
            to_remove_package_ids.add(package_id)
            continue
    for package_id in to_remove_package_ids:
        del self._to_install_package_dict[package_id]
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode): self._archive = None # Reset archive archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED) try: model_file = zipfile.ZipInfo("3D/3dmodel.model") # Because zipfile is stupid and ignores archive-level compression settings when writing with ZipInfo. model_file.compress_type = zipfile.ZIP_DEFLATED # Create content types file content_types_file = zipfile.ZipInfo("[Content_Types].xml") content_types_file.compress_type = zipfile.ZIP_DEFLATED content_types = ET.Element("Types", xmlns = self._namespaces["content-types"]) rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml") model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml") # Create _rels/.rels file relations_file = zipfile.ZipInfo("_rels/.rels") relations_file.compress_type = zipfile.ZIP_DEFLATED relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"]) model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/3D/3dmodel.model", Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel") savitar_scene = Savitar.Scene() transformation_matrix = Matrix() transformation_matrix._data[1, 1] = 0 transformation_matrix._data[1, 2] = -1 transformation_matrix._data[2, 1] = 1 transformation_matrix._data[2, 2] = 0 global_container_stack = Application.getInstance().getGlobalContainerStack() # Second step: 3MF defines the left corner of the machine as center, whereas cura uses the center of the # build volume. if global_container_stack: translation_vector = Vector(x=global_container_stack.getProperty("machine_width", "value") / 2, y=global_container_stack.getProperty("machine_depth", "value") / 2, z=0) translation_matrix = Matrix() translation_matrix.setByTranslation(translation_vector) transformation_matrix.preMultiply(translation_matrix) root_node = UM.Application.Application.getInstance().getController().getScene().getRoot() for node in nodes: if node == root_node: for root_child in node.getChildren(): if type(root_child) != DuplicatedNode: savitar_node = self._convertUMNodeToSavitarNode(root_child, transformation_matrix) if savitar_node: savitar_scene.addSceneNode(savitar_node) elif type(node) != DuplicatedNode: savitar_node = self._convertUMNodeToSavitarNode(node, transformation_matrix) if savitar_node: savitar_scene.addSceneNode(savitar_node) parser = Savitar.ThreeMFParser() scene_string = parser.sceneToString(savitar_scene) archive.writestr(model_file, scene_string) archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types)) archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element)) except Exception as e: Logger.logException("e", "Error writing zip file") return False finally: if not self._store_archive: archive.close() else: self._archive = archive return True
def run(self):
    reader = self._handler.getReaderForFile(self._filename)
    if not reader:
        result_message = Message(i18n_catalog.i18nc("@info:status", "Cannot open file type <filename>{0}</filename>", self._filename), lifetime = 0)
        result_message.show()
        return

    # Give the plugin a chance to display a dialog before showing the loading UI.
    pre_read_result = reader.preRead(self._filename)

    if pre_read_result != MeshReader.PreReadResult.accepted:
        if pre_read_result == MeshReader.PreReadResult.failed:
            result_message = Message(i18n_catalog.i18nc("@info:status", "Failed to load <filename>{0}</filename>", self._filename), lifetime = 0)
            result_message.show()
        return

    loading_message = Message(i18n_catalog.i18nc("@info:status", "Loading <filename>{0}</filename>", self._filename), lifetime = 0, dismissable = False)
    loading_message.setProgress(-1)
    loading_message.show()

    Job.yieldThread()  # Yield to any other thread that might want to do something else.

    node = None
    try:
        begin_time = time.time()
        node = self._handler.readerRead(reader, self._filename)
        end_time = time.time()
        Logger.log("d", "Loading mesh took %s seconds", end_time - begin_time)
    except Exception:
        Logger.logException("e", "Exception in mesh loader")
    if not node:
        loading_message.hide()
        result_message = Message(i18n_catalog.i18nc("@info:status", "Failed to load <filename>{0}</filename>", self._filename), lifetime = 0)
        result_message.show()
        return

    # Scale down to the maximum bounds size if that is available.
    if hasattr(Application.getInstance().getController().getScene(), "_maximum_bounds"):
        max_bounds = Application.getInstance().getController().getScene()._maximum_bounds
        node._resetAABB()
        build_bounds = node.getBoundingBox()

        if Preferences.getInstance().getValue("mesh/scale_to_fit") or Preferences.getInstance().getValue("mesh/scale_tiny_meshes"):
            scale_factor_width = max_bounds.width / build_bounds.width
            scale_factor_height = max_bounds.height / build_bounds.height
            scale_factor_depth = max_bounds.depth / build_bounds.depth
            scale_factor = min(scale_factor_width, scale_factor_depth, scale_factor_height)

            if Preferences.getInstance().getValue("mesh/scale_to_fit") and (scale_factor_width < 1 or scale_factor_height < 1 or scale_factor_depth < 1):
                # Use the scale factor to scale a large object down.
                # Ignore scaling on models which are less than 1.25 times bigger than the build volume.
                ignore_factor = 1.25
                if 1 / scale_factor < ignore_factor:
                    Logger.log("i", "Ignoring auto-scaling, because %.3f < %.3f" % (1 / scale_factor, ignore_factor))
                    scale_factor = 1
            elif Preferences.getInstance().getValue("mesh/scale_tiny_meshes") and (scale_factor_width > 100 and scale_factor_height > 100 and scale_factor_depth > 100):
                # Round the scale factor down to a power of 10 to scale a tiny object up (e.g. convert m to mm units).
                scale_factor = math.pow(10, math.floor(math.log(scale_factor) / math.log(10)))
            else:
                scale_factor = 1

            if scale_factor != 1:
                scale_vector = Vector(scale_factor, scale_factor, scale_factor)
                display_scale_factor = scale_factor * 100

                scale_message = Message(i18n_catalog.i18nc("@info:status", "Auto scaled object to {0}% of original size", ("%i" % display_scale_factor)))

                try:
                    node.scale(scale_vector)
                    scale_message.show()
                except Exception:
                    Logger.logException("e", "While auto-scaling an exception has been raised")
    self.setResult(node)
    loading_message.hide()
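
# The auto-scaling decision above, distilled into a pure function for clarity. This is
# a hedged sketch, not Cura API: bounds are plain (width, height, depth) tuples, and the
# 1.25 ignore factor / power-of-ten rounding mirror the logic in run().
import math


def compute_auto_scale(build, maximum, scale_to_fit = True, scale_tiny = True):
    factors = [m / b for m, b in zip(maximum, build)]
    scale_factor = min(factors)
    if scale_to_fit and any(f < 1 for f in factors):
        if 1 / scale_factor < 1.25:  # Barely too big: don't bother scaling down.
            return 1.0
        return scale_factor
    if scale_tiny and all(f > 100 for f in factors):
        # Round down to a power of 10, e.g. 147.2 -> 100 (a likely m -> mm mix-up).
        return math.pow(10, math.floor(math.log10(scale_factor)))
    return 1.0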
def run(self):
    try:
        # Initialize a preference that stores the last version checked for this printer.
        Application.getInstance().getPreferences().addPreference(getSettingsKeyForMachine(self._lookups.getMachineId()), "")

        # Get headers.
        application_name = Application.getInstance().getApplicationName()
        application_version = Application.getInstance().getVersion()
        self._headers = {"User-Agent": "%s - %s" % (application_name, application_version)}

        # If it is not None, then we compare the checked_version and the current_version.
        machine_id = self._lookups.getMachineId()
        if machine_id is not None:
            Logger.log("i", "You have a(n) {0} in the printer list. Do firmware-check.".format(self._machine_name))
            current_version = self.getCurrentVersion()

            # This case indicates that there was an error checking the version.
            # It happens, for instance, when not connected to the internet.
            if current_version == self.ZERO_VERSION:
                return

            # If it is the first time the version is checked, the checked_version is "".
            setting_key_str = getSettingsKeyForMachine(machine_id)
            checked_version = Version(Application.getInstance().getPreferences().getValue(setting_key_str))

            # If the checked_version is "", this is the first time we check the firmware. In that case
            # we will not show the notification, but we will store the version for the next time.
            Application.getInstance().getPreferences().setValue(setting_key_str, current_version)
            Logger.log("i", "Reading firmware version of %s: checked = %s - latest = %s", self._machine_name, checked_version, current_version)

            # The first time we only store the current version; the notification is not shown,
            # because the new version of Cura will be released before the firmware and we don't want to
            # notify the user when no new firmware version is available.
            if (checked_version != "") and (checked_version != current_version):
                Logger.log("i", "Showing firmware update message for new version: {version}".format(version = current_version))
                message = FirmwareUpdateCheckerMessage(machine_id, self._machine_name, self._lookups.getRedirectUserUrl())
                message.actionTriggered.connect(self._callback)
                message.show()
        else:
            Logger.log("i", "No machine with name {0} in list of firmware to check.".format(self._machine_name))

    except Exception as e:
        Logger.logException("w", "Failed to check for new version: %s", e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information.")).show()
        return
def run(self):
    if not self._url:
        Logger.log("e", "Cannot check for a new release. URL not set!")
        return  # Without a URL there is nothing to check.
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion())}
        request = urllib.request.Request(self._url, headers = headers)
        latest_version_file = urllib.request.urlopen(request)
    except Exception as e:
        Logger.log("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            if Application.getInstance().getVersion() != "master":  # "!=" compares values; "is not" only compared object identity.
                local_version = Version(Application.getInstance().getVersion())
            else:
                if not self.silent:
                    Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                            title = i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w", "Could not determine application version from string %s, not checking for updates", Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                        title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    os_name = key
                    if platform.system() == os_name:  # TODO: add architecture check
                        newest_version = Version([int(value["major"]), int(value["minor"]), int(value["revision"])])
                        if local_version < newest_version:
                            Logger.log("i", "Found a new version of the software. Spawning message")
                            message = Message(i18n_catalog.i18nc("@info", "A new version is available!"),
                                              title = i18n_catalog.i18nc("@info:title", "Version Upgrade"))
                            message.addAction("download", i18n_catalog.i18nc("@action:button", "Download"), "[no_icon]", "[no_description]")
                            if self._set_download_url_callback:
                                self._set_download_url_callback(value["url"])
                            message.actionTriggered.connect(self._callback)
                            message.show()
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An exception occurred while checking for updates."),
                title = i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
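
# Why the comparison style matters above: versions must be compared numerically, not as
# strings or object identities. A minimal sketch of string-to-tuple version comparison,
# independent of the UM.Version class, assuming "major.minor.revision" input.
def parse_version(text: str):
    return tuple(int(part) for part in text.split("."))

assert parse_version("3.4.1") < parse_version("3.10.0")  # Tuple compare, not string compare.
assert "3.10.0" < "3.4.1"  # Plain string compare gets this wrong, which is why we parse first.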
def _update(self):
    items = []
    if self._metadata is None:
        Logger.log("w", "Failed to load packages for Marketplace")
        self.setItems(items)
        return

    for package in self._metadata:
        has_configs = False
        configs_model = None

        links_dict = {}
        if "data" in package:
            if "supported_configs" in package["data"]:
                if len(package["data"]["supported_configs"]) > 0:
                    has_configs = True
                    configs_model = ConfigsModel()
                    configs_model.setConfigs(package["data"]["supported_configs"])

            # "links" is a list of dictionaries with "title" and "url". Convert this list into a dict so it's easier
            # to process.
            link_list = package["data"]["links"] if "links" in package["data"] else []
            links_dict = {d["title"]: d["url"] for d in link_list}

        if "author_id" not in package["author"] or "display_name" not in package["author"]:
            package["author"]["author_id"] = ""
            package["author"]["display_name"] = ""

        items.append({
            "id": package["package_id"],
            "type": package["package_type"],
            "name": package["display_name"],
            "version": package["package_version"],
            "author_id": package["author"]["author_id"],
            "author_name": package["author"]["display_name"],
            "author_email": package["author"]["email"] if "email" in package["author"] else None,
            "description": package["description"] if "description" in package else None,
            "icon_url": package["icon_url"] if "icon_url" in package else None,
            "image_urls": package["image_urls"] if "image_urls" in package else None,
            "download_url": package["download_url"] if "download_url" in package else None,
            "last_updated": package["last_updated"] if "last_updated" in package else None,
            "is_bundled": package["is_bundled"] if "is_bundled" in package else False,
            "is_active": package["is_active"] if "is_active" in package else False,
            "is_installed": package["is_installed"] if "is_installed" in package else False,
            "has_configs": has_configs,
            "supported_configs": configs_model,
            "download_count": package["download_count"] if "download_count" in package else 0,
            "tags": package["tags"] if "tags" in package else [],
            "links": links_dict,
            "website": package["website"] if "website" in package else None,
        })

    # Filter on all the keyword arguments.
    for key, value in self._filter.items():
        if key == "tags":
            key_filter = lambda item, v = value: v in item["tags"]
        elif "*" in value:
            key_filter = lambda candidate, k = key, v = value: self._matchRegExp(candidate, k, v)
        else:
            key_filter = lambda candidate, k = key, v = value: self._matchString(candidate, k, v)
        items = filter(key_filter, items)  # Execute all filters.

    filtered_items = list(items)
    filtered_items.sort(key = lambda k: k["name"])
    self.setItems(filtered_items)
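
# The default-argument trick above (k = key, v = value) matters: lambdas close over
# variables, not values, so without it every filter created in the loop would see only
# the *last* key/value pair. A self-contained illustration:
filters = {"name": "Cura", "type": "plugin"}

late_bound = [lambda item: (key, value) for key, value in filters.items()]
early_bound = [lambda item, k = key, v = value: (k, v) for key, value in filters.items()]

print([f(None) for f in late_bound])   # [('type', 'plugin'), ('type', 'plugin')] - all see the last pair.
print([f(None) for f in early_bound])  # [('name', 'Cura'), ('type', 'plugin')] - each keeps its own pair.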
def _populateMetaData(self, plugin_id: str) -> bool: """Populate the list of metadata""" plugin = self._findPlugin(plugin_id) if not plugin: Logger.log("w", "Could not find plugin %s", plugin_id) return False location = None for folder in self._plugin_locations: location = self._locatePlugin(plugin_id, folder) if location: break if not location: Logger.log("w", "Could not find plugin %s", plugin_id) return False location = os.path.join(location, plugin_id) try: meta_data = plugin.getMetaData( ) #type: ignore #We catch the AttributeError that this would raise if the module has no getMetaData function. metadata_file = os.path.join(location, "plugin.json") try: with open(metadata_file, "r", encoding="utf-8") as file_stream: self._parsePluginInfo(plugin_id, file_stream.read(), meta_data) except FileNotFoundError: Logger.logException( "e", "Unable to find the required plugin.json file for plugin %s", plugin_id) raise InvalidMetaDataError(plugin_id) except UnicodeDecodeError: Logger.logException( "e", "The plug-in metadata file for plug-in {plugin_id} is corrupt." .format(plugin_id=plugin_id)) raise InvalidMetaDataError(plugin_id) except EnvironmentError as e: Logger.logException( "e", "Can't open the metadata file for plug-in {plugin_id}: {err}" .format(plugin_id=plugin_id, err=str(e))) raise InvalidMetaDataError(plugin_id) except AttributeError as e: Logger.log( "e", "Plug-in {plugin_id} has no getMetaData function to get metadata of the plug-in: {err}" .format(plugin_id=plugin_id, err=str(e))) raise InvalidMetaDataError(plugin_id) except TypeError as e: Logger.log( "e", "Plug-in {plugin_id} has a getMetaData function with the wrong signature: {err}" .format(plugin_id=plugin_id, err=str(e))) raise InvalidMetaDataError(plugin_id) if not meta_data: raise InvalidMetaDataError(plugin_id) meta_data["id"] = plugin_id meta_data["location"] = location # Application-specific overrides appname = self._application.getApplicationName() if appname in meta_data: meta_data.update(meta_data[appname]) del meta_data[appname] self._metadata[plugin_id] = meta_data return True
def deserializeMetadata(cls, serialized: str, container_id: str) -> List[Dict[str, Any]]:
    result_metadata = []  # All the metadata that we found except the base (because the base is returned).

    # Update the serialized data to the latest version.
    serialized = cls._updateSerialized(serialized)

    base_metadata = {
        "type": "material",
        "status": "unknown",  # TODO: Add material verification.
        "container_type": XmlMaterialProfile,
        "id": container_id,
        "base_file": container_id
    }

    try:
        data = ET.fromstring(serialized)
    except Exception:
        Logger.logException("e", "An exception occurred while parsing the material profile")
        return []

    # TODO: Implement the <inherits> tag. It's unused at the moment though.

    if "version" in data.attrib:
        base_metadata["setting_version"] = cls.xmlVersionToSettingVersion(data.attrib["version"])
    else:
        base_metadata["setting_version"] = cls.xmlVersionToSettingVersion("1.2")  # 1.2 and lower didn't have that version number there yet.

    for entry in data.iterfind("./um:metadata/*", cls.__namespaces):
        tag_name = _tag_without_namespace(entry)

        if tag_name == "name":
            brand = entry.find("./um:brand", cls.__namespaces)
            material = entry.find("./um:material", cls.__namespaces)
            color = entry.find("./um:color", cls.__namespaces)
            label = entry.find("./um:label", cls.__namespaces)

            if label is not None:
                base_metadata["name"] = label.text
            else:
                base_metadata["name"] = cls._profile_name(material.text, color.text)
            base_metadata["brand"] = brand.text
            base_metadata["material"] = material.text
            base_metadata["color_name"] = color.text
            continue

        # setting_version is derived from the "version" tag in the schema earlier, so don't set it here.
        if tag_name == "setting_version":
            continue

        base_metadata[tag_name] = entry.text

    if "description" not in base_metadata:
        base_metadata["description"] = ""
    if "adhesion_info" not in base_metadata:
        base_metadata["adhesion_info"] = ""

    property_values = {}
    properties = data.iterfind("./um:properties/*", cls.__namespaces)
    for entry in properties:
        tag_name = _tag_without_namespace(entry)
        property_values[tag_name] = entry.text

    base_metadata["approximate_diameter"] = str(round(float(property_values.get("diameter", 2.85))))  # In mm.
    base_metadata["properties"] = property_values
    base_metadata["definition"] = "fdmprinter"

    compatible_entries = data.iterfind("./um:settings/um:setting[@key='hardware compatible']", cls.__namespaces)
    try:
        common_compatibility = cls._parseCompatibleValue(next(compatible_entries).text)
    except StopIteration:  # No 'hardware compatible' setting.
        common_compatibility = True
    base_metadata["compatible"] = common_compatibility
    result_metadata.append(base_metadata)

    # Map machine human-readable names to IDs.
    product_id_map = cls.getProductIdMap()

    for machine in data.iterfind("./um:settings/um:machine", cls.__namespaces):
        machine_compatibility = common_compatibility
        for entry in machine.iterfind("./um:setting", cls.__namespaces):
            key = entry.get("key")
            if key == "hardware compatible":
                machine_compatibility = cls._parseCompatibleValue(entry.text)

        for identifier in machine.iterfind("./um:machine_identifier", cls.__namespaces):
            machine_id_list = product_id_map.get(identifier.get("product"), [])
            if not machine_id_list:
                machine_id_list = cls.getPossibleDefinitionIDsFromName(identifier.get("product"))

            for machine_id in machine_id_list:
                definition_metadata = ContainerRegistry.getInstance().findDefinitionContainersMetadata(id = machine_id)
                if not definition_metadata:
                    continue
                definition_metadata = definition_metadata[0]

                machine_manufacturer = identifier.get("manufacturer", definition_metadata.get("manufacturer", "Unknown"))  # If the XML material doesn't specify a manufacturer, use the one in the actual printer definition.

                if machine_compatibility:
                    new_material_id = container_id + "_" + machine_id

                    # The child or derived material container may already exist. This can happen when a material in a
                    # project file and a material in Cura have the same ID.
                    # If a derived material already exists, override that material container, because if
                    # the data in the parent material has been changed, the derived ones should be updated too.
                    found_materials = ContainerRegistry.getInstance().findInstanceContainersMetadata(id = new_material_id)
                    if found_materials:
                        new_material_metadata = found_materials[0]
                    else:
                        new_material_metadata = {}

                    new_material_metadata.update(base_metadata)
                    new_material_metadata["id"] = new_material_id
                    new_material_metadata["compatible"] = machine_compatibility
                    new_material_metadata["machine_manufacturer"] = machine_manufacturer
                    new_material_metadata["definition"] = machine_id

                    if len(found_materials) == 0:  # This is a new material.
                        result_metadata.append(new_material_metadata)

                buildplates = machine.iterfind("./um:buildplate", cls.__namespaces)
                buildplate_map = {}
                buildplate_map["buildplate_compatible"] = {}
                buildplate_map["buildplate_recommended"] = {}
                for buildplate in buildplates:
                    buildplate_id = buildplate.get("id")
                    if buildplate_id is None:
                        continue

                    variant_containers = ContainerRegistry.getInstance().findInstanceContainersMetadata(id = buildplate_id)
                    if not variant_containers:
                        # It is not really properly defined what "ID" is, so also search for variants by name.
                        variant_containers = ContainerRegistry.getInstance().findInstanceContainersMetadata(definition = machine_id, name = buildplate_id)
                    if not variant_containers:
                        continue

                    buildplate_compatibility = machine_compatibility
                    buildplate_recommended = machine_compatibility
                    settings = buildplate.iterfind("./um:setting", cls.__namespaces)
                    for entry in settings:
                        key = entry.get("key")
                        if key == "hardware compatible":
                            buildplate_compatibility = cls._parseCompatibleValue(entry.text)
                        elif key == "hardware recommended":
                            buildplate_recommended = cls._parseCompatibleValue(entry.text)

                    buildplate_map["buildplate_compatible"][buildplate_id] = buildplate_compatibility
                    buildplate_map["buildplate_recommended"][buildplate_id] = buildplate_recommended

                for hotend in machine.iterfind("./um:hotend", cls.__namespaces):
                    hotend_name = hotend.get("id")
                    if hotend_name is None:
                        continue

                    hotend_compatibility = machine_compatibility
                    for entry in hotend.iterfind("./um:setting", cls.__namespaces):
                        key = entry.get("key")
                        if key == "hardware compatible":
                            hotend_compatibility = cls._parseCompatibleValue(entry.text)

                    new_hotend_specific_material_id = container_id + "_" + machine_id + "_" + hotend_name.replace(" ", "_")

                    # Same as machine compatibility: keep the derived material containers consistent with the parent material.
                    found_materials = ContainerRegistry.getInstance().findInstanceContainersMetadata(id = new_hotend_specific_material_id)
                    if found_materials:
                        new_hotend_material_metadata = found_materials[0]
                    else:
                        new_hotend_material_metadata = {}

                    new_hotend_material_metadata.update(base_metadata)
                    new_hotend_material_metadata["variant_name"] = hotend_name
                    new_hotend_material_metadata["compatible"] = hotend_compatibility
                    new_hotend_material_metadata["machine_manufacturer"] = machine_manufacturer
                    new_hotend_material_metadata["id"] = new_hotend_specific_material_id
                    new_hotend_material_metadata["definition"] = machine_id
                    if buildplate_map["buildplate_compatible"]:
                        new_hotend_material_metadata["buildplate_compatible"] = buildplate_map["buildplate_compatible"]
                        new_hotend_material_metadata["buildplate_recommended"] = buildplate_map["buildplate_recommended"]

                    if len(found_materials) == 0:
                        result_metadata.append(new_hotend_material_metadata)

                # There is only one ID for a machine. Once we have reached here, it means we have already found
                # a workable ID for that machine, so there is no need to continue.
                break

    return result_metadata
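
# For context, a trimmed example of the XML this parser consumes, fed through the same
# ElementTree namespace mechanism. The profile content below is invented for
# illustration; only the namespace URI matches the real fdmmaterial format.
import xml.etree.ElementTree as ET

namespaces = {"um": "http://www.ultimaker.com/material"}
example = """<fdmmaterial xmlns="http://www.ultimaker.com/material" version="1.3">
  <settings>
    <setting key="print temperature">210</setting>
    <machine>
      <machine_identifier manufacturer="Ultimaker" product="Ultimaker 3"/>
      <setting key="hardware compatible">yes</setting>
    </machine>
  </settings>
</fdmmaterial>"""

data = ET.fromstring(example)
for entry in data.iterfind("./um:settings/um:setting", namespaces):
    print(entry.get("key"), "=", entry.text)  # print temperature = 210
for machine in data.iterfind("./um:settings/um:machine", namespaces):
    for identifier in machine.iterfind("./um:machine_identifier", namespaces):
        print(identifier.get("product"))  # Ultimaker 3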
def _populateMetaData(self, plugin_id: str) -> bool: plugin = self._findPlugin(plugin_id) if not plugin: Logger.log("w", "Could not find plugin %s", plugin_id) return False meta_data = None location = None for folder in self._plugin_locations: location = self._locatePlugin(plugin_id, folder) if location: break if not location: Logger.log("w", "Could not find plugin %s", plugin_id) return False location = os.path.join(location, plugin_id) try: meta_data = plugin.getMetaData() metadata_file = os.path.join(location, "plugin.json") try: with open(metadata_file, "r") as f: try: meta_data["plugin"] = json.loads(f.read()) except json.decoder.JSONDecodeError: Logger.logException("e", "Failed to parse plugin.json for plugin %s", plugin_id) raise InvalidMetaDataError(plugin_id) # Check if metadata is valid; if "version" not in meta_data["plugin"]: Logger.log("e", "Version must be set!") raise InvalidMetaDataError(plugin_id) if "i18n-catalog" in meta_data["plugin"]: # A catalog was set, try to translate a few strings i18n_catalog = i18nCatalog(meta_data["plugin"]["i18n-catalog"]) if "name" in meta_data["plugin"]: meta_data["plugin"]["name"] = i18n_catalog.i18n(meta_data["plugin"]["name"]) if "description" in meta_data["plugin"]: meta_data["plugin"]["description"] = i18n_catalog.i18n(meta_data["plugin"]["description"]) except FileNotFoundError: Logger.logException("e", "Unable to find the required plugin.json file for plugin %s", plugin_id) raise InvalidMetaDataError(plugin_id) except AttributeError as e: Logger.log("e", "An error occurred getting metadata from plugin %s: %s", plugin_id, str(e)) raise InvalidMetaDataError(plugin_id) if not meta_data: raise InvalidMetaDataError(plugin_id) meta_data["id"] = plugin_id meta_data["location"] = location # Application-specific overrides appname = self._application.getApplicationName() if appname in meta_data: meta_data.update(meta_data[appname]) del meta_data[appname] self._metadata[plugin_id] = meta_data return True
def __init__(self, key, address, port, properties):
    super().__init__(key)

    self._address = address
    self._port = port
    self._path = properties.get(b"path", b"/").decode("utf-8")
    if self._path[-1:] != "/":
        self._path += "/"
    self._key = key
    self._properties = properties  # Properties dict as provided by zeroconf.

    self._gcode = None
    self._auto_print = True

    # We start with a single extruder, but update this when we get data from OctoPrint.
    self._num_extruders_set = False
    self._num_extruders = 1

    # Try to get version information from plugin.json.
    plugin_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugin.json")
    try:
        with open(plugin_file_path) as plugin_file:
            plugin_info = json.load(plugin_file)
            plugin_version = plugin_info["version"]
    except Exception:
        # The actual version info is not critical to have, so we can continue.
        plugin_version = "Unknown"
        Logger.logException("w", "Could not get version information for the plugin")

    self._user_agent_header = "User-Agent".encode()
    self._user_agent = ("%s/%s %s/%s" % (
        Application.getInstance().getApplicationName(),
        Application.getInstance().getVersion(),
        "OctoPrintPlugin",
        plugin_version)).encode()  # Use the plugin's own version for the plugin part of the user agent.

    self._api_prefix = "api/"
    self._api_header = "X-Api-Key".encode()
    self._api_key = None

    self._protocol = "https" if properties.get(b"useHttps") == b"true" else "http"
    self._base_url = "%s://%s:%d%s" % (self._protocol, self._address, self._port, self._path)
    self._api_url = self._base_url + self._api_prefix

    self._basic_auth_header = "Authorization".encode()  # The standard HTTP header for credentials.
    self._basic_auth_data = None
    basic_auth_username = properties.get(b"userName", b"").decode("utf-8")
    basic_auth_password = properties.get(b"password", b"").decode("utf-8")
    if basic_auth_username and basic_auth_password:
        self._basic_auth_data = ("%s:%s" % (basic_auth_username, basic_auth_password)).encode()

    self._monitor_view_qml_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "MonitorItem.qml")

    self.setPriority(2)  # Make sure the output device gets selected above local file output.
    self.setName(key)
    self.setShortDescription(i18n_catalog.i18nc("@action:button", "Print with OctoPrint"))
    self.setDescription(i18n_catalog.i18nc("@properties:tooltip", "Print with OctoPrint"))
    self.setIconName("print")
    self.setConnectionText(i18n_catalog.i18nc("@info:status", "Connected to OctoPrint on {0}").format(self._key))

    # The QNetworkAccessManager needs to be created in advance. If we don't, it can happen that it doesn't correctly
    # hook itself into the event loop, which results in events never being fired / done.
    self._manager = QNetworkAccessManager()
    self._manager.finished.connect(self._onRequestFinished)

    ## Ensure that the qt networking stuff isn't garbage collected (unless we want it to)
    self._settings_reply = None
    self._printer_reply = None
    self._job_reply = None
    self._command_reply = None
    self._image_reply = None
    self._stream_buffer = b""
    self._stream_buffer_start_index = -1

    self._post_reply = None
    self._post_multi_part = None
    self._post_part = None

    self._progress_message = None
    self._error_message = None
    self._connection_message = None

    self._update_timer = QTimer()
    self._update_timer.setInterval(2000)  # TODO: Add a preference for the update interval.
    self._update_timer.setSingleShot(False)
    self._update_timer.timeout.connect(self._update)

    self._camera_image_id = 0
    self._camera_image = QImage()
    self._camera_mirror = ""
    self._camera_rotation = 0
    self._camera_url = ""
    self._camera_shares_proxy = False

    self._sd_supported = False

    self._connection_state_before_timeout = None

    self._last_response_time = None
    self._last_request_time = None
    self._response_timeout_time = 5
    self._recreate_network_manager_time = 30  # If we have no connection, re-create the network manager every 30 seconds.
    self._recreate_network_manager_count = 1

    self._preheat_timer = QTimer()
    self._preheat_timer.setSingleShot(True)
    self._preheat_timer.timeout.connect(self.cancelPreheatBed)
def loadPlugin(self, plugin_id: str):
    # If the plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # If the plugin is in the list of disabled plugins, alert and return:
    if plugin_id in self._disabled_plugins:
        Logger.log("d", "Plugin %s was disabled", plugin_id)
        return

    # Find the actual plugin on drive:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise an error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but not yet in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # If the API version is incompatible, don't load it.
    if self._metadata[plugin_id].get("plugin", {}).get("api", 0) != self.APIVersion:
        Logger.log("w", "Plugin %s uses an incompatible API version, ignoring", plugin_id)
        del self._metadata[plugin_id]
        return

    # HACK: OctoPrint plug-in versions below 3.3.0 broke the start-up sequence when auto-connecting.
    # Remove this hack once we've increased the API version number to something higher than 4.
    version = self._metadata[plugin_id].get("plugin", {}).get("version", "0.0.0")
    if plugin_id == "OctoPrintPlugin" and Version(version) < Version("3.3.0"):
        Logger.log("e", "Plugin OctoPrintPlugin version {version} was disabled because it was using an old API for network connection.".format(version = version))
        return

    try:
        to_register = plugin.register(self._application)
        if not to_register:
            Logger.log("e", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                for nested_plugin_object in plugin_object:
                    nested_plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                self._addPluginObject(plugin_object, plugin_id, plugin_type)

        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.log("i", "Loaded plugin %s", plugin_id)
    except KeyError as e:
        Logger.log("e", "Error loading plugin %s:", plugin_id)
        Logger.log("e", "Unknown plugin type: %s", str(e))
    except Exception:
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
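
# A minimal sketch of the register() contract that loadPlugin() relies on: a plugin
# module exposes register(app) returning a dict that maps a plugin-type name to an
# object (or to a list of objects). The "extension" type name and ExampleExtension
# class are illustrative assumptions, not taken from this document.
from UM.Extension import Extension

class ExampleExtension(Extension):
    pass

def getMetaData():
    return {}  # A real plugin returns its metadata here; see _populateMetaData() above.

def register(app):
    return {"extension": ExampleExtension()}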
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode):
    self._archive = None  # Reset archive
    archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
    try:
        model_file = zipfile.ZipInfo("3D/3dmodel.model")
        # zipfile ignores archive-level compression settings when writing with ZipInfo, so set it per file.
        model_file.compress_type = zipfile.ZIP_DEFLATED

        # Create the content types file
        content_types_file = zipfile.ZipInfo("[Content_Types].xml")
        content_types_file.compress_type = zipfile.ZIP_DEFLATED
        content_types = ET.Element("Types", xmlns = self._namespaces["content-types"])
        rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml")
        model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml")

        # Create the _rels/.rels file
        relations_file = zipfile.ZipInfo("_rels/.rels")
        relations_file.compress_type = zipfile.ZIP_DEFLATED
        relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"])
        model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/3D/3dmodel.model", Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")

        model = ET.Element("model", unit = "millimeter", xmlns = self._namespaces["3mf"])
        model.set("xmlns:cura", self._namespaces["cura"])

        # Add the version of Cura this was created with. Since there is no "version" or similar metadata name,
        # we need to prefix it with the cura namespace, as specified by the 3MF specification.
        version_metadata = ET.SubElement(model, "metadata", name = "cura:version")
        version_metadata.text = Application.getInstance().getVersion()

        resources = ET.SubElement(model, "resources")
        build = ET.SubElement(model, "build")

        added_nodes = []
        index = 0  # Ensure index always exists (even if there are no nodes to write)

        # Write all nodes with mesh data to the file as objects inside the resources tag
        for index, n in enumerate(MeshWriter._meshNodes(nodes)):
            added_nodes.append(n)  # Save the nodes that have mesh data
            object = ET.SubElement(resources, "object", id = str(index + 1), type = "model")
            mesh = ET.SubElement(object, "mesh")

            mesh_data = n.getMeshData()
            vertices = ET.SubElement(mesh, "vertices")
            verts = mesh_data.getVertices()

            if verts is None:
                Logger.log("d", "3MF writer can't write nodes without mesh data. Skipping this node.")
                continue  # No mesh data, nothing to do.

            if mesh_data.hasIndices():
                for face in mesh_data.getIndices():
                    v1 = verts[face[0]]
                    v2 = verts[face[1]]
                    v3 = verts[face[2]]
                    xml_vertex1 = ET.SubElement(vertices, "vertex", x = str(v1[0]), y = str(v1[1]), z = str(v1[2]))
                    xml_vertex2 = ET.SubElement(vertices, "vertex", x = str(v2[0]), y = str(v2[1]), z = str(v2[2]))
                    xml_vertex3 = ET.SubElement(vertices, "vertex", x = str(v3[0]), y = str(v3[1]), z = str(v3[2]))

                triangles = ET.SubElement(mesh, "triangles")
                for face in mesh_data.getIndices():
                    triangle = ET.SubElement(triangles, "triangle", v1 = str(face[0]), v2 = str(face[1]), v3 = str(face[2]))
            else:
                triangles = ET.SubElement(mesh, "triangles")
                for idx, vert in enumerate(verts):
                    xml_vertex = ET.SubElement(vertices, "vertex", x = str(vert[0]), y = str(vert[1]), z = str(vert[2]))

                    # If we have no faces defined, assume that every three subsequent vertices form a face.
                    if idx % 3 == 0:
                        triangle = ET.SubElement(triangles, "triangle", v1 = str(idx), v2 = str(idx + 1), v3 = str(idx + 2))

            # Handle per-object settings
            stack = n.callDecoration("getStack")
            if stack is not None:
                changed_setting_keys = set(stack.getTop().getAllKeys())

                # Ensure that we save the extruder used for this object.
                if stack.getProperty("machine_extruder_count", "value") > 1:
                    changed_setting_keys.add("extruder_nr")

                settings_xml = ET.SubElement(object, "settings", xmlns = self._namespaces["cura"])

                # Get values for all changed settings & save them.
                for key in changed_setting_keys:
                    setting_xml = ET.SubElement(settings_xml, "setting", key = key)
                    setting_xml.text = str(stack.getProperty(key, "value"))

        # Add one to the index, as we haven't incremented it in the last iteration.
        index += 1

        # Check the parents of the nodes with mesh data and ensure that they are also added.
        nodes_to_add = set()
        for node in added_nodes:
            parent_node = node.getParent()
            while parent_node is not None:
                if parent_node.callDecoration("isGroup"):
                    nodes_to_add.add(parent_node)
                    parent_node = parent_node.getParent()
                else:
                    parent_node = None

        # Sort all the nodes by depth (so nodes with the highest depth are handled first)
        sorted_nodes_to_add = sorted(nodes_to_add, key = lambda node: node.getDepth(), reverse = True)

        # We have already saved the nodes with mesh data, but now we also want to save the nodes required for the scene
        for node in sorted_nodes_to_add:
            object = ET.SubElement(resources, "object", id = str(index + 1), type = "model")
            components = ET.SubElement(object, "components")
            for child in node.getChildren():
                if child in added_nodes:
                    component = ET.SubElement(components, "component", objectid = str(added_nodes.index(child) + 1), transform = self._convertMatrixToString(child.getLocalTransformation()))
            index += 1
            added_nodes.append(node)

        # Create a transformation matrix to convert from our world space into 3MF.
        # First step: flip the y and z axes.
        transformation_matrix = Matrix()
        transformation_matrix._data[1, 1] = 0
        transformation_matrix._data[1, 2] = -1
        transformation_matrix._data[2, 1] = 1
        transformation_matrix._data[2, 2] = 0

        global_container_stack = UM.Application.getInstance().getGlobalContainerStack()
        # Second step: 3MF defines the left corner of the machine as the origin, whereas Cura uses the
        # center of the build volume.
        if global_container_stack:
            translation_vector = Vector(x = global_container_stack.getProperty("machine_width", "value") / 2,
                                        y = global_container_stack.getProperty("machine_depth", "value") / 2,
                                        z = 0)
            translation_matrix = Matrix()
            translation_matrix.setByTranslation(translation_vector)
            transformation_matrix.preMultiply(translation_matrix)

        # Find out what the final build items are and add them.
        for node in added_nodes:
            if node.getParent().callDecoration("isGroup") is None:
                node_matrix = node.getLocalTransformation()
                ET.SubElement(build, "item", objectid = str(added_nodes.index(node) + 1), transform = self._convertMatrixToString(node_matrix.preMultiply(transformation_matrix)))

        archive.writestr(model_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(model))
        archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types))
        archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element))
    except Exception:
        Logger.logException("e", "Error writing zip file")
        return False
    finally:
        if not self._store_archive:
            archive.close()
        else:
            self._archive = archive

    return True
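
# Hypothetical usage of the writer above; `writer` and `nodes` stand for a writer
# instance and a list of scene nodes and are not defined in this document.
with open("output.3mf", "wb") as stream:
    if not writer.write(stream, nodes):
        Logger.log("e", "Writing the 3MF archive failed")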
def saveContainer(self, container: "ContainerInterface") -> None:
    """Find out where to save a container and save it there."""
    try:
        data = container.serialize()
    except NotImplementedError:
        return
    except Exception:
        Logger.logException("e", "An exception occurred when serializing container %s", container.getId())
        return

    mime_type = ContainerRegistry.getMimeTypeForContainer(type(container))
    if mime_type is None:
        Logger.log("w", "Failed to get MIME type for container type [%s]", type(container))
        return

    file_name = urllib.parse.quote_plus(container.getId()) + "." + mime_type.preferredSuffix
    container_type = container.getMetaDataEntry("type")
    resource_types = ContainerRegistry.getInstance().getResourceTypes()
    if container_type in resource_types:
        path = Resources.getStoragePath(resource_types[container_type], file_name)
        try:
            with SaveFile(path, "wt") as f:
                f.write(data)
        except OSError as e:
            Logger.log("e", "Unable to store local container to path {path}: {err}".format(path = path, err = str(e)))
            return
        container.setPath(path)

        # Register it internally as being saved
        self._id_to_path[container.getId()] = path
        mime = self._pathToMime(path)
        if mime is not None:
            self._id_to_mime[container.getId()] = mime
        else:
            Logger.log("e", "Failed to find MIME type for container ID [%s] with path [%s]", container.getId(), path)

        base_file = container.getMetaData().get("base_file")
        if base_file:
            for container_md in ContainerRegistry.getInstance().findContainersMetadata(base_file = base_file):
                self._id_to_path[container_md["id"]] = path
                mime = self._pathToMime(path)
                if mime is not None:
                    self._id_to_mime[container_md["id"]] = mime
                else:
                    # Log the ID of the container we are mapping, not the container being saved.
                    Logger.log("e", "Failed to find MIME type for container ID [%s] with path [%s]", container_md["id"], path)
    else:
        Logger.log("w", "Dirty container [%s] is not saved because the resource type is unknown in ContainerRegistry", container_type)
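
# Example of the file-name scheme used above: the container ID is URL-quoted with
# quote_plus and the MIME type's preferred suffix is appended. The ID and suffix
# values here are illustrative.
import urllib.parse

file_name = urllib.parse.quote_plus("my profile #1") + "." + "inst.cfg"
# file_name == "my+profile+%231.inst.cfg"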
def read(self, file_name):
    archive = zipfile.ZipFile(file_name, "r")

    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to
    # re-use its parsing code).
    temp_preferences = Preferences()
    # We need to wrap the file in a TextIOWrapper, else the archive parser breaks.
    temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))

    # Copy a number of settings from the temp preferences to the global preferences
    global_preferences = Preferences.getInstance()

    visible_settings = temp_preferences.getValue("general/visible_settings")
    if visible_settings is None:
        Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged")
    else:
        global_preferences.setValue("general/visible_settings", visible_settings)

    categories_expanded = temp_preferences.getValue("cura/categories_expanded")
    if categories_expanded is None:
        Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged")
    else:
        global_preferences.setValue("cura/categories_expanded", categories_expanded)

    Application.getInstance().expandedCategoriesChanged.emit()  # Notify the GUI of the change

    self._id_mapping = {}

    # We don't add containers right away, but wait until right before the stack serialization.
    # We do this so that if something goes wrong, it's easier to clean up.
    containers_to_add = []

    # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few
    # TODO: cases that the loaded container is the same (most notably for materials & definitions).
    # TODO: It might be necessary to add smarter checking in the future.
    Logger.log("d", "Workspace loading is checking definitions...")
    # Get all the definition files & check if they exist. If not, add them.
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for definition_container_file in definition_container_files:
        container_id = self._stripFileToId(definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id = container_id)
        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"))
            self._container_registry.addContainer(definition_container)
        Job.yieldThread()

    Logger.log("d", "Workspace loading is checking materials...")
    material_containers = []
    # Get all the material files and check if they exist. If not, add them.
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id = container_id)
            if not materials:
                material_container = xml_material_profile(container_id)
                material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                containers_to_add.append(material_container)
            else:
                if not materials[0].isReadOnly():  # Only modify materials if they are not read-only.
if self._resolve_strategies["material"] == "override": materials[0].deserialize( archive.open(material_container_file).read(). decode("utf-8")) elif self._resolve_strategies["material"] == "new": # Note that we *must* deserialize it with a new ID, as multiple containers will be # auto created & added. material_container = xml_material_profile( self.getNewId(container_id)) material_container.deserialize( archive.open(material_container_file).read(). decode("utf-8")) containers_to_add.append(material_container) material_containers.append(material_container) Job.yieldThread() Logger.log("d", "Workspace loading is checking instance containers...") # Get quality_changes and user profiles saved in the workspace instance_container_files = [ name for name in cura_file_names if name.endswith(self._instance_container_suffix) ] user_instance_containers = [] quality_changes_instance_containers = [] for instance_container_file in instance_container_files: container_id = self._stripFileToId(instance_container_file) instance_container = InstanceContainer(container_id) # Deserialize InstanceContainer by converting read data from bytes to string instance_container.deserialize( archive.open(instance_container_file).read().decode("utf-8")) container_type = instance_container.getMetaDataEntry("type") Job.yieldThread() if container_type == "user": # Check if quality changes already exists. user_containers = self._container_registry.findInstanceContainers( id=container_id) if not user_containers: containers_to_add.append(instance_container) else: if self._resolve_strategies[ "machine"] == "override" or self._resolve_strategies[ "machine"] is None: user_containers[0].deserialize( archive.open(instance_container_file).read(). decode("utf-8")) elif self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. extruder_id = instance_container.getMetaDataEntry( "extruder", None) if extruder_id: new_id = self.getNewId( extruder_id) + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry( "extruder", self.getNewId(extruder_id)) containers_to_add.append(instance_container) machine_id = instance_container.getMetaDataEntry( "machine", None) if machine_id: new_id = self.getNewId( machine_id) + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry( "machine", self.getNewId(machine_id)) containers_to_add.append(instance_container) user_instance_containers.append(instance_container) elif container_type == "quality_changes": # Check if quality changes already exists. quality_changes = self._container_registry.findInstanceContainers( id=container_id) if not quality_changes: containers_to_add.append(instance_container) else: if self._resolve_strategies[ "quality_changes"] == "override": quality_changes[0].deserialize( archive.open(instance_container_file).read(). decode("utf-8")) elif self._resolve_strategies["quality_changes"] is None: # The ID already exists, but nothing in the values changed, so do nothing. pass quality_changes_instance_containers.append(instance_container) else: continue # Add all the containers right before we try to add / serialize the stack for container in containers_to_add: self._container_registry.addContainer(container) container.setDirty(True) # Get the stack(s) saved in the workspace. 
Logger.log("d", "Workspace loading is checking stacks containers...") container_stack_files = [ name for name in cura_file_names if name.endswith(self._container_stack_suffix) ] global_stack = None extruder_stacks = [] container_stacks_added = [] try: for container_stack_file in container_stack_files: container_id = self._stripFileToId(container_stack_file) # Check if a stack by this ID already exists; container_stacks = self._container_registry.findContainerStacks( id=container_id) if container_stacks: stack = container_stacks[0] if self._resolve_strategies["machine"] == "override": # TODO: HACK # There is a machine, check if it has authenticationd data. If so, keep that data. network_authentication_id = container_stacks[ 0].getMetaDataEntry("network_authentication_id") network_authentication_key = container_stacks[ 0].getMetaDataEntry("network_authentication_key") container_stacks[0].deserialize( archive.open(container_stack_file).read().decode( "utf-8")) if network_authentication_id: container_stacks[0].addMetaDataEntry( "network_authentication_id", network_authentication_id) if network_authentication_key: container_stacks[0].addMetaDataEntry( "network_authentication_key", network_authentication_key) elif self._resolve_strategies["machine"] == "new": new_id = self.getNewId(container_id) stack = ContainerStack(new_id) stack.deserialize( archive.open(container_stack_file).read().decode( "utf-8")) # Ensure a unique ID and name stack._id = new_id # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the id of the # bound machine also needs to change. if stack.getMetaDataEntry("machine", None): stack.setMetaDataEntry( "machine", self.getNewId( stack.getMetaDataEntry("machine"))) if stack.getMetaDataEntry("type") != "extruder_train": # Only machines need a new name, stacks may be non-unique stack.setName( self._container_registry.uniqueName( stack.getName())) container_stacks_added.append(stack) self._container_registry.addContainer(stack) else: Logger.log( "w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"]) else: stack = ContainerStack(container_id) # Deserialize stack by converting read data from bytes to string stack.deserialize( archive.open(container_stack_file).read().decode( "utf-8")) container_stacks_added.append(stack) self._container_registry.addContainer(stack) if stack.getMetaDataEntry("type") == "extruder_train": extruder_stacks.append(stack) else: global_stack = stack Job.yieldThread() except: Logger.logException( "w", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. for container in containers_to_add: self._container_registry.getInstance().removeContainer( container.getId()) for container in container_stacks_added: self._container_registry.getInstance().removeContainer( container.getId()) return None if self._resolve_strategies["machine"] == "new": # A new machine was made, but it was serialized with the wrong user container. Fix that now. 
        for container in user_instance_containers:
            extruder_id = container.getMetaDataEntry("extruder", None)
            if extruder_id:
                for extruder in extruder_stacks:
                    if extruder.getId() == extruder_id:
                        extruder.replaceContainer(0, container)
                        continue

            machine_id = container.getMetaDataEntry("machine", None)
            if machine_id:
                if global_stack.getId() == machine_id:
                    global_stack.replaceContainer(0, container)
                    continue

    if self._resolve_strategies["quality_changes"] == "new":
        # Quality changes need to get a new ID, and be added to the registry and to the right stacks
        for container in quality_changes_instance_containers:
            old_id = container.getId()
            container.setName(self._container_registry.uniqueName(container.getName()))

            # We're not really supposed to change the ID in normal cases, but this is an exception.
            container._id = self.getNewId(container.getId())

            # The container was not added yet, as it didn't have a unique ID. It does now, so add it.
            self._container_registry.addContainer(container)

            # Replace the quality changes container
            old_container = global_stack.findContainer({"type": "quality_changes"})
            if old_container.getId() == old_id:
                quality_changes_index = global_stack.getContainerIndex(old_container)
                global_stack.replaceContainer(quality_changes_index, container)
                continue

            for stack in extruder_stacks:
                old_container = stack.findContainer({"type": "quality_changes"})
                if old_container.getId() == old_id:
                    quality_changes_index = stack.getContainerIndex(old_container)
                    stack.replaceContainer(quality_changes_index, container)

    if self._resolve_strategies["material"] == "new":
        for material in material_containers:
            old_material = global_stack.findContainer({"type": "material"})
            if old_material.getId() in self._id_mapping:
                material_index = global_stack.getContainerIndex(old_material)
                global_stack.replaceContainer(material_index, material)
                continue

            for stack in extruder_stacks:
                old_material = stack.findContainer({"type": "material"})
                if old_material.getId() in self._id_mapping:
                    material_index = stack.getContainerIndex(old_material)
                    stack.replaceContainer(material_index, material)
                    continue

    if extruder_stacks:
        for stack in extruder_stacks:
            ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId())
    else:
        # The machine has no extruders, but it still needs to be registered with the extruder manager.
        ExtruderManager.getInstance().registerExtruder(None, global_stack.getId())

    Logger.log("d", "Workspace loading is notifying the rest of the code of changes...")

    # Notify everything/everyone that needs to be notified about the changes.
    global_stack.containersChanged.emit(global_stack.getTop())
    for stack in extruder_stacks:
        stack.setNextStack(global_stack)
        stack.containersChanged.emit(stack.getTop())

    # Actually change the active machine.
    Application.getInstance().setGlobalContainerStack(global_stack)

    # Load all the nodes / mesh data of the workspace
    nodes = self._3mf_mesh_reader.read(file_name)
    if nodes is None:
        nodes = []
    return nodes
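
# For reference, a sketch of the shape of self._resolve_strategies as consulted
# throughout read(). The keys and the "override" / "new" / None values are exactly
# the ones checked above; how the mapping gets populated is outside this document.
resolve_strategies_example = {
    "machine": "override",       # or "new" or None
    "quality_changes": "new",    # or "override" or None
    "material": None,            # or "override" or "new"
}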
def initializeBeforePluginsAreLoaded(self) -> None:
    config_path = Resources.getConfigStoragePath()

    # File to store plugin info, such as which ones to install/remove and which ones are disabled.
    # We can load this here because at this point we already know the actual application name, and
    # therefore the directory name.
    self._plugin_config_filename = os.path.join(os.path.abspath(config_path), "plugins.json")  # type: str

    from UM.Settings.ContainerRegistry import ContainerRegistry
    container_registry = ContainerRegistry.getInstance()

    try:
        with container_registry.lockFile():
            # Load the plugin info if it exists
            if os.path.exists(self._plugin_config_filename):
                Logger.log("i", "Loading plugin configuration file '%s'", self._plugin_config_filename)
                with open(self._plugin_config_filename, "r", encoding = "utf-8") as f:
                    data = json.load(f)
                    self._disabled_plugins = data["disabled"]
                    self._plugins_to_install = data["to_install"]
                    self._plugins_to_remove = data["to_remove"]
    except Exception:
        Logger.logException("e", "Failed to load plugin configuration file '%s'", self._plugin_config_filename)

    # Also load data from the preferences, where the plugin info used to be saved
    preferences = self._application.getPreferences()
    disabled_plugins = preferences.getValue("general/disabled_plugins")
    disabled_plugins = disabled_plugins.split(",") if disabled_plugins else []
    disabled_plugins = [plugin for plugin in disabled_plugins if len(plugin.strip()) > 0]
    for plugin_id in disabled_plugins:
        if plugin_id not in self._disabled_plugins:
            self._disabled_plugins.append(plugin_id)

    plugins_to_remove = preferences.getValue("general/plugins_to_remove")
    plugins_to_remove = plugins_to_remove.split(",") if plugins_to_remove else []
    for plugin_id in plugins_to_remove:
        if plugin_id not in self._plugins_to_remove:
            self._plugins_to_remove.append(plugin_id)

    # Remove the plugins that need to be removed
    for plugin_id in self._plugins_to_remove:
        self._removePlugin(plugin_id)
    self._plugins_to_remove = []
    if plugins_to_remove:  # Only reset the legacy preference if it actually held anything
        preferences.setValue("general/plugins_to_remove", "")
    self._savePluginData()

    # Install the plugins that need to be installed (overwriting any existing version)
    for plugin_id, plugin_info in self._plugins_to_install.items():
        self._installPlugin(plugin_id, plugin_info["filename"])
    self._plugins_to_install = {}
    self._savePluginData()
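
# Illustrative contents of the plugins.json file read above. Only the three keys the
# code accesses ("disabled", "to_install", "to_remove") and the nested "filename"
# field are grounded in this document; the example values are made up.
example_plugin_data = {
    "disabled": ["PluginA"],
    "to_install": {"PluginB": {"filename": "/path/to/PluginB.plugin"}},
    "to_remove": ["PluginC"],
}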
def deserialize(self, serialized):
    # Update the serialized data first
    from UM.Settings.Interfaces import ContainerInterface
    serialized = ContainerInterface.deserialize(self, serialized)

    try:
        data = ET.fromstring(serialized)
    except Exception:
        Logger.logException("e", "An exception occurred while parsing the material profile")
        return

    # Reset previous metadata
    self.clearData()  # Ensure any previous data is gone.
    meta_data = {}
    meta_data["type"] = "material"
    meta_data["base_file"] = self.id
    meta_data["status"] = "unknown"  # TODO: Add material verification

    common_setting_values = {}

    inherits = data.find("./um:inherits", self.__namespaces)
    if inherits is not None:
        inherited = self._resolveInheritance(inherits.text)
        data = self._mergeXML(inherited, data)

    if "version" in data.attrib:
        meta_data["setting_version"] = self.xmlVersionToSettingVersion(data.attrib["version"])
    else:
        meta_data["setting_version"] = self.xmlVersionToSettingVersion("1.2")  # 1.2 and lower didn't have that version number there yet.

    metadata = data.iterfind("./um:metadata/*", self.__namespaces)
    for entry in metadata:
        tag_name = _tag_without_namespace(entry)

        if tag_name == "name":
            brand = entry.find("./um:brand", self.__namespaces)
            material = entry.find("./um:material", self.__namespaces)
            color = entry.find("./um:color", self.__namespaces)
            label = entry.find("./um:label", self.__namespaces)

            if label is not None:
                self._name = label.text
            else:
                self._name = self._profile_name(material.text, color.text)
            meta_data["brand"] = brand.text
            meta_data["material"] = material.text
            meta_data["color_name"] = color.text
            continue
        meta_data[tag_name] = entry.text

        if tag_name in self.__material_metadata_setting_map:
            common_setting_values[self.__material_metadata_setting_map[tag_name]] = entry.text

    if "description" not in meta_data:
        meta_data["description"] = ""

    if "adhesion_info" not in meta_data:
        meta_data["adhesion_info"] = ""

    property_values = {}
    properties = data.iterfind("./um:properties/*", self.__namespaces)
    for entry in properties:
        tag_name = _tag_without_namespace(entry)
        property_values[tag_name] = entry.text

        if tag_name in self.__material_properties_setting_map:
            common_setting_values[self.__material_properties_setting_map[tag_name]] = entry.text

    meta_data["approximate_diameter"] = str(round(float(property_values.get("diameter", 2.85))))  # In mm
    meta_data["properties"] = property_values

    self.setDefinition(ContainerRegistry.getInstance().findDefinitionContainers(id = "fdmprinter")[0])

    common_compatibility = True
    settings = data.iterfind("./um:settings/um:setting", self.__namespaces)
    for entry in settings:
        key = entry.get("key")
        if key in self.__material_settings_setting_map:
            common_setting_values[self.__material_settings_setting_map[key]] = entry.text
        elif key in self.__unmapped_settings:
            if key == "hardware compatible":
                common_compatibility = parseBool(entry.text)
        else:
            Logger.log("d", "Unsupported material setting %s", key)

    self._cached_values = common_setting_values  # From the InstanceContainer ancestor

    meta_data["compatible"] = common_compatibility
    self.setMetaData(meta_data)
    self._dirty = False

    machines = data.iterfind("./um:settings/um:machine", self.__namespaces)
    for machine in machines:
        machine_compatibility = common_compatibility
        machine_setting_values = {}
        settings = machine.iterfind("./um:setting", self.__namespaces)
        for entry in settings:
            key = entry.get("key")
            if key in self.__material_settings_setting_map:
                machine_setting_values[self.__material_settings_setting_map[key]] = entry.text
            elif key in self.__unmapped_settings:
                if key == "hardware compatible":
compatible": machine_compatibility = parseBool(entry.text) else: Logger.log("d", "Unsupported material setting %s", key) cached_machine_setting_properties = common_setting_values.copy() cached_machine_setting_properties.update(machine_setting_values) identifiers = machine.iterfind("./um:machine_identifier", self.__namespaces) for identifier in identifiers: machine_id = self.__product_id_map.get(identifier.get("product"), None) if machine_id is None: # Lets try again with some naive heuristics. machine_id = identifier.get("product").replace(" ", "").lower() definitions = ContainerRegistry.getInstance().findDefinitionContainers(id = machine_id) if not definitions: Logger.log("w", "No definition found for machine ID %s", machine_id) continue definition = definitions[0] if machine_compatibility: new_material_id = self.id + "_" + machine_id new_material = XmlMaterialProfile(new_material_id) # Update the private directly, as we want to prevent the lookup that is done when using setName new_material._name = self.getName() new_material.setMetaData(copy.deepcopy(self.getMetaData())) new_material.setDefinition(definition) # Don't use setMetadata, as that overrides it for all materials with same base file new_material.getMetaData()["compatible"] = machine_compatibility new_material.setCachedValues(cached_machine_setting_properties) new_material._dirty = False ContainerRegistry.getInstance().addContainer(new_material) hotends = machine.iterfind("./um:hotend", self.__namespaces) for hotend in hotends: hotend_id = hotend.get("id") if hotend_id is None: continue variant_containers = ContainerRegistry.getInstance().findInstanceContainers(id = hotend_id) if not variant_containers: # It is not really properly defined what "ID" is so also search for variants by name. variant_containers = ContainerRegistry.getInstance().findInstanceContainers(definition = definition.id, name = hotend_id) if not variant_containers: #Logger.log("d", "No variants found with ID or name %s for machine %s", hotend_id, definition.id) continue hotend_compatibility = machine_compatibility hotend_setting_values = {} settings = hotend.iterfind("./um:setting", self.__namespaces) for entry in settings: key = entry.get("key") if key in self.__material_settings_setting_map: hotend_setting_values[self.__material_settings_setting_map[key]] = entry.text elif key in self.__unmapped_settings: if key == "hardware compatible": hotend_compatibility = parseBool(entry.text) else: Logger.log("d", "Unsupported material setting %s", key) new_hotend_id = self.id + "_" + machine_id + "_" + hotend_id.replace(" ", "_") new_hotend_material = XmlMaterialProfile(new_hotend_id) # Update the private directly, as we want to prevent the lookup that is done when using setName new_hotend_material._name = self.getName() new_hotend_material.setMetaData(copy.deepcopy(self.getMetaData())) new_hotend_material.setDefinition(definition) new_hotend_material.addMetaDataEntry("variant", variant_containers[0].id) # Don't use setMetadata, as that overrides it for all materials with same base file new_hotend_material.getMetaData()["compatible"] = hotend_compatibility cached_hotend_setting_properties = cached_machine_setting_properties.copy() cached_hotend_setting_properties.update(hotend_setting_values) new_hotend_material.setCachedValues(cached_hotend_setting_properties) new_hotend_material._dirty = False ContainerRegistry.getInstance().addContainer(new_hotend_material)
def loadPlugin(self, plugin_id: str) -> None:
    # If the plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # Find the actual plugin on drive, performing security checks if necessary:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise an error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but not yet in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # Do not load a plugin that has been disabled
    if plugin_id in self._disabled_plugins:
        Logger.log("i", "Plugin [%s] has been disabled. Skip loading it.", plugin_id)
        return

    # If the API version is incompatible, don't load it.
    supported_sdk_versions = self._metadata[plugin_id].get("plugin", {}).get("supported_sdk_versions", [Version("0")])
    is_plugin_supported = False
    for supported_sdk_version in supported_sdk_versions:
        is_plugin_supported |= self.isPluginApiVersionCompatible(supported_sdk_version)
        if is_plugin_supported:
            break

    if not is_plugin_supported:
        Logger.log("w", "Plugin [%s] with supported sdk versions [%s] is incompatible with the current sdk version [%s].",
                   plugin_id, [str(version) for version in supported_sdk_versions], self._api_version)
        self._outdated_plugins.append(plugin_id)
        return

    try:
        to_register = plugin.register(self._application)  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("w", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                for metadata_index, nested_plugin_object in enumerate(plugin_object):
                    nested_plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                    all_metadata = self._metadata[plugin_id].get(plugin_type, [])
                    try:
                        nested_plugin_object.setMetaData(all_metadata[metadata_index])
                    except IndexError:
                        nested_plugin_object.setMetaData({})
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get("plugin", {}).get("version"))
                metadata = self._metadata[plugin_id].get(plugin_type, {})
                if type(metadata) == list:
                    try:
                        metadata = metadata[0]
                    except IndexError:
                        metadata = {}
                plugin_object.setMetaData(metadata)
                self._addPluginObject(plugin_object, plugin_id, plugin_type)

        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.info("Loaded plugin %s", plugin_id)
    except Exception:
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
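
# Illustrative shape of the per-plugin metadata consulted by the version check above:
# the "plugin" sub-dictionary carries "version" and the "supported_sdk_versions" list
# that the compatibility loop iterates over. Field values are assumptions; in the
# plugin's own plugin.json the SDK versions would presumably be plain strings before
# being parsed into Version objects.
example_plugin_metadata = {
    "plugin": {
        "version": "1.0.0",
        "supported_sdk_versions": [Version("5.0.0")],
    },
}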