def _onDevicesDiscovered(self, clusters: List[CloudClusterResponse]) -> None:
    """**Synchronously** create machines for discovered devices

    Any new machines are made available to the user. May take a long time to complete. As this code
    needs access to the Application and blocks the GIL, creating a Job for this would not make sense.
    Shows a Message informing the user of progress.

    :param clusters: cloud cluster responses for all printers linked to the user's account.
    """
    new_devices = []
    remote_clusters_added = False

    # Map host GUID -> cluster id for every cloud printer Cura already knows about,
    # so re-registered printers (same host, new cluster id) can be recognized below.
    host_guid_map = {machine.getMetaDataEntry(self.META_HOST_GUID): device_cluster_id
                     for device_cluster_id, machine in self._um_cloud_printers.items()
                     if machine.getMetaDataEntry(self.META_HOST_GUID)}
    machine_manager = CuraApplication.getInstance().getMachineManager()

    for cluster_data in clusters:
        device = CloudOutputDevice(self._api, cluster_data)
        # If the machine already existed before, it will be present in the host_guid_map
        if cluster_data.host_guid in host_guid_map:
            machine = machine_manager.getMachine(device.printerType,
                                                 {self.META_HOST_GUID: cluster_data.host_guid})
            if machine and machine.getMetaDataEntry(self.META_CLUSTER_ID) != device.key:
                # If the retrieved device has a different cluster_id than the existing machine,
                # bring the existing machine up-to-date.
                self._updateOutdatedMachine(outdated_machine=machine, new_cloud_output_device=device)

        # Create a machine if we don't already have it. Do not make it the active machine.
        # We only need to add it if it wasn't already added by "local" network or by cloud.
        if machine_manager.getMachine(device.printerType, {self.META_CLUSTER_ID: device.key}) is None \
                and machine_manager.getMachine(device.printerType,
                                               {self.META_NETWORK_KEY: cluster_data.host_name + "*"}) is None:
            # The host name is part of the network key.
            new_devices.append(device)
        elif device.getId() not in self._remote_clusters:
            self._remote_clusters[device.getId()] = device
            remote_clusters_added = True
        # If a printer that was removed from the account is re-added, change its metadata to mark
        # it not removed from the account
        elif not parseBool(self._um_cloud_printers[device.key].getMetaDataEntry(
                META_UM_LINKED_TO_ACCOUNT, "true")):
            self._um_cloud_printers[device.key].setMetaDataEntry(META_UM_LINKED_TO_ACCOUNT, True)

    # Inform the Cloud printers model about new devices.
    new_devices_list_of_dicts = [{
        "key": d.getId(),
        "name": d.name,
        "machine_type": d.printerTypeName,
        "firmware_version": d.firmwareVersion
    } for d in new_devices]
    discovered_cloud_printers_model = CuraApplication.getInstance().getDiscoveredCloudPrintersModel()
    discovered_cloud_printers_model.addDiscoveredCloudPrinters(new_devices_list_of_dicts)

    if not new_devices:
        if remote_clusters_added:
            self._connectToActiveMachine()
        return

    # Sort new_devices on online status first, alphabetical second.
    # Since the first device might be activated in case there is no active printer yet,
    # it would be nice to prioritize online devices.
    # FIX: "not c.friendly_name is None" replaced with the idiomatic "is not None".
    online_cluster_names = {c.friendly_name.lower()
                            for c in clusters
                            if c.is_online and c.friendly_name is not None}
    new_devices.sort(key=lambda x: ("a{}" if x.name.lower() in online_cluster_names
                                    else "b{}").format(x.name.lower()))

    image_path = os.path.join(
        CuraApplication.getInstance().getPluginRegistry().getPluginPath("UM3NetworkPrinting") or "",
        "resources", "svg", "cloud-flow-completed.svg")

    message = Message(
        title=self.I18N_CATALOG.i18ncp("info:status",
                                       "New printer detected from your Ultimaker account",
                                       "New printers detected from your Ultimaker account",
                                       len(new_devices)),
        progress=0,
        lifetime=0,
        image_source=image_path)
    message.show()

    for idx, device in enumerate(new_devices):
        message_text = self.I18N_CATALOG.i18nc("info:status",
                                               "Adding printer {} ({}) from your account",
                                               device.name, device.printerTypeName)
        message.setText(message_text)
        if len(new_devices) > 1:
            message.setProgress((idx / len(new_devices)) * 100)
        CuraApplication.getInstance().processEvents()
        self._remote_clusters[device.getId()] = device

        # If there is no active machine, activate the first available cloud printer
        activate = not CuraApplication.getInstance().getMachineManager().activeMachine
        self._createMachineFromDiscoveredDevice(device.getId(), activate=activate)

    message.setProgress(None)

    # Summarize the added printers; only the first few are listed explicitly.
    max_disp_devices = 3
    if len(new_devices) > max_disp_devices:
        num_hidden = len(new_devices) - max_disp_devices
        device_name_list = ["<li>{} ({})</li>".format(device.name, device.printerTypeName)
                            for device in new_devices[0:max_disp_devices]]
        device_name_list.append(self.I18N_CATALOG.i18nc("info:hidden list items",
                                                        "<li>... and {} others</li>", num_hidden))
        device_names = "".join(device_name_list)
    else:
        device_names = "".join(["<li>{} ({})</li>".format(device.name, device.printerTypeName)
                                for device in new_devices])

    message_text = self.I18N_CATALOG.i18nc("info:status",
                                           "Cloud printers added from your account:<ul>{}</ul>",
                                           device_names)
    message.setText(message_text)
def _onDevicesDiscovered(self, clusters: List[CloudClusterResponse]) -> None:
    """**Synchronously** create machines for discovered devices

    Any new machines are made available to the user. May take a long time to complete. As this code
    needs access to the Application and blocks the GIL, creating a Job for this would not make sense.
    Shows a Message informing the user of progress.

    :param clusters: cloud cluster responses for all printers linked to the user's account.
    """
    new_devices = []
    remote_clusters_added = False
    # Hoisted out of the loop: the machine manager does not change between iterations.
    machine_manager = CuraApplication.getInstance().getMachineManager()

    for cluster_data in clusters:
        device = CloudOutputDevice(self._api, cluster_data)
        # Create a machine if we don't already have it. Do not make it the active machine.
        # We only need to add it if it wasn't already added by "local" network or by cloud.
        if machine_manager.getMachine(device.printerType, {self.META_CLUSTER_ID: device.key}) is None \
                and machine_manager.getMachine(device.printerType,
                                               {self.META_NETWORK_KEY: cluster_data.host_name + "*"}) is None:
            # The host name is part of the network key.
            new_devices.append(device)
        elif device.getId() not in self._remote_clusters:
            self._remote_clusters[device.getId()] = device
            remote_clusters_added = True

    # Inform the Cloud printers model about new devices.
    new_devices_list_of_dicts = [{
        "key": d.getId(),
        "name": d.name,
        "machine_type": d.printerTypeName,
        "firmware_version": d.firmwareVersion
    } for d in new_devices]
    discovered_cloud_printers_model = CuraApplication.getInstance().getDiscoveredCloudPrintersModel()
    discovered_cloud_printers_model.addDiscoveredCloudPrinters(new_devices_list_of_dicts)

    if not new_devices:
        if remote_clusters_added:
            self._connectToActiveMachine()
        return

    new_devices.sort(key=lambda x: x.name.lower())

    image_path = os.path.join(
        CuraApplication.getInstance().getPluginRegistry().getPluginPath("UM3NetworkPrinting") or "",
        "resources", "svg", "cloud-flow-completed.svg")

    message = Message(
        title=self.I18N_CATALOG.i18ncp("info:status",
                                       "New printer detected from your Ultimaker account",
                                       "New printers detected from your Ultimaker account",
                                       len(new_devices)),
        progress=0,
        lifetime=0,
        image_source=image_path)
    message.show()

    for idx, device in enumerate(new_devices):
        message_text = self.I18N_CATALOG.i18nc("info:status",
                                               "Adding printer {} ({}) from your account",
                                               device.name, device.printerTypeName)
        message.setText(message_text)
        if len(new_devices) > 1:
            message.setProgress((idx / len(new_devices)) * 100)
        CuraApplication.getInstance().processEvents()
        self._remote_clusters[device.getId()] = device

        # If there is no active machine, activate the first available cloud printer
        activate = not CuraApplication.getInstance().getMachineManager().activeMachine
        self._createMachineFromDiscoveredDevice(device.getId(), activate=activate)

    message.setProgress(None)

    # Summarize the added printers; only the first max_disp_devices are listed explicitly.
    # FIX: previously num_hidden was computed as len - max + 1 and used as the slice bound,
    # which both truncated the visible list and reported a wrong "and {} others" count
    # (e.g. 5 devices -> "and 3 others" with only 2 actually hidden).
    max_disp_devices = 3
    if len(new_devices) > max_disp_devices:
        num_hidden = len(new_devices) - max_disp_devices
        device_name_list = ["- {} ({})".format(device.name, device.printerTypeName)
                            for device in new_devices[0:max_disp_devices]]
        device_name_list.append(self.I18N_CATALOG.i18nc("info:hidden list items",
                                                        "- and {} others", num_hidden))
        device_names = "\n".join(device_name_list)
    else:
        device_names = "\n".join(["- {} ({})".format(device.name, device.printerTypeName)
                                  for device in new_devices])

    message_text = self.I18N_CATALOG.i18nc("info:status",
                                           "Cloud printers added from your account:\n{}",
                                           device_names)
    message.setText(message_text)
def _installPackage(self, installation_package_data: dict):
    """Install one package from its cached archive file.

    :param installation_package_data: dict with "package_info" (package metadata,
        must contain "package_id") and "filename" (path to the cached package archive).
    """
    package_info = installation_package_data["package_info"]
    filename = installation_package_data["filename"]
    package_id = package_info["package_id"]
    Logger.log("i", "Installing package [%s] from file [%s]", package_id, filename)

    # Load the cached package file and extract all contents to a temporary directory
    if not os.path.exists(filename):
        Logger.log("w", "Package [%s] file '%s' is missing, cannot install this package",
                   package_id, filename)
        return
    try:
        temp_dir = tempfile.TemporaryDirectory()
        with zipfile.ZipFile(filename, "r") as archive:
            archive.extractall(temp_dir.name)
    except Exception:
        Logger.logException("e", "Failed to install package from file [%s]", filename)
        return

    # Remove the old version first and then install the new one.
    try:
        self._purgePackage(package_id)
    except Exception as e:
        # FIX: the two string literals were concatenated without a separating space,
        # which rendered as "...before installingnew version".
        message = Message(catalog.i18nc(
            "@error:update",
            "There was an error uninstalling the package {package} before installing "
            "new version:\n{error}.\nPlease try to upgrade again later.".format(
                package=package_id, error=str(e))),
            title=catalog.i18nc("@info:title", "Updating error"))
        message.show()
        return

    # Copy the extracted folders to their installation locations.
    for sub_dir_name, installation_root_dir in self._installation_dirs_dict.items():
        src_dir_path = os.path.join(temp_dir.name, "files", sub_dir_name)
        dst_dir_path = os.path.join(installation_root_dir, package_id)
        if not os.path.exists(src_dir_path):
            continue
        self.__installPackageFiles(package_id, src_dir_path, dst_dir_path)

    # FIX: remove the temporary directory explicitly instead of leaving it to
    # garbage collection of the TemporaryDirectory object.
    temp_dir.cleanup()

    # Remove the cached archive file; a failure here is non-fatal.
    try:
        os.remove(filename)
    except Exception:
        Logger.log("w", "Tried to delete file [%s], but it failed", filename)

    # Move the info to the installed list of packages only when it succeeds
    self._installed_package_dict[package_id] = self._to_install_package_dict[package_id]
    self._installed_package_dict[package_id]["package_info"]["is_installed"] = True
def run(self):
    """Multiply the queued objects and place the copies on the build plate.

    Each object in self._objects is copied self._count times; the arranger searches for a free
    spot per copy. Copies that cannot be placed (or whose source is too big to arrange) are
    stacked above the plate and reported to the user afterwards.
    """
    status_message = Message(i18n_catalog.i18nc("@info:status", "Multiplying and placing objects"),
                             lifetime=0, dismissable=False, progress=0)
    status_message.show()
    scene = Application.getInstance().getController().getScene()

    total_progress = len(self._objects) * self._count
    current_progress = 0

    root = scene.getRoot()
    arranger = Arrange.create(scene_root=root)
    nodes = []

    # FIX: initialize once before the loop. Previously this was reset per object, so a
    # placement failure for an earlier object was forgotten, and the variable was unbound
    # when self._objects was empty.
    found_solution_for_all = True

    for node in self._objects:
        # If object is part of a group, multiply group
        current_node = node
        while current_node.getParent() and current_node.getParent().callDecoration("isGroup"):
            current_node = current_node.getParent()

        # Very large objects are not arranged at all; they are dropped onto the stack below.
        node_too_big = False
        if node.getBoundingBox().width < 300 or node.getBoundingBox().depth < 300:
            offset_shape_arr, hull_shape_arr = ShapeArray.fromNode(current_node,
                                                                   min_offset=self._min_offset)
        else:
            node_too_big = True

        for i in range(self._count):
            # We do place the nodes one by one, as we want to yield in between.
            if not node_too_big:
                node, solution_found = arranger.findNodePlacement(current_node,
                                                                  offset_shape_arr,
                                                                  hull_shape_arr)
            if node_too_big or not solution_found:
                found_solution_for_all = False
                # Stack unplaceable copies above the plate so they remain visible/selectable.
                new_location = node.getPosition()
                new_location = new_location.set(z=100 - i * 20)
                node.setPosition(new_location)

            nodes.append(node)
            current_progress += 1
            status_message.setProgress((current_progress / total_progress) * 100)
            Job.yieldThread()

        Job.yieldThread()

    if nodes:
        op = GroupedOperation()
        for new_node in nodes:
            op.addOperation(AddSceneNodeOperation(new_node, current_node.getParent()))
        op.push()
    status_message.hide()

    if not found_solution_for_all:
        no_full_solution_message = Message(i18n_catalog.i18nc(
            "@info:status",
            "Unable to find a location within the build volume for all objects"))
        no_full_solution_message.show()
def requestWrite(self, node, file_name=None, filter_by_machine=False):
    """Write the mesh under *node* to this removable-drive output device.

    :param node: scene node (or subtree root) containing the mesh data to write.
    :param file_name: optional target name; when None it is derived from the first
        mesh-carrying node found under *node*.
    :param filter_by_machine: ignored — this device always filters by machine (see below).
    :raises OutputDeviceError.DeviceBusyError: a write is already in progress.
    :raises OutputDeviceError.WriteRequestFailedError: no usable file format / file name,
        or the OS refused the write.
    :raises OutputDeviceError.PermissionDeniedError: the drive is not writable.
    """
    filter_by_machine = True  # This plugin is intended to be used by machine (regardless of what it was told to do)
    if self._writing:
        raise OutputDeviceError.DeviceBusyError()

    file_formats = Application.getInstance().getMeshFileHandler().getSupportedFileTypesWrite()  # Formats supported by this application.
    if filter_by_machine:
        machine_file_formats = Application.getInstance().getMachineManager() \
            .getActiveMachineInstance().getMachineDefinition().getFileFormats()
        # Take the intersection between file_formats and machine_file_formats.
        file_formats = list(filter(
            lambda file_format: file_format["mime_type"] in machine_file_formats,
            file_formats))

    if len(file_formats) == 0:
        Logger.log("e", "There are no file formats available to write with!")
        raise OutputDeviceError.WriteRequestFailedError()

    # Just take the first file format available.
    writer = Application.getInstance().getMeshFileHandler().getWriterByMimeType(
        file_formats[0]["mime_type"])
    extension = file_formats[0]["extension"]

    # FIX: was "file_name == None"; identity comparison "is None" is the correct idiom.
    if file_name is None:
        for n in BreadthFirstIterator(node):
            if n.getMeshData():
                file_name = n.getName()
                if file_name:
                    break

    if not file_name:
        Logger.log("e",
                   "Could not determine a proper file name when trying to write to %s, aborting",
                   self.getName())
        raise OutputDeviceError.WriteRequestFailedError()

    if extension:  # Not empty string.
        extension = "." + extension
    file_name = os.path.join(self.getId(), os.path.splitext(file_name)[0] + extension)

    try:
        Logger.log("d", "Writing to %s", file_name)
        stream = open(file_name, "wt")
        job = WriteMeshJob(writer, stream, node, MeshWriter.OutputMode.TextMode)
        job.setFileName(file_name)
        job.progress.connect(self._onProgress)
        job.finished.connect(self._onFinished)

        message = Message(catalog.i18nc(
            "@info:progress",
            "Saving to Removable Drive <filename>{0}</filename>").format(self.getName()),
            0, False, -1)
        message.show()

        self.writeStarted.emit(self)
        job._message = message
        self._writing = True
        job.start()
    except PermissionError as e:
        Logger.log("e", "Permission denied when trying to write to %s: %s", file_name, str(e))
        raise OutputDeviceError.PermissionDeniedError(catalog.i18nc(
            "@info:status",
            "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(
            file_name, str(e))) from e
    except OSError as e:
        Logger.log("e", "Operating system would not let us write to %s: %s", file_name, str(e))
        raise OutputDeviceError.WriteRequestFailedError(catalog.i18nc(
            "@info:status",
            "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(
            file_name, str(e))) from e
def _onUpdateRequired(self):
    """Notify the user that newer firmware is available and offer to start the update."""
    # NautilusUpdate.NautilusUpdate().thingsChanged()
    body_text = catalog.i18nc(
        "@info:status",
        "New features are available for {}! It is recommended to update the firmware on your printer.").format(self._name)
    action_label = catalog.i18nc("@action:button", "Update Firmware")
    action_tooltip = catalog.i18nc("@info:tooltip", "Automatically download and install the latest firmware")

    update_message = Message(body_text, 0)
    update_message.addAction("download_config", action_label, "globe", action_tooltip)
    update_message.actionTriggered.connect(self.beginUpdate)
    update_message.show()
def _showErrorMessage(self, text: str):
    """Log *text* as an error and display it to the user in an error-type message."""
    Logger.error(text)
    error_popup = Message(text, lifetime=0, message_type=Message.MessageType.ERROR)
    error_popup.show()
def checkQueuedNodes(self) -> None:
    """Process all queued scene nodes according to the Mesh Tools preferences.

    Depending on preferences this may randomise a node's position on the plate, rescale it
    by a unit factor, warn about non-watertight meshes, and/or fix face normals.
    The queue is cleared when done.
    """
    global_container_stack = self._application.getGlobalContainerStack()
    if global_container_stack:
        disallowed_edge = self._application.getBuildVolume().getEdgeDisallowedSize() + 2  # Allow for some rounding errors
        max_x_coordinate = (global_container_stack.getProperty("machine_width", "value") / 2) - disallowed_edge
        max_y_coordinate = (global_container_stack.getProperty("machine_depth", "value") / 2) - disallowed_edge

    for node in self._node_queue:
        mesh_data = node.getMeshData()
        if not mesh_data:
            continue
        file_name = mesh_data.getFileName()

        # FIX: was "global_container_stack != None"; use the idiomatic identity check.
        if self._preferences.getValue("meshtools/randomise_location_on_load") and global_container_stack is not None:
            if file_name and os.path.splitext(file_name)[1].lower() == ".3mf":  # don't randomise project files
                continue
            node_bounds = node.getBoundingBox()
            position = self._randomLocation(node_bounds, max_x_coordinate, max_y_coordinate)
            node.setPosition(position)

        if (
            self._preferences.getValue("meshtools/check_models_on_load") or
            self._preferences.getValue("meshtools/fix_normals_on_load") or
            self._preferences.getValue("meshtools/model_unit_factor") != 1
        ):
            tri_node = self._toTriMesh(mesh_data)

            if self._preferences.getValue("meshtools/model_unit_factor") != 1:
                if file_name and os.path.splitext(file_name)[1].lower() not in [".stl", ".obj", ".ply"]:
                    # only resize models that don't have an intrinsic unit set
                    continue
                scale_matrix = Matrix()
                scale_matrix.setByScaleFactor(float(self._preferences.getValue("meshtools/model_unit_factor")))
                tri_node.apply_transform(scale_matrix.getData())
                self._replaceSceneNode(node, [tri_node])

            if self._preferences.getValue("meshtools/check_models_on_load") and not tri_node.is_watertight:
                if not file_name:
                    file_name = catalog.i18nc("@text Print job name", "Untitled")
                base_name = os.path.basename(file_name)
                # Replace a previously shown warning for the same file.
                if file_name in self._mesh_not_watertight_messages:
                    self._mesh_not_watertight_messages[file_name].hide()

                message = Message(title=catalog.i18nc("@info:title", "Mesh Tools"))
                body = catalog.i18nc("@info:status",
                                     "Model %s is not watertight, and may not print properly.") % base_name

                # XRayView may not be available if the plugin has been disabled
                active_view = self._controller.getActiveView()
                if active_view and "XRayView" in self._controller.getAllViews() and active_view.getPluginId() != "XRayView":
                    body += " " + catalog.i18nc("@info:status",
                                                "Check X-Ray View and repair the model before printing it.")
                    message.addAction("X-Ray", catalog.i18nc("@action:button", "Show X-Ray View"), "", "")
                    message.actionTriggered.connect(self._showXRayView)
                else:
                    body += " " + catalog.i18nc("@info:status", "Repair the model before printing it.")

                message.setText(body)
                message.show()
                self._mesh_not_watertight_messages[file_name] = message

            if self._preferences.getValue("meshtools/fix_normals_on_load") and tri_node.is_watertight:
                tri_node.fix_normals()
                self._replaceSceneNode(node, [tri_node])

    self._node_queue = []
def _onStartSliceCompleted(self, job):
    """Handle completion of a StartSliceJob: report errors, or hand the slice message to the backend.

    :param job: the finished StartSliceJob.
    """
    if self._error_message:
        self._error_message.hide()

    # Note that cancelled slice jobs can still call this method.
    if self._start_slice_job is job:
        self._start_slice_job = None

    if job.isCancelled() or job.getError() or job.getResult() == StartSliceJob.StartJobResult.Error:
        return

    if job.getResult() == StartSliceJob.StartJobResult.MaterialIncompatible:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc(
                "@info:status",
                "The selected material is incompatible with the selected machine or configuration."))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return

    if job.getResult() == StartSliceJob.StartJobResult.SettingError:
        if Application.getInstance().platformActivity:
            extruders = list(ExtruderManager.getInstance().getMachineExtruders(
                self._global_container_stack.getId()))
            error_keys = []
            for extruder in extruders:
                error_keys.extend(extruder.getErrorKeys())
            if not extruders:
                error_keys = self._global_container_stack.getErrorKeys()
            error_labels = set()
            for key in error_keys:
                # Search all container stacks for the definition of this setting.
                # Some are only in an extruder stack.
                for stack in [self._global_container_stack] + extruders:
                    definitions = stack.getBottom().findDefinitions(key=key)
                    if definitions:
                        break  # Found it! No need to continue search.
                else:  # No stack has a definition for this setting.
                    Logger.log("w",
                               "When checking settings for errors, unable to find definition for key: {key}".format(key=key))
                    continue
                error_labels.add(definitions[0].label)

            error_labels = ", ".join(error_labels)
            # FIX: .format() used to be applied to the source string *inside* i18nc, so the
            # translation lookup could never match; format the translated string instead.
            self._error_message = Message(catalog.i18nc(
                "@info:status",
                "Unable to slice with the current settings. The following settings have errors: {0}").format(error_labels))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return

    if job.getResult() == StartSliceJob.StartJobResult.BuildPlateError:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc(
                "@info:status",
                "Unable to slice because the prime tower or prime position(s) are invalid."))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        # FIX: this return was missing; without it a build-plate error fell through and the
        # slice message was still sent to the backend below.
        return

    if job.getResult() == StartSliceJob.StartJobResult.NothingToSlice:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc(
                "@info:status",
                "Nothing to slice because none of the models fit the build volume. Please scale or rotate models to fit."))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return

    # Preparation completed, send it to the backend.
    self._socket.sendMessage(job.getSliceMessage())

    # Notify the user that it's now up to the backend to do it's job
    self.backendStateChange.emit(BackendState.Processing)
    Logger.log("d", "Sending slice message took %s seconds", time() - self._slice_start_time)
def run(self):
    """Check the configured URL for a newer application version and notify the user.

    Downloads a JSON version manifest, compares the entry for the current platform against the
    running application version and, when a newer one exists, shows a Message with download
    actions. Messages are suppressed when self.silent is set (except the new-version one).
    """
    if not self._url:
        Logger.log("e", "Can not check for a new release. URL not set!")
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name,
                                              Application.getInstance().getVersion())}
        request = urllib.request.Request(self._url, headers=headers)
        latest_version_file = urllib.request.urlopen(request)
    except Exception as e:
        Logger.log("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title=i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            # FIX: was 'is not "master"' — identity comparison against a string literal is
            # incorrect (and a SyntaxWarning in modern CPython); use equality instead.
            if Application.getInstance().getVersion() != "master":
                local_version = Version(Application.getInstance().getVersion())
            else:
                # Development ("master") builds have no comparable version number.
                if not self.silent:
                    Message(i18n_catalog.i18nc(
                        "@info",
                        "The version you are using does not support checking for updates."),
                        title=i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w",
                       "Could not determine application version from string %s, not checking for updates",
                       Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc(
                    "@info",
                    "The version you are using does not support checking for updates."),
                    title=i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    # FIX: local was named "os", shadowing the os module; renamed.
                    os_name = key
                    if platform.system() == os_name:  # TODO: add architecture check
                        newest_version = Version([int(value["major"]),
                                                  int(value["minor"]),
                                                  int(value["revision"])])
                        if local_version < newest_version:
                            Logger.log("i", "Found a new version of the software. Spawning message")
                            title_message = i18n_catalog.i18nc("@info:status",
                                                               "Continuum {0} is available!",
                                                               newest_version)
                            content_message = i18n_catalog.i18nc(
                                "@info:status",
                                "Continuum {0} provides better and reliable printing experience.",
                                newest_version)
                            message = Message(text=content_message, title=title_message)
                            message.addAction("download",
                                              i18n_catalog.i18nc("@action:button", "Download"),
                                              "[no_icon]", "[no_description]")
                            message.addAction("new_features",
                                              i18n_catalog.i18nc("@action:button",
                                                                 "Learn more about new features"),
                                              "[no_icon]", "[no_description]",
                                              button_style=Message.ActionButtonStyle.LINK,
                                              button_align=Message.ActionButtonStyle.BUTTON_ALIGN_LEFT)
                            if self._set_download_url_callback:
                                self._set_download_url_callback(value["url"])
                            message.actionTriggered.connect(self._callback)
                            message.show()
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An error occurred while checking for updates."),
                title=i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title=i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
def execute(self, data):
    # Post-processing entry point: rewrites infill extrusion moves in the sliced G-code so
    # that flow (and optionally feed rate) varies with the distance to the nearest wall
    # ("gradient infill"). Returns the modified layer list, or None after showing a Message
    # when the current slicing settings are incompatible.
    # NOTE(review): layout reconstructed from a whitespace-mangled source; nesting of the
    # deepest branches follows the guarded variable uses — verify against the upstream script.

    # Read the script's own settings.
    gradient_discretization = float(self.getSettingValueByKey("gradientdiscretization"))
    max_flow = float(self.getSettingValueByKey("maxflow"))
    min_flow = float(self.getSettingValueByKey("minflow"))
    link_flow = float(self.getSettingValueByKey("shortdistflow"))
    gradient_thickness = float(self.getSettingValueByKey("gradientthickness"))
    extruder_id = self.getSettingValueByKey("extruder_nb")
    extruder_id = extruder_id - 1  # user-facing extruder number is 1-based
    gradual_speed = bool(self.getSettingValueByKey("gradualspeed"))
    max_over_speed_factor = float(self.getSettingValueByKey("maxoverspeed"))
    max_over_speed_factor = max_over_speed_factor / 100  # percent -> factor
    min_over_speed_factor = float(self.getSettingValueByKey("minoverspeed"))
    min_over_speed_factor = min_over_speed_factor / 100  # percent -> factor
    test_outer_wall = bool(self.getSettingValueByKey("testouterwall"))
    reduce_end_speed = bool(self.getSettingValueByKey("reduceendspeed"))
    reduce_factor = float(self.getSettingValueByKey("reducefactor"))
    reduce_factor = 1 - (reduce_factor / 100)  # percent reduction -> multiplier

    # machine_extruder_count: clamp the chosen extruder index to the machine's range.
    extruder_count = Application.getInstance().getGlobalContainerStack().getProperty("machine_extruder_count", "value")
    extruder_count = extruder_count - 1
    if extruder_id > extruder_count:
        extruder_id = extruder_count

    # Deprecation function
    # extrud = list(Application.getInstance().getGlobalContainerStack().extruders.values())
    extrud = Application.getInstance().getGlobalContainerStack().extruderList

    # Slicing settings of the selected extruder that this script depends on.
    infillpattern = extrud[extruder_id].getProperty("infill_pattern", "value")
    connectinfill = extrud[extruder_id].getProperty("zig_zaggify_infill", "value")
    relativeextrusion = extrud[extruder_id].getProperty("relative_extrusion", "value")
    link = extrud[extruder_id].getProperty("relative_extrusion", "value")

    if relativeextrusion == False:
        # Logger.log('d', 'Gcode must be generate in relative extrusion mode')
        Message('Gcode must be generate in relative extrusion mode', title = catalog.i18nc("@info:title", "Post Processing")).show()
        return None

    # Note : Walls are used to define the boundary of the infill segment and detect if the
    # point are in the 'Gradiant' area
    infillbeforewalls = extrud[extruder_id].getProperty("infill_before_walls", "value")
    if infillbeforewalls == True:
        # Logger.log('d', 'Gcode must be generate with the mode infill_before_walls to off')
        Message('It is important to make sure that the Walls are printed before the Infill (Infill before Walls must be set to OFF)', title = catalog.i18nc("@info:title", "Post Processing")).show()
        return None

    """Parse Gcode and modify infill portions with an extrusion width gradient."""
    currentSection = Section.NOTHING
    lastPosition = Point2D(-10000, -10000)  # sentinel: far away from any real toolhead position
    gradientDiscretizationLength = gradient_thickness / gradient_discretization

    # Map the Cura pattern name to this script's internal type code (0 = unsupported).
    infill_type = mfill_mode(infillpattern)
    if infill_type == 0:
        # Logger.log('d', 'Infill Pattern not supported : ' + infillpattern)
        Message('Infill Pattern not supported : ' + infillpattern , title = catalog.i18nc("@info:title", "Post Processing")).show()
        return None

    if connectinfill == True:
        # Logger.log('d', 'Connect Infill Lines no supported')
        Message('Gcode must be generate without Connect Infill Lines mode activated' , title = catalog.i18nc("@info:title", "Post Processing")).show()
        return None

    Logger.log('d', "GradientFill Param : " + str(gradientDiscretizationLength) + "/" + str(max_flow) + "/" + str(min_flow) + "/" + str(gradient_discretization) + "/" + str(gradient_thickness))
    Logger.log('d', "Pattern Param : " + infillpattern + "/" + str(infill_type))

    for layer in data:
        layer_index = data.index(layer)
        lines = layer.split("\n")
        for currentLine in lines:
            new_Line = ""
            stringFeed = ""
            line_index = lines.index(currentLine)

            # Track which G-code section the parser is in via the slicer's marker comments.
            if is_begin_layer_line(currentLine):
                perimeterSegments = []

            if is_begin_inner_wall_line(currentLine):
                currentSection = Section.INNER_WALL
                # Logger.log('d', 'is_begin_inner_wall_line' )

            if is_begin_outer_wall_line(currentLine):
                currentSection = Section.OUTER_WALL
                # Logger.log('d', 'is_begin_outer_wall_line' )

            # Collect wall segments: the gradient distance is measured against these.
            if currentSection == Section.INNER_WALL and test_outer_wall == False:
                if is_extrusion_line(currentLine):
                    perimeterSegments.append(Segment(getXY(currentLine), lastPosition))

            if currentSection == Section.OUTER_WALL and test_outer_wall == True:
                if is_extrusion_line(currentLine):
                    perimeterSegments.append(Segment(getXY(currentLine), lastPosition))

            if is_begin_infill_segment_line(currentLine):
                # Log Size of perimeterSegments for debuging
                Logger.log('d', 'PerimeterSegments seg : {}'.format(len(perimeterSegments)))
                currentSection = Section.INFILL
                # ! Important: skip the rest of the loop so we do not keep processing this marker line.
                continue

            if currentSection == Section.INFILL:
                # Capture the current feed rate from a G1 F... line; it is the baseline for
                # the speed adjustments below.
                if "F" in currentLine and "G1" in currentLine:
                    searchSpeed = re.search(r"F(\d*\.?\d*)", currentLine)
                    if searchSpeed:
                        current_feed = float(searchSpeed.group(1))
                        new_Line = "G1 F{}\n".format(current_feed)
                    else:
                        Logger.log('d', 'Gcode file parsing error for line : ' + currentLine)

                # Extrusion move inside the infill: this is the line that gets rewritten.
                if "E" in currentLine and "G1" in currentLine and "X" in currentLine and "Y" in currentLine:
                    currentPosition = getXY(currentLine)
                    splitLine = currentLine.split(" ")

                    # if infill_type == Infill.LINEAR:
                    if infill_type == 2:
                        # find extrusion length
                        for element in splitLine:
                            if "E" in element:
                                extrusionLength = float(element[1:])
                        segmentLength = get_points_distance(lastPosition, currentPosition)
                        segmentSteps = segmentLength / gradientDiscretizationLength
                        extrusionLengthPerSegment = extrusionLength / segmentSteps
                        segmentDirection = Point2D((currentPosition.x - lastPosition.x) / segmentLength * gradientDiscretizationLength, (currentPosition.y - lastPosition.y) / segmentLength * gradientDiscretizationLength)

                        if segmentSteps >= 2:
                            # Long move: split it into discretization-length sub-segments and
                            # scale flow/feed of each by its distance to the nearest wall.
                            # new_Line=new_Line+"; GradientInfill segmentSteps >= 2\n"
                            for step in range(int(segmentSteps)):
                                segmentEnd = Point2D(lastPosition.x + segmentDirection.x, lastPosition.y + segmentDirection.y)
                                shortestDistance = min_distance_from_segment(Segment(lastPosition, segmentEnd), perimeterSegments)
                                if shortestDistance < gradient_thickness:
                                    # Inside the gradient band: interpolate flow between
                                    # max_flow (at the wall) and min_flow (at gradient_thickness).
                                    segmentExtrusion = extrusionLengthPerSegment * mapRange((0, gradient_thickness), (max_flow / 100, min_flow / 100), shortestDistance)
                                    segmentFeed = current_feed / mapRange((0, gradient_thickness), (max_flow / 100, min_flow / 100), shortestDistance)
                                    if gradual_speed:
                                        # Clamp the adjusted feed into the allowed over-speed window.
                                        if segmentFeed > (current_feed * max_over_speed_factor):
                                            segmentFeed = current_feed * max_over_speed_factor
                                        if segmentFeed < (current_feed * min_over_speed_factor):
                                            segmentFeed = current_feed * min_over_speed_factor
                                        if reduce_end_speed:
                                            # NOTE(review): savePosition is only set after the first
                                            # travel/extrusion move is seen (see bottom of loop);
                                            # presumably one always precedes infill — verify.
                                            modi = reduce_speed(savePosition, currentPosition, segmentEnd)
                                            if modi < 0:
                                                # Logger.log('d', 'Org : ' + str(segmentFeed) )
                                                segmentFeed = segmentFeed * reduce_factor
                                                # Logger.log('d', 'Modi : ' + str(segmentFeed) )
                                        stringFeed = " F{}".format(int(segmentFeed))
                                else:
                                    # Outside the gradient band: use the minimum flow.
                                    segmentExtrusion = extrusionLengthPerSegment * min_flow / 100
                                    if min_flow > 0:
                                        segmentFeed = current_feed / (min_flow / 100)
                                    else:
                                        segmentFeed = current_feed * max_over_speed_factor
                                    if gradual_speed:
                                        if segmentFeed > (current_feed * max_over_speed_factor):
                                            segmentFeed = current_feed * max_over_speed_factor
                                        if segmentFeed < (current_feed * min_over_speed_factor):
                                            segmentFeed = current_feed * min_over_speed_factor
                                        stringFeed = " F{}".format(int(segmentFeed))

                                new_Line = new_Line + get_extrusion_command(segmentEnd.x, segmentEnd.y, segmentExtrusion) + stringFeed + "\n"
                                lastPosition = segmentEnd

                            # MissingSegment: emit the remainder between the last sub-segment
                            # end and the move's real end point.
                            segmentLengthRatio = get_points_distance(lastPosition, currentPosition) / segmentLength
                            segmentFeed = current_feed / (max_flow / 100)
                            if segmentFeed < (current_feed * min_over_speed_factor):
                                segmentFeed = current_feed * min_over_speed_factor
                            if gradual_speed:
                                # Last segment need to reduce ?
                                if reduce_end_speed:
                                    segmentFeed = segmentFeed * reduce_factor
                                stringFeed = " F{}".format(int(segmentFeed))

                            new_Line = new_Line + get_extrusion_command(currentPosition.x, currentPosition.y, segmentLengthRatio * extrusionLength * max_flow / 100) + stringFeed  # + " ; Last line"
                            lines[line_index] = new_Line
                        else:
                            # Short move (< 2 discretization steps): just rescale its E value
                            # by the short-distance flow setting.
                            outPutLine = ""
                            # outPutLine = "; GradientInfill segmentSteps < 2\n"
                            for element in splitLine:
                                if "E" in element:
                                    outPutLine = outPutLine + "E" + str(round(extrusionLength * link_flow / 100, 5))
                                else:
                                    outPutLine = outPutLine + element + " "
                            outPutLine = outPutLine  # + "\n"
                            lines[line_index] = outPutLine
                        # writtenToFile = 1

                    # gyroid or honeycomb
                    # if infill_type == Infill.SMALL_SEGMENTS:
                    if infill_type == 1:
                        # Small-segment patterns: moves are short, so scale the whole move's
                        # E (and optionally F) by the distance of this single segment to the wall.
                        shortestDistance = min_distance_from_segment(Segment(lastPosition, currentPosition), perimeterSegments)
                        outPutLine = new_Line
                        if shortestDistance < gradient_thickness:
                            for element in splitLine:
                                if "E" in element:
                                    newE = float(element[1:]) * mapRange((0, gradient_thickness), (max_flow / 100, min_flow / 100), shortestDistance)
                                    segmentFeed = current_feed / mapRange((0, gradient_thickness), (max_flow / 100, min_flow / 100), shortestDistance)
                                    if gradual_speed:
                                        if segmentFeed > (current_feed * max_over_speed_factor):
                                            segmentFeed = current_feed * max_over_speed_factor
                                        if segmentFeed < (current_feed * min_over_speed_factor):
                                            segmentFeed = current_feed * min_over_speed_factor
                                        stringFeed = " F{}".format(int(segmentFeed))
                                    outPutLine = outPutLine + "E" + str(round(newE, 5))
                                    # test if F already define in line
                                    if not " F" in outPutLine and gradual_speed:
                                        outPutLine = outPutLine + stringFeed
                                else:
                                    outPutLine = outPutLine + element + " "
                            outPutLine = outPutLine  # + "\n"
                            lines[line_index] = outPutLine

            #
            # comment like ;MESH:NONMESH
            #
            if ";" in currentLine:
                currentSection = Section.NOTHING
                lines[line_index] = currentLine  # other Comment

            #
            # line with move
            #
            if "X" in currentLine and "Y" in currentLine and ("G1" in currentLine or "G0" in currentLine):
                lastPosition = getXY(currentLine)
                savePosition = lastPosition

        final_lines = "\n".join(lines)
        data[layer_index] = final_lines
    return data
def run(self):
    """Convert the sliced layer data received from CuraEngine into renderable layer geometry.

    Builds a LayerData mesh from ``self._layers`` (protobuf layer messages), colors it per
    extruder material, attaches it to a new scene node under the build volume, and shows a
    progress Message while working. Checks ``self._abort_requested`` at several points and
    bails out (hiding the progress message) when set. On completion ``self._layers`` is
    cleared to release memory.
    """
    start_time = time()
    # Only show the progress message when the layer view is the active view, since that is
    # the only view that will display the result.
    if Application.getInstance().getController().getActiveView().getPluginId() == "LayerView":
        self._progress = Message(catalog.i18nc("@info:status", "Processing Layers"), 0, False, -1)
        self._progress.show()
        Job.yieldThread()
        if self._abort_requested:
            if self._progress:
                self._progress.hide()
            return

    Application.getInstance().getController().activeViewChanged.connect(self._onActiveViewChanged)

    new_node = SceneNode()

    ## Remove old layer data (if any). Only one node carries layer data, so stop at the first hit.
    for node in DepthFirstIterator(self._scene.getRoot()):
        if node.callDecoration("getLayerData"):
            node.getParent().removeChild(node)
            break
    if self._abort_requested:
        if self._progress:
            self._progress.hide()
        return

    # Force garbage collection.
    # For some reason, Python has a tendency to keep the layer data
    # in memory longer than needed. Forcing the GC to run here makes
    # sure any old layer data is really cleaned up before adding new.
    gc.collect()

    mesh = MeshData()
    layer_data = LayerDataBuilder.LayerDataBuilder()
    layer_count = len(self._layers)

    # Find the minimum layer number.
    # When using a raft, the raft layers are sent as layers < 0. Instead of allowing layers < 0, we
    # instead simply offset all other layers so the lowest layer is always 0.
    min_layer_number = 0
    for layer in self._layers:
        if layer.id < min_layer_number:
            min_layer_number = layer.id

    current_layer = 0

    for layer in self._layers:
        # Shift ids so raft layers (negative ids) map onto 0..n.
        abs_layer_number = layer.id + abs(min_layer_number)

        layer_data.addLayer(abs_layer_number)
        this_layer = layer_data.getLayer(abs_layer_number)
        layer_data.setLayerHeight(abs_layer_number, layer.height)

        for p in range(layer.repeatedMessageCount("path_segment")):
            polygon = layer.getRepeatedMessage("path_segment", p)

            extruder = polygon.extruder

            # NOTE(review): numpy.fromstring is deprecated in favor of numpy.frombuffer;
            # consider migrating — TODO confirm against the numpy version Cura pins.
            line_types = numpy.fromstring(polygon.line_type, dtype="u1")  # Convert bytearray to numpy array
            line_types = line_types.reshape((-1, 1))

            points = numpy.fromstring(polygon.points, dtype="f4")  # Convert bytearray to numpy array
            if polygon.point_type == 0:  # Point2D
                # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
                points = points.reshape((-1, 2))
            else:  # Point3D
                points = points.reshape((-1, 3))

            line_widths = numpy.fromstring(polygon.line_width, dtype="f4")  # Convert bytearray to numpy array
            # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
            line_widths = line_widths.reshape((-1, 1))

            # In the future, line_thicknesses should be given by CuraEngine as well.
            # Currently the infill layer thickness also translates to line width.
            line_thicknesses = numpy.zeros(line_widths.shape, dtype="f4")
            line_thicknesses[:] = layer.thickness / 1000  # from micrometer to millimeter

            # Create a new 3D-array, copy the 2D points over and insert the right height.
            # This uses manual array creation + copy rather than numpy.insert since this is
            # faster.
            new_points = numpy.empty((len(points), 3), numpy.float32)
            if polygon.point_type == 0:  # Point2D
                new_points[:, 0] = points[:, 0]
                new_points[:, 1] = layer.height / 1000  # layer height value is in backend representation
                new_points[:, 2] = -points[:, 1]
            else:  # Point3D
                new_points[:, 0] = points[:, 0]
                new_points[:, 1] = points[:, 2]
                new_points[:, 2] = -points[:, 1]

            this_poly = LayerPolygon.LayerPolygon(extruder, line_types, new_points, line_widths, line_thicknesses)
            this_poly.buildCache()

            this_layer.polygons.append(this_poly)

            Job.yieldThread()
        Job.yieldThread()
        current_layer += 1
        # Cap at 99 so 100% is only reported once the mesh below is built too.
        progress = (current_layer / layer_count) * 99
        # TODO: Rebuild the layer data mesh once the layer has been processed.
        # This needs some work in LayerData so we can add the new layers instead of recreating the entire mesh.
        if self._abort_requested:
            if self._progress:
                self._progress.hide()
            return
        if self._progress:
            self._progress.setProgress(progress)

    # We are done processing all the layers we got from the engine, now create a mesh out of the data

    # Find out colors per extruder
    global_container_stack = Application.getInstance().getGlobalContainerStack()
    manager = ExtruderManager.getInstance()
    extruders = list(manager.getMachineExtruders(global_container_stack.getId()))
    if extruders:
        # One RGBA row per extruder, indexed by the extruder's "position" metadata.
        material_color_map = numpy.zeros((len(extruders), 4), dtype=numpy.float32)
        for extruder in extruders:
            material = extruder.findContainer({"type": "material"})
            position = int(extruder.getMetaDataEntry("position", default="0"))  # Get the position
            color_code = material.getMetaDataEntry("color_code")
            color = colorCodeToRGBA(color_code)
            material_color_map[position, :] = color
    else:
        # Single extruder via global stack.
        material_color_map = numpy.zeros((1, 4), dtype=numpy.float32)
        material = global_container_stack.findContainer({"type": "material"})
        color_code = material.getMetaDataEntry("color_code")
        if color_code is None:  # not all stacks have a material color
            color_code = "#e0e000"
        color = colorCodeToRGBA(color_code)
        material_color_map[0, :] = color

    # We have to scale the colors for compatibility mode
    if OpenGLContext.isLegacyOpenGL() or bool(Preferences.getInstance().getValue("view/force_layer_view_compatibility_mode")):
        line_type_brightness = 0.5  # for compatibility mode
    else:
        line_type_brightness = 1.0
    layer_mesh = layer_data.build(material_color_map, line_type_brightness)

    if self._abort_requested:
        if self._progress:
            self._progress.hide()
        return

    # Add LayerDataDecorator to scene node to indicate that the node has layer data
    decorator = LayerDataDecorator.LayerDataDecorator()
    decorator.setLayerData(layer_mesh)
    new_node.addDecorator(decorator)

    new_node.setMeshData(mesh)
    # Set build volume as parent, the build volume can move as a result of raft settings.
    # It makes sense to set the build volume as parent: the print is actually printed on it.
    new_node_parent = Application.getInstance().getBuildVolume()
    new_node.setParent(new_node_parent)  # Note: After this we can no longer abort!

    settings = Application.getInstance().getGlobalContainerStack()
    if not settings.getProperty("machine_center_is_zero", "value"):
        # Re-center: machine origin is at a corner, so shift by half the bed size.
        new_node.setPosition(Vector(-settings.getProperty("machine_width", "value") / 2, 0.0, settings.getProperty("machine_depth", "value") / 2))

    if self._progress:
        self._progress.setProgress(100)

    view = Application.getInstance().getController().getActiveView()
    if view.getPluginId() == "LayerView":
        view.resetLayerData()

    if self._progress:
        self._progress.hide()

    # Clear the unparsed layers. This saves us a bunch of memory if the Job does not get destroyed.
    self._layers = None

    Logger.log("d", "Processing layers took %s seconds", time() - start_time)
def requestWrite(self, nodes, file_name=None, filter_by_machine=False, file_handler=None):
    """Write the given scene nodes to the removable drive as a g-code file.

    Starts an asynchronous WriteFileJob and shows a progress Message; only one
    write may be in flight at a time.

    :param nodes: Scene nodes to write.
    :param file_name: Target file name; when None one is generated from the nodes.
    :param filter_by_machine: Ignored — forced to True (see comment below).
    :param file_handler: Optional file handler supplying writers; falls back to
        the application's mesh file handler.
    :raises OutputDeviceError.DeviceBusyError: if a write is already in progress.
    :raises OutputDeviceError.WriteRequestFailedError: if no suitable file format
        exists or the OS refuses the write.
    :raises OutputDeviceError.PermissionDeniedError: if the file cannot be opened
        for writing due to permissions.
    """
    filter_by_machine = True  # This plugin is intended to be used by machine (regardless of what it was told to do)
    if self._writing:
        raise OutputDeviceError.DeviceBusyError()

    # Formats supported by this application (File types that we can actually write)
    if file_handler:
        file_formats = file_handler.getSupportedFileTypesWrite()
    else:
        file_formats = Application.getInstance().getMeshFileHandler().getSupportedFileTypesWrite()

    if filter_by_machine:
        container = Application.getInstance().getGlobalContainerStack().findContainer({"file_formats": "*"})

        # Create a list from supported file formats string
        machine_file_formats = [file_type.strip() for file_type in container.getMetaDataEntry("file_formats").split(";")]

        # Take the intersection between file_formats and machine_file_formats.
        file_formats = list(filter(lambda file_format: file_format["mime_type"] in machine_file_formats, file_formats))

    if len(file_formats) == 0:
        Logger.log("e", "There are no file formats available to write with!")
        raise OutputDeviceError.WriteRequestFailedError()

    # Just take the first file format available.
    if file_handler is not None:
        writer = file_handler.getWriterByMimeType(file_formats[0]["mime_type"])
    else:
        writer = Application.getInstance().getMeshFileHandler().getWriterByMimeType(file_formats[0]["mime_type"])

    extension = file_formats[0]["extension"]
    if file_name is None:
        file_name = self._automaticFileName(nodes)
    if extension:  # Not empty string.
        extension = "." + extension
    # self.getId() is the mount point of the drive; replace any user-supplied extension.
    file_name = os.path.join(self.getId(), os.path.splitext(file_name)[0] + extension)

    try:
        Logger.log("d", "Writing to %s", file_name)
        # Using buffering greatly reduces the write time for many lines of gcode
        self._stream = open(file_name, "wt", buffering=1, encoding="utf-8")
        job = WriteFileJob(writer, self._stream, nodes, MeshWriter.OutputMode.TextMode)
        job.setFileName(file_name)
        job.progress.connect(self._onProgress)
        job.finished.connect(self._onFinished)

        message = Message(catalog.i18nc("@info:progress", "Saving to Removable Drive <filename>{0}</filename>").format(self.getName()), 0, False, -1)
        message.show()

        self.writeStarted.emit(self)

        # Stash the message on the job so _onProgress/_onFinished can update/hide it.
        job._message = message
        self._writing = True
        job.start()
    except PermissionError as e:
        Logger.log("e", "Permission denied when trying to write to %s: %s", file_name, str(e))
        raise OutputDeviceError.PermissionDeniedError(catalog.i18nc("@info:status", "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(file_name, str(e))) from e
    except OSError as e:
        Logger.log("e", "Operating system would not let us write to %s: %s", file_name, str(e))
        raise OutputDeviceError.WriteRequestFailedError(catalog.i18nc("@info:status", "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(file_name, str(e))) from e
def _devicesRemovedFromAccount(self, removed_device_ids: Set[str]) -> None:
    """
    Removes the CloudOutputDevice from the received device ids and marks the specific printers as "removed from
    account". In addition, it generates a message to inform the user about the printers that are no longer linked to
    his/her account. The message is not generated if all the printers have been previously reported as not linked
    to the account.

    :param removed_device_ids: Set of device ids, whose CloudOutputDevice needs to be removed
    :return: None
    """

    # Nothing to unlink or report when no account is logged in.
    if not CuraApplication.getInstance().getCuraAPI().account.isLoggedIn:
        return

    # Do not report device ids which have been previously marked as non-linked to the account
    ignored_device_ids = set()
    for device_id in removed_device_ids:
        if not parseBool(self._um_cloud_printers[device_id].getMetaDataEntry(META_UM_LINKED_TO_ACCOUNT, "true")):
            ignored_device_ids.add(device_id)

    # Keep the reported_device_ids list in a class variable, so that the message button actions can access it and
    # take the necessary steps to fulfill their purpose.
    self.reported_device_ids = removed_device_ids - ignored_device_ids
    if not self.reported_device_ids:
        # Everything was already reported earlier; skip the message entirely.
        return

    # Generate message to inform the user. i18ncp picks singular/plural based on the count.
    self._removed_printers_message = Message(
        title=self.I18N_CATALOG.i18ncp(
            "info:status",
            "Cloud connection is not available for a printer",
            "Cloud connection is not available for some printers",
            len(self.reported_device_ids)
        )
    )
    # Build an HTML list of "<name> (<definition name>)" entries for the message body.
    device_names = "\n".join(["<li>{} ({})</li>".format(self._um_cloud_printers[device].name, self._um_cloud_printers[device].definition.name) for device in self.reported_device_ids])
    message_text = self.I18N_CATALOG.i18ncp(
        "info:status",
        "The following cloud printer is not linked to your account:\n",
        "The following cloud printers are not linked to your account:\n",
        len(self.reported_device_ids)
    )
    message_text += self.I18N_CATALOG.i18nc(
        "info:status", "<ul>{}</ul>\nTo establish a connection, please visit the "
                       "<a href='https://mycloud.ultimaker.com/'>Ultimaker Digital Factory</a>.",
        device_names)
    self._removed_printers_message.setText(message_text)
    # Two choices for the user: keep syncing the configurations, or remove the printers.
    self._removed_printers_message.addAction("keep_printer_configurations_action",
                                             name=self.I18N_CATALOG.i18nc("@action:button", "Keep printer configurations"),
                                             icon="",
                                             description="Keep the configuration of the cloud printer(s) synced with Cura which are not linked to your account.",
                                             button_align=Message.ActionButtonAlignment.ALIGN_RIGHT)
    self._removed_printers_message.addAction("remove_printers_action",
                                             name=self.I18N_CATALOG.i18nc("@action:button", "Remove printers"),
                                             icon="",
                                             description="Remove the cloud printer(s) which are not linked to your account.",
                                             button_style=Message.ActionButtonStyle.SECONDARY,
                                             button_align=Message.ActionButtonAlignment.ALIGN_LEFT)
    self._removed_printers_message.actionTriggered.connect(self._onRemovedPrintersMessageActionTriggered)

    output_device_manager = CuraApplication.getInstance().getOutputDeviceManager()

    # Remove the output device from the printers
    for device_id in removed_device_ids:
        device = self._um_cloud_printers.get(device_id, None)  # type: Optional[GlobalStack]
        if not device:
            continue
        if device_id in output_device_manager.getOutputDeviceIds():
            output_device_manager.removeOutputDevice(device_id)
        if device_id in self._remote_clusters:
            del self._remote_clusters[device_id]

        # Update the printer's metadata to mark it as not linked to the account
        device.setMetaDataEntry(META_UM_LINKED_TO_ACCOUNT, False)

    self._removed_printers_message.show()
def _notFound(self): message = Message(catalog.i18nc("@info:status", "Unable to connect to {}, the IP address is invalid or does not exist").format(self._name), 0, False) message.show()
def _onStartSliceCompleted(self, job: StartSliceJob) -> None:
    """Handle completion of a StartSliceJob: report errors, or hand the slice message to the backend.

    Each failure result emits ``BackendState.Error`` (with a user-facing Message when there is
    platform activity) and returns without contacting the backend; only when no error result
    applies is the slice message sent over the socket.

    :param job: The finished StartSliceJob; its result determines the branch taken.
    """
    if self._error_message:
        self._error_message.hide()

    # Note that cancelled slice jobs can still call this method.
    if self._start_slice_job is job:
        self._start_slice_job = None

    if job.isCancelled() or job.getError() or job.getResult() == StartJobResult.Error:
        self.backendStateChange.emit(BackendState.Error)
        self.backendError.emit(job)
        return

    if job.getResult() == StartJobResult.MaterialIncompatible:
        if self._application.platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status",
                                                        "Unable to slice with the current material as it is incompatible with the selected machine or configuration."),
                                          title=catalog.i18nc("@info:title", "Unable to slice"))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
            self.backendError.emit(job)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return

    if job.getResult() == StartJobResult.SettingError:
        if self._application.platformActivity:
            if not self._global_container_stack:
                Logger.log("w", "Global container stack not assigned to CuraEngineBackend!")
                return
            extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
            error_keys = []  # type: List[str]
            for extruder in extruders:
                error_keys.extend(extruder.getErrorKeys())
            if not extruders:
                error_keys = self._global_container_stack.getErrorKeys()
            error_labels = set()
            for key in error_keys:
                for stack in [self._global_container_stack] + extruders:  # Search all container stacks for the definition of this setting. Some are only in an extruder stack.
                    definitions = cast(DefinitionContainerInterface, stack.getBottom()).findDefinitions(key=key)
                    if definitions:
                        break  # Found it! No need to continue search.
                else:  # No stack has a definition for this setting.
                    Logger.log("w", "When checking settings for errors, unable to find definition for key: {key}".format(key=key))
                    continue
                error_labels.add(definitions[0].label)

            self._error_message = Message(catalog.i18nc("@info:status",
                                                        "Unable to slice with the current settings. The following settings have errors: {0}").format(", ".join(error_labels)),
                                          title=catalog.i18nc("@info:title", "Unable to slice"))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
            self.backendError.emit(job)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return

    elif job.getResult() == StartJobResult.ObjectSettingError:
        errors = {}
        for node in DepthFirstIterator(self._application.getController().getScene().getRoot()):  # type: ignore #Ignore type error because iter() should get called automatically by Python syntax.
            stack = node.callDecoration("getStack")
            if not stack:
                continue
            for key in stack.getErrorKeys():
                if not self._global_container_stack:
                    Logger.log("e", "CuraEngineBackend does not have global_container_stack assigned.")
                    continue
                definition = cast(DefinitionContainerInterface, self._global_container_stack.getBottom()).findDefinitions(key=key)
                if not definition:
                    Logger.log("e", "When checking settings for errors, unable to find definition for key {key} in per-object stack.".format(key=key))
                    continue
                errors[key] = definition[0].label
        self._error_message = Message(catalog.i18nc("@info:status",
                                                    "Unable to slice due to some per-model settings. The following settings have errors on one or more models: {error_labels}").format(error_labels=", ".join(errors.values())),
                                      title=catalog.i18nc("@info:title", "Unable to slice"))
        self._error_message.show()
        self.backendStateChange.emit(BackendState.Error)
        self.backendError.emit(job)
        return

    if job.getResult() == StartJobResult.BuildPlateError:
        if self._application.platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status",
                                                        "Unable to slice because the prime tower or prime position(s) are invalid."),
                                          title=catalog.i18nc("@info:title", "Unable to slice"))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
            self.backendError.emit(job)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        # FIX: previously this branch fell through to the sendMessage() call below,
        # sending the slice message to the backend even though an error was reported.
        # Return here, consistent with every other error branch in this method.
        return

    if job.getResult() == StartJobResult.ObjectsWithDisabledExtruder:
        self._error_message = Message(catalog.i18nc("@info:status",
                                                    "Unable to slice because there are objects associated with disabled Extruder %s." % job.getMessage()),
                                      title=catalog.i18nc("@info:title", "Unable to slice"))
        self._error_message.show()
        self.backendStateChange.emit(BackendState.Error)
        self.backendError.emit(job)
        return

    if job.getResult() == StartJobResult.NothingToSlice:
        if self._application.platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status",
                                                        "Nothing to slice because none of the models fit the build volume. Please scale or rotate models to fit."),
                                          title=catalog.i18nc("@info:title", "Unable to slice"))
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
            self.backendError.emit(job)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
            self._invokeSlice()
        return

    # Preparation completed, send it to the backend.
    self._socket.sendMessage(job.getSliceMessage())

    # Notify the user that it's now up to the backend to do it's job
    self.backendStateChange.emit(BackendState.Processing)

    if self._slice_start_time:
        Logger.log("d", "Sending slice message took %s seconds", time() - self._slice_start_time)
def _onTimeout(self): message = Message(catalog.i18nc("@info:status", "Unable to check for updates, Cura cannot connect to {}").format(self._name), 0, False) message.show()
def onStageSelected(self):
    """Activate the Smart Slice stage: validate the scene, switch to the Smart Slice view,
    attach/initialize the Smart Slice scene root on the printable node, and install the
    stage's tool set. Exits early if the scene contains no valid printable node.
    """
    # Warn (but do not abort) when the OpenGL version cannot do face selection.
    if not SmartSliceStage.getSelectFaceSupported():
        error_message = Message(
            title="Smart Slice: OpenGL error",
            text="You are running an outdated version of OpenGL which may not"
                 " support selecting faces in Smart Slice. Please update OpenGL to at least version 4.1"
        )
        error_message.show()

    application = CuraApplication.getInstance()
    controller = application.getController()
    extruderManager = application.getExtruderManager()

    Selection.clear()

    # Validates the scene; returns None (after leaving the stage) when invalid.
    printable_node = self._exit_stage_if_scene_is_invalid()
    if not printable_node:
        return

    # Remember the active view so it can be restored when leaving the stage.
    self._previous_view = controller.getActiveView().name

    self._connector.api_connection.openConnection()

    # When the Smart Slice stage is active we want to use our SmartSliceView
    # to control the rendering of various nodes. Views are referred to by their
    # plugin name.
    controller.setActiveView('SmartSlicePlugin')

    self._connector.propertyHandler.jobCheck()

    if not Selection.hasSelection():
        Selection.add(printable_node)

    # Center the camera on the printable model, when it has a bounding box.
    aabb = printable_node.getBoundingBox()
    if aabb:
        controller.getCameraTool().setOrigin(aabb.center)

    smart_slice_node = findChildSceneNode(printable_node, SmartSliceScene.Root)

    if not smart_slice_node:
        # First activation for this node: create and initialize the Smart Slice root.
        smart_slice_node = SmartSliceScene.Root()
        try:
            smart_slice_node.initialize(printable_node)
        except Exception as exc:
            # Geometry analysis failed; inform the user and undo the partial attach.
            Logger.logException("e", "Unable to analyze geometry")
            self._scene_not_ready(
                i18n_catalog.i18n("Smart Slice could not analyze the geometry for face selection. It may be ill-formed.")
            )
            if smart_slice_node:
                printable_node.removeChild(smart_slice_node)
            return
        # NOTE(review): emitted only for a newly created root here — confirm the signal
        # is not expected on every stage activation.
        self.smartSliceNodeChanged.emit(smart_slice_node)

    for c in controller.getScene().getRoot().getAllChildren():
        if isinstance(c, SmartSliceScene.Root):
            c.setVisible(True)

    for mesh in getModifierMeshes():
        mesh.setSelectable(False)

        # Remove any HighlightFace if they exist
        for node in mesh.getChildren():
            if isinstance(node, SmartSliceScene.HighlightFace):
                mesh.removeChild(node)
            elif isinstance(node, SmartSliceScene.Root):
                mesh.removeChild(node)

    # We have modifier meshes in the scene, we need to change their parent
    # to the intersecting printable node
    if self._changeParent():
        self._parentAssigned()

    # Ensure we have tools defined and apply them here
    use_tool = self._our_toolset[0]
    self.setToolVisibility(True)
    controller.setFallbackTool(use_tool)
    # Remember the currently active tool so it can be restored when leaving the stage.
    self._previous_tool = controller.getActiveTool()
    if self._previous_tool:
        controller.setActiveTool(use_tool)

    self._connector.updateSliceWidget()

    if self._invalid_scene_message and self._invalid_scene_message.visible:
        self._invalid_scene_message.hide()
def __init__(self):
    """Initialize the simulation view: default layer/path state, render-pass handles,
    preference registration, and the Wire Printing warning message.
    """
    super().__init__()

    # Layer range / selection state.
    self._max_layers = 0
    self._current_layer_num = 0
    self._minimum_layer_num = 0
    self._current_layer_mesh = None
    self._current_layer_jumps = None
    self._top_layers_job = None
    self._activity = False
    self._old_max_layers = 0

    # Path (within-layer) range / selection state.
    self._max_paths = 0
    self._current_path_num = 0
    self._minimum_path_num = 0
    self.currentLayerNumChanged.connect(self._onCurrentLayerNumChanged)

    self._busy = False
    self._simulation_running = False

    # Render pass / shader handles, created lazily elsewhere.
    self._ghost_shader = None
    self._layer_pass = None
    self._composite_pass = None
    self._old_layer_bindings = None
    self._simulationview_composite_shader = None
    self._old_composite_shader = None

    self._global_container_stack = None
    self._proxy = SimulationViewProxy()
    # self._controller is presumably provided by the View base class via super().__init__()
    # — TODO confirm against the base class.
    self._controller.getScene().getRoot().childrenChanged.connect(self._onSceneChanged)

    self._resetSettings()
    self._legend_items = None
    self._show_travel_moves = False
    self._nozzle_node = None

    # Register preferences with their defaults (no-ops when they already exist).
    Preferences.getInstance().addPreference("view/top_layer_count", 5)
    Preferences.getInstance().addPreference("view/only_show_top_layers", False)
    Preferences.getInstance().addPreference("view/force_layer_view_compatibility_mode", False)

    Preferences.getInstance().addPreference("layerview/layer_view_type", 0)
    Preferences.getInstance().addPreference("layerview/extruder_opacities", "")

    Preferences.getInstance().addPreference("layerview/show_travel_moves", False)
    Preferences.getInstance().addPreference("layerview/show_helpers", True)
    Preferences.getInstance().addPreference("layerview/show_skin", True)
    Preferences.getInstance().addPreference("layerview/show_infill", True)

    Preferences.getInstance().preferenceChanged.connect(self._onPreferencesChanged)
    self._updateWithPreferences()

    self._solid_layers = int(Preferences.getInstance().getValue("view/top_layer_count"))
    self._only_show_top_layers = bool(Preferences.getInstance().getValue("view/only_show_top_layers"))
    self._compatibility_mode = True  # for safety

    self._wireprint_warning_message = Message(catalog.i18nc("@info:status",
                                                            "Cura does not accurately display layers when Wire Printing is enabled"),
                                              title=catalog.i18nc("@info:title", "Simulation View"))
def processGCodeStream(self, stream: str) -> Optional[CuraSceneNode]:
    """Parse a g-code text stream into a scene node carrying layer data and the g-code list.

    Two passes over the stream: the first counts lines, collects the raw g-code list and
    detects whether layer markers are present; the second builds layer polygons per
    movement/tool/layer command. Shows a progress Message and can be cancelled via
    ``self._cancelled``.

    :param stream: Full g-code file contents as a single string.
    :return: A CuraSceneNode decorated with layer data and the g-code list, or None when
        there is no global stack or parsing was cancelled.
    """
    Logger.log("d", "Preparing to load GCode")
    self._cancelled = False
    # We obtain the filament diameter from the selected extruder to calculate line widths
    global_stack = CuraApplication.getInstance().getGlobalContainerStack()
    if not global_stack:
        return None
    self._filament_diameter = global_stack.extruders[str(self._extruder_number)].getProperty("material_diameter", "value")

    scene_node = CuraSceneNode()

    gcode_list = []
    self._is_layers_in_file = False

    self._extruder_offsets = self._extruderOffsets()  # dict with index the extruder number. can be empty

    ##############################################################################################
    ##  This part is where the action starts
    ##############################################################################################
    # First pass: count lines, keep the raw g-code, and detect layer markers.
    file_lines = 0
    current_line = 0
    for line in stream.split("\n"):
        file_lines += 1
        gcode_list.append(line + "\n")
        if not self._is_layers_in_file and line[:len(self._layer_keyword)] == self._layer_keyword:
            self._is_layers_in_file = True

    # Update the progress message roughly once per percent.
    file_step = max(math.floor(file_lines / 100), 1)

    self._clearValues()

    self._message = Message(catalog.i18nc("@info:status", "Parsing G-code"),
                            lifetime=0,
                            title=catalog.i18nc("@info:title", "G-code Details"))

    assert(self._message is not None)  # use for typing purposes
    self._message.setProgress(0)
    self._message.show()

    Logger.log("d", "Parsing Gcode...")

    current_position = Position(0, 0, 0, 0, [0])
    current_path = []  # type: List[List[float]]
    min_layer_number = 0
    negative_layers = 0
    previous_layer = 0
    self._previous_extrusion_value = 0.0

    # Second pass: actually interpret the commands.
    for line in stream.split("\n"):
        if self._cancelled:
            Logger.log("d", "Parsing Gcode file cancelled")
            return None
        current_line += 1

        if current_line % file_step == 0:
            self._message.setProgress(math.floor(current_line / file_lines * 100))
            Job.yieldThread()
        if len(line) == 0:
            continue

        # ";TYPE:" comments tell us what kind of line the following moves draw.
        if line.find(self._type_keyword) == 0:
            type = line[len(self._type_keyword):].strip()
            if type == "WALL-INNER":
                self._layer_type = LayerPolygon.InsetXType
            elif type == "WALL-OUTER":
                self._layer_type = LayerPolygon.Inset0Type
            elif type == "SKIN":
                self._layer_type = LayerPolygon.SkinType
            elif type == "SKIRT":
                self._layer_type = LayerPolygon.SkirtType
            elif type == "SUPPORT":
                self._layer_type = LayerPolygon.SupportType
            elif type == "FILL":
                self._layer_type = LayerPolygon.InfillType
            else:
                Logger.log("w", "Encountered a unknown type (%s) while parsing g-code.", type)

        # When the layer change is reached, the polygon is computed so we have just one layer per extruder
        if self._is_layers_in_file and line[:len(self._layer_keyword)] == self._layer_keyword:
            try:
                layer_number = int(line[len(self._layer_keyword):])
                self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0]))
                current_path.clear()
                # Start the new layer at the end position of the last layer
                current_path.append([current_position.x, current_position.y, current_position.z, current_position.f, current_position.e[self._extruder_number], LayerPolygon.MoveCombingType])

                # When using a raft, the raft layers are stored as layers < 0, it mimics the same behavior
                # as in ProcessSlicedLayersJob
                if layer_number < min_layer_number:
                    min_layer_number = layer_number
                if layer_number < 0:
                    layer_number += abs(min_layer_number)
                    negative_layers += 1
                else:
                    layer_number += negative_layers

                # In case there is a gap in the layer count, empty layers are created
                for empty_layer in range(previous_layer + 1, layer_number):
                    self._createEmptyLayer(empty_layer)

                self._layer_number = layer_number
                previous_layer = layer_number
            except:
                # NOTE(review): bare except deliberately skips malformed layer markers,
                # but it also hides unrelated errors — consider narrowing to ValueError.
                pass

        # This line is a comment. Ignore it (except for the layer_keyword)
        if line.startswith(";"):
            continue

        G = self._getInt(line, "G")
        if G is not None:
            # When find a movement, the new posistion is calculated and added to the current_path, but
            # don't need to create a polygon until the end of the layer
            current_position = self.processGCode(G, line, current_position, current_path)
            continue

        # When changing the extruder, the polygon with the stored paths is computed
        if line.startswith("T"):
            T = self._getInt(line, "T")
            if T is not None:
                self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0]))
                current_path.clear()

                # When changing tool, store the end point of the previous path, then process the code and finally
                # add another point with the new position of the head.
                current_path.append([current_position.x, current_position.y, current_position.z, current_position.f, current_position.e[self._extruder_number], LayerPolygon.MoveCombingType])
                current_position = self.processTCode(T, line, current_position, current_path)
                current_path.append([current_position.x, current_position.y, current_position.z, current_position.f, current_position.e[self._extruder_number], LayerPolygon.MoveCombingType])

        if line.startswith("M"):
            M = self._getInt(line, "M")
            self.processMCode(M, line, current_position, current_path)

    # "Flush" leftovers. Last layer paths are still stored
    if len(current_path) > 1:
        if self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0])):
            self._layer_number += 1
            current_path.clear()

    # Fixed 8-entry RGBA palette used to color the lines per extruder.
    material_color_map = numpy.zeros((8, 4), dtype = numpy.float32)
    material_color_map[0, :] = [0.0, 0.7, 0.9, 1.0]
    material_color_map[1, :] = [0.7, 0.9, 0.0, 1.0]
    material_color_map[2, :] = [0.9, 0.0, 0.7, 1.0]
    material_color_map[3, :] = [0.7, 0.0, 0.0, 1.0]
    material_color_map[4, :] = [0.0, 0.7, 0.0, 1.0]
    material_color_map[5, :] = [0.0, 0.0, 0.7, 1.0]
    material_color_map[6, :] = [0.3, 0.3, 0.3, 1.0]
    material_color_map[7, :] = [0.7, 0.7, 0.7, 1.0]
    layer_mesh = self._layer_data_builder.build(material_color_map)
    decorator = LayerDataDecorator()
    decorator.setLayerData(layer_mesh)
    scene_node.addDecorator(decorator)

    gcode_list_decorator = GCodeListDecorator()
    gcode_list_decorator.setGCodeList(gcode_list)
    scene_node.addDecorator(gcode_list_decorator)

    # gcode_dict stores gcode_lists for a number of build plates.
    active_build_plate_id = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
    gcode_dict = {active_build_plate_id: gcode_list}
    CuraApplication.getInstance().getController().getScene().gcode_dict = gcode_dict  # type: ignore #Because gcode_dict is generated dynamically.

    Logger.log("d", "Finished parsing Gcode")
    self._message.hide()

    if self._layer_number == 0:
        Logger.log("w", "File doesn't contain any valid layers")

    settings = CuraApplication.getInstance().getGlobalContainerStack()
    if not settings.getProperty("machine_center_is_zero", "value"):
        # Machine origin is at a corner: shift the node by half the bed size to center it.
        machine_width = settings.getProperty("machine_width", "value")
        machine_depth = settings.getProperty("machine_depth", "value")
        scene_node.setPosition(Vector(-machine_width / 2, 0, machine_depth / 2))

    Logger.log("d", "GCode loading finished")

    if CuraApplication.getInstance().getPreferences().getValue("gcodereader/show_caution"):
        caution_message = Message(catalog.i18nc(
            "@info:generic",
            "Make sure the g-code is suitable for your printer and printer configuration before sending the file to it. The g-code representation may not be accurate."),
            lifetime=0,
            title = catalog.i18nc("@info:title", "G-code Details"))
        caution_message.show()

    # The "save/print" button's state is bound to the backend state.
    backend = CuraApplication.getInstance().getBackend()
    backend.backendStateChange.emit(Backend.BackendState.Disabled)

    return scene_node
def exportQualityProfile(self, container_list, file_name, file_type) -> bool:
    """Export a quality profile to a file.

    ``file_type`` has the form "<description> (*.<extension>)" and is parsed to
    find the matching profile writer plugin.

    :param container_list: The containers to write out.
    :param file_name: Target path; the extension is appended when missing.
    :param file_type: Format descriptor string.
    :return: True when the profile was written successfully, False otherwise.
    """
    # Locate the boundary between the description and the "(*.<ext>)" suffix.
    marker = file_type.rfind(" (*.")
    if marker < 0:
        # No marker found: the format identifier is malformed.
        Logger.log("e", "Invalid file format identifier %s", file_type)
        return False
    description = file_type[:marker]
    extension = file_type[marker + 4:-1]  # Leave out the " (*." and ")".

    # Auto-fill the extension if the user did not provide any.
    if not file_name.endswith("." + extension):
        file_name += "." + extension

    # On Windows, QML FileDialog properly asks for overwrite confirm, but not on other platforms, so handle those ourself.
    if not Platform.isWindows() and os.path.exists(file_name):
        answer = QMessageBox.question(
            None,
            catalog.i18nc("@title:window", "File Already Exists"),
            catalog.i18nc("@label Don't translate the XML tag <filename>!",
                          "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?").format(file_name))
        if answer == QMessageBox.No:
            return False

    profile_writer = self._findProfileWriter(extension, description)
    try:
        write_ok = profile_writer.write(file_name, container_list)
    except Exception as e:
        Logger.log("e", "Failed to export profile to %s: %s", file_name, str(e))
        failure_message = Message(catalog.i18nc("@info:status Don't translate the XML tags <filename> or <message>!",
                                                "Failed to export profile to <filename>{0}</filename>: <message>{1}</message>",
                                                file_name, str(e)),
                                  lifetime=0,
                                  title=catalog.i18nc("@info:title", "Error"))
        failure_message.show()
        return False

    if not write_ok:
        Logger.log("w", "Failed to export profile to %s: Writer plugin reported failure.", file_name)
        failure_message = Message(catalog.i18nc("@info:status Don't translate the XML tag <filename>!",
                                                "Failed to export profile to <filename>{0}</filename>: Writer plugin reported failure.",
                                                file_name),
                                  lifetime=0,
                                  title=catalog.i18nc("@info:title", "Error"))
        failure_message.show()
        return False

    success_message = Message(catalog.i18nc("@info:status Don't translate the XML tag <filename>!",
                                            "Exported profile to <filename>{0}</filename>",
                                            file_name),
                              title=catalog.i18nc("@info:title", "Export succeeded"))
    success_message.show()
    return True
def exportProfile(self, instance_ids, file_name, file_type):
    """Export one or more profile instance containers to a single file.

    :param instance_ids: IDs of the instance containers to export.
    :param file_name: Destination path; the extension is appended when missing.
    :param file_type: File-type identifier, "<description> (*.<extension>)".
    """
    # Parse the fileType to deduce what plugin can save the file format.
    # fileType has the format "<description> (*.<extension>)"
    split = file_type.rfind(" (*.")  # Find where the description ends and the extension starts.
    if split < 0:  # Not found. Invalid format.
        Logger.log("e", "Invalid file format identifier %s", file_type)
        return
    description = file_type[:split]
    extension = file_type[split + 4:-1]  # Leave out the " (*." and ")".
    if not file_name.endswith("." + extension):  # Auto-fill the extension if the user did not provide any.
        file_name += "." + extension

    # On Windows, QML FileDialog properly asks for overwrite confirm, but not on other platforms, so handle those ourself.
    if not Platform.isWindows():
        if os.path.exists(file_name):
            result = QMessageBox.question(
                None, catalog.i18nc("@title:window", "File Already Exists"),
                catalog.i18nc(
                    "@label",
                    "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?"
                ).format(file_name))
            if result == QMessageBox.No:
                return

    found_containers = []
    extruder_positions = []
    for instance_id in instance_ids:
        containers = ContainerRegistry.getInstance().findInstanceContainers(id=instance_id)
        if containers:
            found_containers.append(containers[0])

            # Determine the position of the extruder of this container
            extruder_id = containers[0].getMetaDataEntry("extruder", "")
            if extruder_id == "":
                # Global stack gets position -1 so it sorts before all extruders.
                extruder_positions.append(-1)
            else:
                extruder_containers = ContainerRegistry.getInstance().findDefinitionContainers(id=extruder_id)
                if extruder_containers:
                    extruder_positions.append(int(extruder_containers[0].getMetaDataEntry("position", 0)))
                else:
                    extruder_positions.append(0)

    # Ensure the profiles are always exported in order (global, extruder 0, extruder 1, ...).
    # FIX: sort on the position only. The previous sorted(zip(...)) fell back to comparing
    # the containers themselves when two positions tied, which raises a TypeError because
    # instance containers do not define an ordering.
    found_containers = [container for _position, container in
                        sorted(zip(extruder_positions, found_containers), key=lambda pair: pair[0])]

    profile_writer = self._findProfileWriter(extension, description)
    try:
        success = profile_writer.write(file_name, found_containers)
    except Exception as e:
        Logger.log("e", "Failed to export profile to %s: %s", file_name, str(e))
        m = Message(catalog.i18nc(
            "@info:status",
            "Failed to export profile to <filename>{0}</filename>: <message>{1}</message>",
            file_name, str(e)),
            lifetime=0)
        m.show()
        return
    if not success:
        # Writer reported failure without raising an exception.
        Logger.log("w", "Failed to export profile to %s: Writer plugin reported failure.", file_name)
        m = Message(catalog.i18nc(
            "@info:status",
            "Failed to export profile to <filename>{0}</filename>: Writer plugin reported failure.",
            file_name),
            lifetime=0)
        m.show()
        return
    m = Message(
        catalog.i18nc("@info:status",
                      "Exported profile to <filename>{0}</filename>",
                      file_name))
    m.show()
def run(self) -> None:
    """Multiply the selected objects and place the copies on the build plate.

    Runs as a Job: shows a progress Message, deep-copies each object (or its
    enclosing group) ``self._count`` times, asks the Arrange helper for a free
    spot for each copy, and finally adds all copies to the scene in one
    GroupedOperation. Copies that cannot be placed are stacked behind the
    build plate and a warning message is shown.
    """
    status_message = Message(i18n_catalog.i18nc("@info:status", "Multiplying and placing objects"), lifetime=0,
                             dismissable=False, progress=0,
                             title = i18n_catalog.i18nc("@info:title", "Placing Objects"))
    status_message.show()
    scene = Application.getInstance().getController().getScene()

    # One progress tick per placed copy.
    total_progress = len(self._objects) * self._count
    current_progress = 0

    global_container_stack = Application.getInstance().getGlobalContainerStack()
    if global_container_stack is None:
        return  # We can't do anything in this case.
    machine_width = global_container_stack.getProperty("machine_width", "value")
    machine_depth = global_container_stack.getProperty("machine_depth", "value")

    root = scene.getRoot()
    scale = 0.5  # Resolution scale for the arranger's occupancy grid.
    arranger = Arrange.create(x = machine_width, y = machine_depth, scene_root = root, scale = scale, min_offset = self._min_offset)
    processed_nodes = []  # type: List[SceneNode]  # Group roots already handled, to avoid duplicating a group per member.
    nodes = []

    not_fit_count = 0  # Copies that got no spot; used to stagger them behind the plate.
    found_solution_for_all = False
    for node in self._objects:
        # If object is part of a group, multiply group
        current_node = node
        while current_node.getParent() and (current_node.getParent().callDecoration("isGroup") or current_node.getParent().callDecoration("isSliceable")):
            current_node = current_node.getParent()

        if current_node in processed_nodes:
            continue
        processed_nodes.append(current_node)

        node_too_big = False
        # NOTE(review): this uses "or", so a node is only flagged too-big when BOTH its
        # width and depth exceed the machine — confirm whether "and" was intended.
        if node.getBoundingBox().width < machine_width or node.getBoundingBox().depth < machine_depth:
            offset_shape_arr, hull_shape_arr = ShapeArray.fromNode(current_node, min_offset = self._min_offset, scale = scale)
        else:
            node_too_big = True

        # NOTE(review): this flag is reset to True for every source object, so the final
        # warning only reflects the LAST object processed — verify this is intended.
        found_solution_for_all = True
        arranger.resetLastPriority()
        for _ in range(self._count):
            # We do place the nodes one by one, as we want to yield in between.
            new_node = copy.deepcopy(node)
            solution_found = False
            if not node_too_big:
                if offset_shape_arr is not None and hull_shape_arr is not None:
                    solution_found = arranger.findNodePlacement(new_node, offset_shape_arr, hull_shape_arr)
                else:
                    # The node has no shape, so no need to arrange it. The solution is simple: Do nothing.
                    solution_found = True

            if node_too_big or not solution_found:
                # No spot found: park the copy behind the plate, staggered per failure.
                found_solution_for_all = False
                new_location = new_node.getPosition()
                new_location = new_location.set(z = - not_fit_count * 20)
                new_node.setPosition(new_location)
                not_fit_count += 1

            # Same build plate
            build_plate_number = current_node.callDecoration("getBuildPlateNumber")
            new_node.callDecoration("setBuildPlateNumber", build_plate_number)
            for child in new_node.getChildren():
                child.callDecoration("setBuildPlateNumber", build_plate_number)

            nodes.append(new_node)
            current_progress += 1
            status_message.setProgress((current_progress / total_progress) * 100)
            # Keep the UI responsive while this job holds the GIL.
            QCoreApplication.processEvents()
            Job.yieldThread()

        QCoreApplication.processEvents()
        Job.yieldThread()

    if nodes:
        # Add all copies in one undoable operation.
        operation = GroupedOperation()
        for new_node in nodes:
            # NOTE(review): all copies are parented to the LAST processed node's parent —
            # confirm this is correct when multiplying objects from different parents.
            operation.addOperation(AddSceneNodeOperation(new_node, current_node.getParent()))
        operation.push()
    status_message.hide()

    if not found_solution_for_all:
        no_full_solution_message = Message(i18n_catalog.i18nc("@info:status", "Unable to find a location within the build volume for all objects"),
                                           title = i18n_catalog.i18nc("@info:title", "Placing Object"))
        no_full_solution_message.show()
def execute(self, data): global bM, bG, bShort, _msg global cmdtab _msg = '' cmdtab = None self.getVar() lastE = 0 lineNo = 0 for layer_number, layer in enumerate(data): lines = layer.split("\n") i1 = 0 for line in lines: lineNo = lineNo + 1 if line != '': if not ';' in line: arline = line.split(' ') cmd = arline[0] if cmd[0] == 'G': if cmd != 'G1' and cmd != 'G0': if bG == True: line = self.addCmd(line, cmd, arline[1:]) else: actE = Script.getValue(self, line=line, key='E') if actE != None: actE = float(actE) if actE < lastE: retract = actE - lastE line = line + '; --> Retract ' + str( round(retract, 2)) + ' mm' if i1 > 0: print(' line ' + str(lineNo - 1) + ' : ' + lines[i1 - 1]) print(' line ' + str(lineNo) + ' : ' + line) else: print(' line ' + str(lineNo) + ' : ' + line) lastE = actE if cmd == 'G92': lastE = Script.getValue(self, line=line, key='E') if cmd == 'G91': lastE = 0 elif cmd[0] == 'M': if bM == True: line = self.addCmd(line, cmd, arline[1:]) elif cmd[0] == 'T': line = line + '; --> Activation Extruder ' + cmd[1] lines[i1] = line i1 = i1 + 1 sep = '\n' data[layer_number] = sep.join(lines) if _msg != None and _msg != '': Message("Info Comment G-Code:" + "\n" + _msg, title=catalog.i18nc("@info:title", "Post Processing")).show() return data
def run(self):
    """Check a remote JSON feed for a newer application version.

    Downloads ``self._url``, parses the per-platform version entries for this
    application, and calls ``self.showUpdate`` when a newer version than the
    running one is found. Unless ``self.silent`` is set, informational
    messages are shown for failures and for the "no new version" case.
    """
    if not self._url:
        Logger.log("e", "Can not check for a new release. URL not set!")
    no_new_version = True

    application_name = Application.getInstance().getApplicationName()
    Logger.log("i", "Checking for new version of %s" % application_name)
    try:
        headers = {"User-Agent": "%s - %s" % (application_name, Application.getInstance().getVersion())}
        request = urllib.request.Request(self._url, headers = headers)
        latest_version_file = urllib.request.urlopen(request)
    except Exception as e:
        Logger.log("w", "Failed to check for new version: %s" % e)
        if not self.silent:
            Message(i18n_catalog.i18nc("@info", "Could not access update information."),
                    title = i18n_catalog.i18nc("@info:title", "Version Upgrade")
                    ).show()
        return

    try:
        reader = codecs.getreader("utf-8")
        data = json.load(reader(latest_version_file))
        try:
            # FIX: this used "is not" (identity) to compare against the string literal
            # "master", which is unreliable and a SyntaxWarning on modern CPython;
            # use equality instead.
            if Application.getInstance().getVersion() != "master":
                local_version = Version(Application.getInstance().getVersion())
            else:
                # Development builds have no comparable version number.
                if not self.silent:
                    Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                            title = i18n_catalog.i18nc("@info:title", "Warning")).show()
                return
        except ValueError:
            Logger.log("w", "Could not determine application version from string %s, not checking for updates", Application.getInstance().getVersion())
            if not self.silent:
                Message(i18n_catalog.i18nc("@info", "The version you are using does not support checking for updates."),
                        title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
            return

        if application_name in data:
            for key, value in data[application_name].items():
                if "major" in value and "minor" in value and "revision" in value and "url" in value:
                    # The key is the platform name; renamed from "os" to avoid
                    # shadowing the os module.
                    os_name = key
                    if platform.system() == os_name:  #TODO: add architecture check
                        newest_version = Version([int(value["major"]), int(value["minor"]), int(value["revision"])])
                        if local_version < newest_version:
                            preferences = Application.getInstance().getPreferences()
                            latest_version_shown = preferences.getValue("info/latest_update_version_shown")
                            if latest_version_shown == newest_version and not self.display_same_version:
                                continue  # Don't show this update again. The user already clicked it away and doesn't want it again.
                            preferences.setValue("info/latest_update_version_shown", str(newest_version))
                            Logger.log("i", "Found a new version of the software. Spawning message")
                            self.showUpdate(newest_version, value["url"])
                            no_new_version = False
                            break
                else:
                    Logger.log("w", "Could not find version information or download url for update.")
        else:
            Logger.log("w", "Did not find any version information for %s." % application_name)
    except Exception:
        Logger.logException("e", "Exception in update checker while parsing the JSON file.")
        Message(i18n_catalog.i18nc("@info", "An error occurred while checking for updates."),
                title = i18n_catalog.i18nc("@info:title", "Error")).show()
        no_new_version = False  # Just to suppress the message below.

    if no_new_version and not self.silent:
        Message(i18n_catalog.i18nc("@info", "No new version was found."),
                title = i18n_catalog.i18nc("@info:title", "Version Upgrade")).show()
def updateError(self, errorCode): Logger.log("e", "updateError: %s", repr(errorCode)) self._message = Message(catalog.i18nc("@info:status","There was an error updating {}").format(self._name)) self._message.show()
def requestWrite(self, node, file_name=None, limit_mimetypes=None):
    """Ask the user for a local file path and write ``node`` to it in a background job.

    :param node: Scene node(s) handed to the selected mesh writer.
    :param file_name: Optional suggested file name.
    :param limit_mimetypes: Optional collection of MIME types used to filter the writable file types.
    :raises OutputDeviceError.DeviceBusyError: When a previous write is still running.
    :raises OutputDeviceError.WriteRequestFailedError: When no writers exist or the OS refuses the write.
    :raises OutputDeviceError.UserCanceledError: When the user cancels the dialog or refuses to overwrite.
    :raises OutputDeviceError.PermissionDeniedError: When the file cannot be opened for writing.
    """
    if self._writing:
        raise OutputDeviceError.DeviceBusyError()

    # Set up the save dialog.
    dialog = QFileDialog()
    dialog.setWindowTitle(catalog.i18nc("@title:window", "Save to File"))
    dialog.setFileMode(QFileDialog.AnyFile)
    dialog.setAcceptMode(QFileDialog.AcceptSave)
    default_save_path = os.path.expanduser("~/")
    dialog.setDirectory(default_save_path)

    # Ensure platform never ask for overwrite confirmation since we do this ourselves
    dialog.setOption(QFileDialog.DontConfirmOverwrite)

    if sys.platform == "linux" and "KDE_FULL_SESSION" in os.environ:
        dialog.setOption(QFileDialog.DontUseNativeDialog)

    filters = []
    mime_types = []
    selected_filter = None
    last_used_type = Preferences.getInstance().getValue("local_file/last_used_type")

    file_types = Application.getInstance().getMeshFileHandler().getSupportedFileTypesWrite()
    file_types.sort(key=lambda k: k["description"])
    if limit_mimetypes:
        file_types = list(filter(lambda i: i["mime_type"] in limit_mimetypes, file_types))

    if len(file_types) == 0:
        Logger.log("e", "There are no file types available to write with!")
        raise OutputDeviceError.WriteRequestFailedError()

    # Build the name filters and pre-select the one the user used last time.
    for item in file_types:
        type_filter = "{0} (*.{1})".format(item["description"], item["extension"])
        filters.append(type_filter)
        mime_types.append(item["mime_type"])
        if last_used_type == item["mime_type"]:
            selected_filter = type_filter
            if file_name:
                file_name += "." + item["extension"]

    dialog.setNameFilters(filters)
    if selected_filter != None:
        dialog.selectNameFilter(selected_filter)
    if file_name != None:
        dialog.selectFile(file_name)

    # FIX: guard against an unset preference; calling .encode() on a missing value
    # crashed here before any dialog state had ever been saved.
    dialog_state = Preferences.getInstance().getValue("local_file/dialog_state")
    if dialog_state:
        dialog.restoreState(dialog_state.encode())

    if not dialog.exec_():
        raise OutputDeviceError.UserCanceledError()
    Preferences.getInstance().setValue("local_file/dialog_state", str(dialog.saveState()))

    selected_type = file_types[filters.index(dialog.selectedNameFilter())]
    Preferences.getInstance().setValue("local_file/last_used_type", selected_type["mime_type"])
    file_name = dialog.selectedFiles()[0]

    # The dialog was told not to confirm overwrites, so do it ourselves.
    if os.path.exists(file_name):
        result = QMessageBox.question(
            None, catalog.i18nc("@title:window", "File Already Exists"),
            catalog.i18nc(
                "@label",
                "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?"
            ).format(file_name))
        if result == QMessageBox.No:
            raise OutputDeviceError.UserCanceledError()

    self.writeStarted.emit(self)
    mesh_writer = Application.getInstance().getMeshFileHandler().getWriter(selected_type["id"])
    try:
        mode = selected_type["mode"]
        if mode == MeshWriter.OutputMode.TextMode:
            Logger.log("d", "Writing to Local File %s in text mode", file_name)
            stream = open(file_name, "wt")
        elif mode == MeshWriter.OutputMode.BinaryMode:
            Logger.log("d", "Writing to Local File %s in binary mode", file_name)
            stream = open(file_name, "wb")
        else:
            # FIX: an unrecognised mode previously left "stream" unbound and raised a
            # confusing NameError below; log and bail out instead.
            Logger.log("e", "Unrecognised OutputMode.")
            return None

        job = WriteMeshJob(mesh_writer, stream, node, mode)
        job.setFileName(file_name)
        job.progress.connect(self._onJobProgress)
        job.finished.connect(self._onWriteJobFinished)

        message = Message(
            catalog.i18nc(
                "@info:progress",
                "Saving to <filename>{0}</filename>").format(file_name),
            0, False, -1)
        message.show()

        job._message = message  # NOTE(review): pokes a private attribute; presumably read by the job callbacks — verify.
        self._writing = True
        job.start()
    except PermissionError as e:
        Logger.log("e", "Permission denied when trying to write to %s: %s", file_name, str(e))
        raise OutputDeviceError.PermissionDeniedError(
            catalog.i18nc(
                "@info:status",
                "Permission denied when trying to save <filename>{0}</filename>"
            ).format(file_name)) from e
    except OSError as e:
        Logger.log("e", "Operating system would not let us write to %s: %s", file_name, str(e))
        # FIX: .format() was called without arguments here, which raises IndexError on
        # the {0}/{1} placeholders and masked the real OSError; pass the values,
        # matching the sibling implementation.
        raise OutputDeviceError.WriteRequestFailedError(
            catalog.i18nc(
                "@info:status",
                "Could not save to <filename>{0}</filename>: <message>{1}</message>"
            ).format(file_name, str(e))) from e
def _unknownError(self): message = Message(catalog.i18nc("@info:status", "Unable to connect to {}, there was an unknown error").format(self._name), 0, False) message.show()
def requestWrite(self, nodes, file_name=None, limit_mimetypes=None, file_handler=None, **kwargs):
    """Request the specified nodes to be written to a file.

    :param nodes: A collection of scene nodes that should be written to the file.
    :param file_name: A suggestion for the file name to write to. Can be freely ignored if providing a file name makes no sense.
    :param limit_mimetypes: Should we limit the available MIME types to the MIME types available to the currently active machine?
    :param file_handler: File handler providing the writers; falls back to the application's mesh file handler.
    :param kwargs: Keyword arguments. "preferred_mimetypes" (a ";"-separated string) overrides the last-used-type preference.
    """
    if self._writing:
        raise OutputDeviceError.DeviceBusyError()

    # Set up and display file dialog
    dialog = QFileDialog()
    dialog.setWindowTitle(catalog.i18nc("@title:window", "Save to Disk"))
    dialog.setFileMode(QFileDialog.AnyFile)
    dialog.setAcceptMode(QFileDialog.AcceptSave)

    # Ensure platform never ask for overwrite confirmation since we do this ourselves
    dialog.setOption(QFileDialog.DontConfirmOverwrite)

    if sys.platform == "linux" and "KDE_FULL_SESSION" in os.environ:
        dialog.setOption(QFileDialog.DontUseNativeDialog)

    filters = []
    mime_types = []
    selected_filter = None

    # Determine which MIME types the caller (or the last-used preference) prefers.
    if "preferred_mimetypes" in kwargs and kwargs["preferred_mimetypes"] is not None:
        preferred_mimetypes = kwargs["preferred_mimetypes"]
    else:
        preferred_mimetypes = Application.getInstance().getPreferences().getValue("local_file/last_used_type")
    preferred_mimetype_list = preferred_mimetypes.split(";")

    if not file_handler:
        file_handler = Application.getInstance().getMeshFileHandler()

    file_types = file_handler.getSupportedFileTypesWrite()
    file_types.sort(key=lambda k: k["description"])
    if limit_mimetypes:
        file_types = list(filter(lambda i: i["mime_type"] in limit_mimetypes, file_types))

    # Writers can opt out of appearing in the save dialog.
    file_types = [ft for ft in file_types if not ft["hide_in_file_dialog"]]

    if len(file_types) == 0:
        Logger.log("e", "There are no file types available to write with!")
        raise OutputDeviceError.WriteRequestFailedError(
            catalog.i18nc("@info:warning", "There are no file types available to write with!"))

    # Find the first available preferred mime type
    preferred_mimetype = None
    for mime_type in preferred_mimetype_list:
        if any(ft["mime_type"] == mime_type for ft in file_types):
            preferred_mimetype = mime_type
            break

    # Build the dialog's name filters; append the preferred extension to the
    # suggested file name at most once.
    extension_added = False
    for item in file_types:
        type_filter = "{0} (*.{1})".format(item["description"], item["extension"])
        filters.append(type_filter)
        mime_types.append(item["mime_type"])
        if preferred_mimetype == item["mime_type"]:
            selected_filter = type_filter
            if file_name and not extension_added:
                extension_added = True
                file_name += "." + item["extension"]

    # CURA-6411: This code needs to be before dialog.selectFile and the filters, because otherwise in macOS (for some reason) the setDirectory call doesn't work.
    stored_directory = Application.getInstance().getPreferences().getValue("local_file/dialog_save_path")
    if stored_directory and stored_directory != "" and os.path.exists(stored_directory):
        dialog.setDirectory(stored_directory)

    # Add the file name before adding the extension to the dialog
    if file_name is not None:
        dialog.selectFile(file_name)

    dialog.setNameFilters(filters)
    if selected_filter is not None:
        dialog.selectNameFilter(selected_filter)

    if not dialog.exec_():
        raise OutputDeviceError.UserCanceledError()

    # Remember the chosen directory and type for the next save.
    save_path = dialog.directory().absolutePath()
    Application.getInstance().getPreferences().setValue("local_file/dialog_save_path", save_path)

    selected_type = file_types[filters.index(dialog.selectedNameFilter())]
    Application.getInstance().getPreferences().setValue("local_file/last_used_type", selected_type["mime_type"])

    # Get file name from file dialog
    file_name = dialog.selectedFiles()[0]
    Logger.log("d", "Writing to [%s]..." % file_name)

    # The dialog was told not to confirm overwrites, so do it ourselves.
    if os.path.exists(file_name):
        result = QMessageBox.question(
            None, catalog.i18nc("@title:window", "File Already Exists"),
            catalog.i18nc(
                "@label Don't translate the XML tag <filename>!",
                "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?"
            ).format(file_name))
        if result == QMessageBox.No:
            raise OutputDeviceError.UserCanceledError()

    # Actually writing file
    if file_handler:
        file_writer = file_handler.getWriter(selected_type["id"])
    else:
        file_writer = Application.getInstance().getMeshFileHandler().getWriter(selected_type["id"])

    if isinstance(file_writer, WorkspaceWriter):
        self.setLastOutputName(file_name)
    self.writeStarted.emit(self)

    try:
        mode = selected_type["mode"]
        if mode == MeshWriter.OutputMode.TextMode:
            Logger.log("d", "Writing to Local File %s in text mode", file_name)
            stream = open(file_name, "wt", encoding="utf-8")
        elif mode == MeshWriter.OutputMode.BinaryMode:
            Logger.log("d", "Writing to Local File %s in binary mode", file_name)
            stream = open(file_name, "wb")
        else:
            Logger.log("e", "Unrecognised OutputMode.")
            return None

        job = WriteFileJob(file_writer, stream, nodes, mode)
        job.setFileName(file_name)
        job.setAddToRecentFiles(True)  # The file will be added into the "recent files" list upon success
        job.progress.connect(self._onJobProgress)
        job.finished.connect(self._onWriteJobFinished)

        message = Message(
            catalog.i18nc(
                "@info:progress Don't translate the XML tags <filename>!",
                "Saving to <filename>{0}</filename>").format(file_name),
            0, False, -1, catalog.i18nc("@info:title", "Saving"))
        message.show()

        job.setMessage(message)
        self._writing = True
        job.start()
    except PermissionError as e:
        Logger.log("e", "Permission denied when trying to write to %s: %s", file_name, str(e))
        raise OutputDeviceError.PermissionDeniedError(
            catalog.i18nc(
                "@info:status Don't translate the XML tags <filename>!",
                "Permission denied when trying to save <filename>{0}</filename>"
            ).format(file_name)) from e
    except OSError as e:
        Logger.log("e", "Operating system would not let us write to %s: %s", file_name, str(e))
        raise OutputDeviceError.WriteRequestFailedError(
            catalog.i18nc(
                "@info:status Don't translate the XML tags <filename> or <message>!",
                "Could not save to <filename>{0}</filename>: <message>{1}</message>"
            ).format(file_name, str(e))) from e
def __init__(self, parent=None) -> None:
    """Initialise the simulation (layer) view: default state, preferences and warning messages.

    :param parent: Optional parent object passed to the base class.
    """
    super().__init__(parent)

    # Layer navigation state.
    self._max_layers = 0
    self._current_layer_num = 0
    self._minimum_layer_num = 0
    self._current_layer_mesh = None
    self._current_layer_jumps = None
    self._top_layers_job = None  # type: Optional["_CreateTopLayersJob"]
    self._activity = False
    self._old_max_layers = 0

    # Path (within-layer) navigation state.
    self._max_paths = 0
    self._current_path_num = 0
    self._minimum_path_num = 0
    self.currentLayerNumChanged.connect(self._onCurrentLayerNumChanged)

    self._busy = False
    self._simulation_running = False

    # Render-pass and shader handles, created lazily once the engine is up.
    self._ghost_shader = None  # type: Optional["ShaderProgram"]
    self._layer_pass = None  # type: Optional[SimulationPass]
    self._composite_pass = None  # type: Optional[CompositePass]
    self._old_layer_bindings = None  # type: Optional[List[str]]
    self._simulationview_composite_shader = None  # type: Optional["ShaderProgram"]
    self._old_composite_shader = None  # type: Optional["ShaderProgram"]

    # Feedrate/thickness ranges start inverted so the first real value replaces them.
    self._max_feedrate = sys.float_info.min
    self._min_feedrate = sys.float_info.max
    self._max_thickness = sys.float_info.min
    self._min_thickness = sys.float_info.max

    self._global_container_stack = None  # type: Optional[ContainerStack]
    self._proxy = None

    self._resetSettings()
    self._legend_items = None
    self._show_travel_moves = False
    self._nozzle_node = None  # type: Optional[NozzleNode]

    # Register every preference this view reads, with its default value.
    Application.getInstance().getPreferences().addPreference("view/top_layer_count", 5)
    Application.getInstance().getPreferences().addPreference("view/only_show_top_layers", False)
    Application.getInstance().getPreferences().addPreference("view/force_layer_view_compatibility_mode", False)

    Application.getInstance().getPreferences().addPreference("layerview/layer_view_type", 0)
    Application.getInstance().getPreferences().addPreference("layerview/extruder_opacities", "")

    Application.getInstance().getPreferences().addPreference("layerview/show_travel_moves", False)
    Application.getInstance().getPreferences().addPreference("layerview/show_helpers", True)
    Application.getInstance().getPreferences().addPreference("layerview/show_skin", True)
    Application.getInstance().getPreferences().addPreference("layerview/show_infill", True)

    # Load the initial values of the preferences registered above.
    self._updateWithPreferences()

    self._solid_layers = int(Application.getInstance().getPreferences().getValue("view/top_layer_count"))
    self._only_show_top_layers = bool(Application.getInstance().getPreferences().getValue("view/only_show_top_layers"))
    self._compatibility_mode = self._evaluateCompatibilityMode()

    # Warning shown while Wire Printing is active (layer display is inaccurate then).
    self._wireprint_warning_message = Message(catalog.i18nc(
        "@info:status",
        "Cura does not accurately display layers when Wire Printing is enabled."),
        title=catalog.i18nc("@info:title", "Simulation View"))
    # Warning shown when the view is opened before anything was sliced; the user can opt out.
    self._slice_first_warning_message = Message(
        catalog.i18nc("@info:status", "Nothing is shown because you need to slice first."),
        title=catalog.i18nc("@info:title", "No layers to show"),
        option_text=catalog.i18nc("@info:option_text", "Do not show this message again"),
        option_state=False)
    self._slice_first_warning_message.optionToggled.connect(self._onDontAskMeAgain)
    CuraApplication.getInstance().getPreferences().addPreference(self._no_layers_warning_preference, True)

    QtApplication.getInstance().engineCreatedSignal.connect(self._onEngineCreated)