def on_event(self, event, payload):
    """Handle OctoPrint file events for .ufp packages.

    On ``FileAdded`` of a ufp file the upload is renamed to ``*.gcode`` and
    queued for analysis; on ``FileRemoved`` of a ``*.ufp.gcode`` file the
    plugin's sidecar thumbnail/.ufp artifacts are deleted.

    :param event: OctoPrint event name
    :param payload: event payload dict (``name``, ``path``, ``type``,
        ``storage`` keys — schema per OctoPrint's Events documentation)
    """
    if event == "FileAdded" and "ufp" in payload["type"]:
        # Add ufp file to analysis queue under its .gcode alias.
        old_name = self._settings.global_get_basefolder(
            "uploads") + "/" + payload["path"]
        new_name = old_name + ".gcode"
        # os.rename raises (on Windows) if the destination already exists,
        # e.g. when the same ufp is re-uploaded — clear any stale file first.
        if os.path.exists(new_name):
            os.remove(new_name)
        os.rename(old_name, new_name)
        printer_profile = self._printer_profile_manager.get("_default")
        # OctoPrint > 1.3.9 added an extra QueueEntry constructor argument.
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile,
                               None)
        else:
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile)
        self._analysis_queue.enqueue(entry, high_priority=True)
    if event == "FileRemoved" and payload["name"].endswith(".ufp.gcode"):
        # Use the full storage path (not just the file name) so sidecar
        # files belonging to uploads inside subfolders are found and removed.
        thumbnail = "%s/%s" % (self.get_plugin_data_folder(),
                               payload["path"].replace(".ufp.gcode", ".png"))
        ufp_file = "%s/%s" % (self.get_plugin_data_folder(),
                              payload["path"].replace(".ufp.gcode", ".ufp"))
        if os.path.exists(thumbnail):
            os.remove(thumbnail)
        if os.path.exists(ufp_file):
            os.remove(ufp_file)
def _refresh_repository(self, repo_data=None):
    """Refresh the cached plugin repository listing.

    :param repo_data: optional pre-fetched repository entries; fetched from
        the configured URL when ``None``
    :return: ``True`` on success, ``False`` if the repository could not be
        fetched
    """
    if repo_data is None:
        repo_data = self._fetch_repository_from_url()
        if repo_data is None:
            return False

    current_os = get_os()
    octoprint_version = get_octoprint_version(base=True)

    def map_repository_entry(entry):
        # Work on a copy so the upstream repository data stays untouched.
        result = copy.deepcopy(entry)

        if "follow_dependency_links" not in result:
            result["follow_dependency_links"] = False

        # Assume compatible unless the entry declares restrictions.
        result["is_compatible"] = dict(octoprint=True, os=True)

        if "compatibility" in entry:
            compatibility = entry["compatibility"]

            # A missing key yields None, which fails the isinstance check —
            # same outcome as the original explicit presence tests.
            octo_restrictions = compatibility.get("octoprint")
            if isinstance(octo_restrictions, (list, tuple)) and len(octo_restrictions):
                result["is_compatible"]["octoprint"] = is_octoprint_compatible(
                    *octo_restrictions, octoprint_version=octoprint_version)

            os_restrictions = compatibility.get("os")
            if isinstance(os_restrictions, (list, tuple)) and len(os_restrictions):
                result["is_compatible"]["os"] = self._is_os_compatible(
                    current_os, os_restrictions)

        return result

    # Materialize with list(): under Python 3 a bare map() is a one-shot
    # iterator that would be silently exhausted after the first iteration
    # over self._repository_plugins.
    self._repository_plugins = list(map(map_repository_entry, repo_data))
    return True
def download_file(self, action):
    """Download a print file from MyMiniFactory and hand it to OctoPrint.

    Fetches the gcode for ``action["task_id"]`` via the MyMiniFactory API,
    saves it into the uploads folder, queues it for analysis and — if the
    printer is ready — selects it (optionally auto-starting the print).

    :param action: dict with at least ``task_id`` and ``filename`` keys
        (schema presumed from usage — TODO confirm against the caller)
    """
    # Make API call to MyMiniFactory to download gcode file.
    url = "https://www.myminifactory.com/api/v2/print-file"
    payload = dict(task_id=action["task_id"],
                   printer_token=self._settings.get(["printer_token"]))
    headers = {'X-Api-Key': self._settings.get(["client_key"])}
    self._logger.debug("Sending parameters: %s with header: %s" %
                       (payload, headers))
    response = requests.get(url, params=payload, headers=headers)
    if response.status_code == 200:
        # Save file to uploads folder, sanitizing the name for local storage.
        gcode_file_name = self._file_manager.sanitize_name(
            "local", action["filename"])
        gcode_download_file = "%s/%s" % (
            self._settings.global_get_basefolder("uploads"), gcode_file_name)
        self._logger.debug("Saving file: %s" % gcode_download_file)
        # NOTE(review): text-mode write of response.text — presumably fine
        # for ASCII gcode, but encoding is platform-dependent; verify.
        with open(gcode_download_file, 'w') as f:
            f.write(response.text)
        # Add downloaded file to analysisqueue.
        printer_profile = self._printer_profile_manager.get("_default")
        # OctoPrint > 1.3.9 added an extra QueueEntry constructor argument.
        if version.get_octoprint_version(
        ) > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(gcode_file_name, gcode_download_file, "gcode",
                               "local", gcode_download_file, printer_profile,
                               None)
        else:
            entry = QueueEntry(gcode_file_name, gcode_download_file, "gcode",
                               "local", gcode_download_file, printer_profile)
        self._analysis_queue.enqueue(entry, high_priority=True)
        # Select file downloaded and start printing if auto_start_print is
        # enabled and not already printing.
        if self._printer.is_ready():
            self._mmf_print = True  # flag consumed elsewhere in the plugin
            self._printer.select_file(
                gcode_file_name, False,
                printAfterSelect=self._settings.get_boolean(
                    ["auto_start_print"]))
        else:
            self._logger.debug(
                "Printer not ready, not selecting file to print.")
    else:
        # Surface the API failure to the frontend via a plugin message.
        self._logger.debug("API Error: %s" % response)
        self._plugin_manager.send_plugin_message(
            self._identifier, dict(error=response.status_code))
def on_settings_save(self, data):
    """Persist settings and, if the tab lists changed, push the new tab
    order into OctoPrint's global appearance configuration.

    :param data: settings payload forwarded to the base SettingsPlugin
    """
    old_tabs = self._settings.get(["tabs"]) + self._settings.get(["hidden_tabs"])
    octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
    new_tabs = self._settings.get(["tabs"]) + self._settings.get(["hidden_tabs"])

    if old_tabs == new_tabs:
        return

    self._logger.info(
        "tabs changed from {old_tabs} to {new_tabs} reordering tabs.".format(
            old_tabs=old_tabs, new_tabs=new_tabs))

    # On OctoPrint > 1.4.0 the gcode viewer became a plugin, so its tab id
    # carries a "plugin_" prefix; all other tabs keep their plain name.
    flattened_tabs = [
        "plugin_{}".format(tab["name"])
        if version.get_octoprint_version() > version.get_comparable_version("1.4.0")
        and tab["name"] == "gcodeviewer"
        else tab["name"]
        for tab in new_tabs
    ]

    self._settings.global_set(["appearance", "components", "order", "tab"],
                              flattened_tabs)
def on_event(self, event, payload):
    """Handle OctoPrint file events for UltimakerFormatPackage (.ufp) files.

    ``FileAdded``: rename the uploaded ufp to ``*.gcode`` and queue it for
    analysis. ``MetadataAnalysisFinished``: attach the extracted thumbnail
    URL to the file's metadata refs. ``FileRemoved``: delete the plugin's
    sidecar thumbnail/.ufp artifacts.

    :param event: OctoPrint event name
    :param payload: event payload dict (``name``, ``path``, ``type``,
        ``storage``, ``origin`` keys depending on the event)
    """
    if event == "FileAdded" and "ufp" in payload["type"]:
        # Add ufp file to analysisqueue
        old_name = self._settings.global_get_basefolder(
            "uploads") + "/" + payload["path"]
        new_name = old_name + ".gcode"
        # Clear any stale target first: os.rename would fail on Windows
        # if the .gcode alias already exists from a previous upload.
        if os.path.exists(new_name):
            os.remove(new_name)
        os.rename(old_name, new_name)
        printer_profile = self._printer_profile_manager.get("_default")
        # OctoPrint > 1.3.9 added an extra QueueEntry constructor argument.
        if version.get_octoprint_version(
        ) > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile,
                               None)
        else:
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile)
        self._analysis_queue.enqueue(entry, high_priority=True)
    if event == "MetadataAnalysisFinished" and "ufp" in payload["path"]:
        self._logger.info('Adding thumbnail url')
        thumbnail_url = "/plugin/UltimakerFormatPackage/thumbnail/" + payload[
            "path"].replace(".ufp.gcode", ".png")
        # NOTE(review): reaches into the file manager's private _storage
        # accessor to merge the thumbnail ref into the file's metadata.
        self._storage_interface = self._file_manager._storage(
            payload.get("origin", "local"))
        self._storage_interface.set_additional_metadata(
            payload.get("path"), "refs", dict(thumbnail=thumbnail_url),
            merge=True)
    if event == "FileRemoved" and payload["name"].endswith(".ufp.gcode"):
        # Remove the sidecar thumbnail and original ufp kept in the
        # plugin's data folder; the path key handles files in subfolders.
        thumbnail = "%s/%s" % (self.get_plugin_data_folder(),
                               payload["path"].replace(
                                   ".ufp.gcode", ".png"))
        ufp_file = "%s/%s" % (self.get_plugin_data_folder(),
                              payload["path"].replace(
                                  ".ufp.gcode", ".ufp"))
        if os.path.exists(thumbnail):
            os.remove(thumbnail)
        if os.path.exists(ufp_file):
            os.remove(ufp_file)
def _refresh_repository(self, repo_data=None):
    """Refresh the cached plugin repository listing.

    :param repo_data: optional pre-fetched repository entries; fetched from
        the configured URL when ``None``
    :return: ``True`` on success, ``False`` if the repository could not be
        fetched
    """
    if repo_data is None:
        repo_data = self._fetch_repository_from_url()
        if repo_data is None:
            return False

    current_os = get_os()
    octoprint_version = get_octoprint_version(base=True)

    def map_repository_entry(entry):
        # Deep-copy so the annotated result never mutates the source data.
        result = copy.deepcopy(entry)

        if "follow_dependency_links" not in result:
            result["follow_dependency_links"] = False

        # Default to compatible; only restrict when the entry says so.
        result["is_compatible"] = dict(octoprint=True, os=True)

        if "compatibility" in entry:
            if "octoprint" in entry["compatibility"] and entry[
                    "compatibility"][
                        "octoprint"] is not None and isinstance(
                            entry["compatibility"]["octoprint"],
                            (list, tuple)) and len(
                                entry["compatibility"]["octoprint"]):
                result["is_compatible"][
                    "octoprint"] = is_octoprint_compatible(
                        *entry["compatibility"]["octoprint"],
                        octoprint_version=octoprint_version)

            if "os" in entry["compatibility"] and entry["compatibility"][
                    "os"] is not None and isinstance(
                        entry["compatibility"]["os"],
                        (list, tuple)) and len(
                            entry["compatibility"]["os"]):
                result["is_compatible"]["os"] = self._is_os_compatible(
                    current_os, entry["compatibility"]["os"])

        return result

    # list(...) materializes the result: under Python 3 a bare map() object
    # is a one-shot iterator and would be empty on any second iteration.
    self._repository_plugins = list(map(map_repository_entry, repo_data))
    return True
def _get_notifications(self, plugin):
    """Collect the notifications applicable to the given plugin.

    Returns ``None`` when the plugin is disabled or has no notices on
    record; otherwise a lazy iterable of converted, non-None notifications.
    """
    identifier = plugin.key
    if not plugin.enabled or identifier not in self._notices:
        return None

    installed_version = get_octoprint_version(base=True)
    notices = self._notices.get(identifier, [])

    def is_relevant(notice):
        # A notice must carry text and a date, and — if it restricts them —
        # match both the plugin version and the running OctoPrint version.
        if "text" not in notice or "date" not in notice:
            return False
        if "versions" in notice and plugin.version not in notice["versions"]:
            return False
        if "octoversions" in notice and not is_octoprint_compatible(
                *notice["octoversions"], octoprint_version=installed_version):
            return False
        return True

    def to_external(notice):
        return self._to_external_notification(identifier, notice)

    # Conversion may yield None for unusable notices; drop those.
    return filter(lambda converted: converted is not None,
                  map(to_external, filter(is_relevant, notices)))
def _flash_bossac(self, firmware=None, printer_port=None):
    """Flash *firmware* to a SAM-based board using the bossac tool.

    Resets the board into SAM-BA mode, runs bossac, streams its output to
    the console log and reports progress/errors via ``_send_status``.

    :param firmware: path to the firmware file to flash (required)
    :param printer_port: serial port of the printer board (required)
    :return: ``True`` on success, ``False`` on any failure
    """
    assert (firmware is not None)
    assert (printer_port is not None)

    bossac_path = self._settings.get(["bossac_path"])
    bossac_disableverify = self._settings.get(["bossac_disableverify"])

    working_dir = os.path.dirname(bossac_path)

    bossac_command = [
        bossac_path, "-i", "-p", printer_port, "-U", "false", "-e", "-w"
    ]
    if not bossac_disableverify:
        bossac_command += ["-v"]
    bossac_command += ["-b", firmware, "-R"]

    self._logger.info(u"Attempting to reset the board to SAM-BA")
    if not self._reset_1200(printer_port):
        self._logger.error(u"Reset failed")
        return False

    import sarge
    self._logger.info(u"Running %r in %s" %
                      (' '.join(bossac_command), working_dir))
    self._console_logger.info(" ".join(bossac_command))
    try:
        if version.get_octoprint_version(
        ) > version.get_comparable_version("1.3.9"):
            # Newer sarge (bundled with OctoPrint > 1.3.9) renamed the
            # keyword to async_.
            p = sarge.run(bossac_command,
                          cwd=working_dir,
                          async_=True,
                          stdout=sarge.Capture(buffer_size=1),
                          stderr=sarge.Capture(buffer_size=1))
        else:
            # Older sarge spells the keyword "async". Passing it via ** is
            # required so this file still compiles on Python >= 3.7, where
            # a literal async=True keyword is a SyntaxError.
            p = sarge.run(bossac_command,
                          cwd=working_dir,
                          stdout=sarge.Capture(buffer_size=1),
                          stderr=sarge.Capture(buffer_size=1),
                          **{"async": True})
        p.wait_events()
        while p.returncode is None:
            output = p.stdout.read(timeout=0.5)
            if not output:
                # No output yet — poll so returncode gets refreshed.
                p.commands[0].poll()
                continue

            for line in output.split("\n"):
                if line.endswith("\r"):
                    line = line[:-1]
                self._console_logger.info(u"> {}".format(line))

                # Map bossac's progress markers onto status updates.
                if self.BOSSAC_ERASING in line:
                    self._logger.info(u"Erasing memory...")
                    self._send_status("progress", subtype="erasing")
                elif self.BOSSAC_WRITING in line:
                    self._logger.info(u"Writing memory...")
                    self._send_status("progress", subtype="writing")
                elif self.BOSSAC_VERIFYING in line:
                    self._logger.info(u"Verifying memory...")
                    self._send_status("progress", subtype="verifying")
                elif self.AVRDUDE_TIMEOUT in line:
                    p.close()
                    raise FlashException(
                        "Timeout communicating with programmer")
                elif self.BOSSAC_NODEVICE in line:
                    raise FlashException("No device found")
                elif self.AVRDUDE_ERROR_VERIFICATION in line:
                    raise FlashException("Error verifying flash")
                elif self.AVRDUDE_ERROR in line:
                    raise FlashException(
                        "bossac error: " +
                        output[output.find(self.AVRDUDE_ERROR) +
                               len(self.AVRDUDE_ERROR):].strip())

        if p.returncode == 0:
            return True
        else:
            raise FlashException(
                "bossac returned code {returncode}".format(
                    returncode=p.returncode))

    except FlashException as ex:
        self._logger.error(
            u"Flashing failed. {error}.".format(error=ex.reason))
        self._send_status("flasherror", message=ex.reason)
        return False
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; anything else is logged with traceback.
        self._logger.exception(u"Flashing failed. Unexpected error.")
        self._send_status("flasherror")
        return False
def _flash_avrdude(self, firmware=None, printer_port=None):
    """Flash *firmware* to an AVR-based board using avrdude.

    Builds the avrdude command line from plugin settings, runs it, streams
    its stderr to the console log and reports progress/errors via
    ``_send_status``.

    :param firmware: path to the firmware file to flash (required)
    :param printer_port: serial port of the printer board (required)
    :return: ``True`` on success, ``False`` on any failure
    """
    assert (firmware is not None)
    assert (printer_port is not None)

    avrdude_path = self._settings.get(["avrdude_path"])
    avrdude_conf = self._settings.get(["avrdude_conf"])
    avrdude_avrmcu = self._settings.get(["avrdude_avrmcu"])
    avrdude_programmer = self._settings.get(["avrdude_programmer"])
    avrdude_baudrate = self._settings.get(["avrdude_baudrate"])
    avrdude_disableverify = self._settings.get(["avrdude_disableverify"])

    working_dir = os.path.dirname(avrdude_path)

    avrdude_command = [
        avrdude_path, "-v", "-q", "-p", avrdude_avrmcu, "-c",
        avrdude_programmer, "-P", printer_port, "-D"
    ]
    if avrdude_conf is not None and avrdude_conf != "":
        avrdude_command += ["-C", avrdude_conf]
    if avrdude_baudrate is not None and avrdude_baudrate != "":
        avrdude_command += ["-b", avrdude_baudrate]
    if avrdude_disableverify:
        avrdude_command += ["-V"]
    avrdude_command += ["-U", "flash:w:" + firmware + ":i"]

    import sarge
    self._logger.info(u"Running %r in %s" %
                      (' '.join(avrdude_command), working_dir))
    self._console_logger.info(" ".join(avrdude_command))
    try:
        if version.get_octoprint_version(
        ) > version.get_comparable_version("1.3.9"):
            # Newer sarge (bundled with OctoPrint > 1.3.9) renamed the
            # keyword to async_.
            p = sarge.run(avrdude_command,
                          cwd=working_dir,
                          async_=True,
                          stdout=sarge.Capture(),
                          stderr=sarge.Capture())
        else:
            # Older sarge spells the keyword "async". Passing it via ** is
            # required so this file still compiles on Python >= 3.7, where
            # a literal async=True keyword is a SyntaxError.
            p = sarge.run(avrdude_command,
                          cwd=working_dir,
                          stdout=sarge.Capture(),
                          stderr=sarge.Capture(),
                          **{"async": True})
        p.wait_events()
        while p.returncode is None:
            # avrdude writes its progress to stderr.
            output = p.stderr.read(timeout=0.5)
            if not output:
                # No output yet — poll so returncode gets refreshed.
                p.commands[0].poll()
                continue

            for line in output.split("\n"):
                if line.endswith("\r"):
                    line = line[:-1]
                self._console_logger.info(u"> {}".format(line))

                # Match against the whole chunk, mirroring the original:
                # the markers may be split across "lines" in one read.
                if self.AVRDUDE_WRITING in output:
                    self._logger.info(u"Writing memory...")
                    self._send_status("progress", subtype="writing")
                elif self.AVRDUDE_VERIFYING in output:
                    self._logger.info(u"Verifying memory...")
                    self._send_status("progress", subtype="verifying")
                elif self.AVRDUDE_TIMEOUT in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException(
                        "Timeout communicating with programmer")
                elif self.AVRDUDE_ERROR_DEVICE in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException("Error opening serial device")
                elif self.AVRDUDE_ERROR_VERIFICATION in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException("Error verifying flash")
                elif self.AVRDUDE_ERROR_SYNC in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException(
                        "Avrdude says: 'not in sync" +
                        output[output.find(self.AVRDUDE_ERROR_SYNC) +
                               len(self.AVRDUDE_ERROR_SYNC):].strip() +
                        "'")
                elif self.AVRDUDE_ERROR in output:
                    raise FlashException(
                        "Avrdude error: " +
                        output[output.find(self.AVRDUDE_ERROR) +
                               len(self.AVRDUDE_ERROR):].strip())

        if p.returncode == 0:
            return True
        else:
            raise FlashException(
                "Avrdude returned code {returncode}".format(
                    returncode=p.returncode))

    except FlashException as ex:
        self._logger.error(
            u"Flashing failed. {error}.".format(error=ex.reason))
        self._send_status("flasherror", message=ex.reason)
        return False
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; anything else is logged with traceback.
        self._logger.exception(u"Flashing failed. Unexpected error.")
        self._send_status("flasherror")
        return False
def _restore_backup(cls,
                    path,
                    settings=None,
                    plugin_manager=None,
                    datafolder=None,
                    on_install_plugins=None,
                    on_report_unknown_plugins=None,
                    on_invalid_backup=None,
                    on_log_progress=None,
                    on_log_error=None,
                    on_restore_start=None,
                    on_restore_done=None,
                    on_restore_failed=None):
    """Restore an OctoPrint backup zip at *path* over the current basedir.

    Validates the backup (metadata, config.yaml, users.yaml when access
    control is enabled), unpacks it to a temp folder, reinstalls plugins
    known to the configured repository, swaps the basedir (keeping a
    ``.bck`` copy for rollback) and finally restarts the server if a
    restart command is configured. All progress/error reporting happens
    through the optional ``on_*`` callbacks. The backup zip at *path* is
    always deleted, even on failure.

    :return: ``True`` on success, ``False`` on any failure
    """
    # Restore relies on atomically swapping directories, which is not
    # supported on Windows.
    if not is_os_compatible(["!windows"]):
        if callable(on_log_error):
            on_log_error(
                u"Restore is not supported on this operating system")
        if callable(on_restore_failed):
            on_restore_failed(path)
        return False

    restart_command = settings.global_get(
        ["server", "commands", "serverRestartCommand"])

    # NOTE(review): reaches into the private _basedir of settings.
    basedir = settings._basedir
    cls._clean_dir_backup(basedir, on_log_progress=on_log_progress)

    plugin_repo = dict()
    repo_url = settings.global_get(
        ["plugins", "pluginmanager", "repository"])
    if repo_url:
        plugin_repo = cls._get_plugin_repository_data(repo_url)

    if callable(on_restore_start):
        on_restore_start(path)

    try:
        with zipfile.ZipFile(path, "r") as zip:
            # read metadata
            try:
                metadata_zipinfo = zip.getinfo("metadata.json")
            except KeyError:
                if callable(on_invalid_backup):
                    on_invalid_backup(
                        u"Not an OctoPrint backup, lacks metadata.json")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            metadata_bytes = zip.read(metadata_zipinfo)
            metadata = json.loads(metadata_bytes)

            # Refuse backups created by a newer OctoPrint than the one
            # running now.
            backup_version = get_comparable_version(metadata["version"],
                                                    base=True)
            if backup_version > get_octoprint_version(base=True):
                if callable(on_invalid_backup):
                    on_invalid_backup(
                        u"Backup is from a newer version of OctoPrint and cannot be applied"
                    )
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            # unzip to temporary folder
            temp = tempfile.mkdtemp()
            try:
                if callable(on_log_progress):
                    on_log_progress(
                        u"Unpacking backup to {}...".format(temp))
                abstemp = os.path.abspath(temp)
                for member in zip.infolist():
                    # Guard against zip path traversal: only extract
                    # members that resolve inside the temp folder.
                    abspath = os.path.abspath(
                        os.path.join(temp, member.filename))
                    if abspath.startswith(abstemp):
                        zip.extract(member, temp)

                # sanity check
                configfile = os.path.join(temp, "basedir", "config.yaml")
                if not os.path.exists(configfile):
                    if callable(on_invalid_backup):
                        on_invalid_backup(u"Backup lacks config.yaml")
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                import yaml
                with codecs.open(configfile) as f:
                    configdata = yaml.safe_load(f)

                # If access control is enabled, a users file must exist too.
                if configdata.get("accessControl", dict()).get("enabled",
                                                               True):
                    userfile = os.path.join(temp, "basedir", "users.yaml")
                    if not os.path.exists(userfile):
                        if callable(on_invalid_backup):
                            on_invalid_backup(u"Backup lacks users.yaml")
                        if callable(on_restore_failed):
                            on_restore_failed(path)
                        return False

                if callable(on_log_progress):
                    on_log_progress(u"Unpacked")

                # install available plugins
                plugins = []
                plugin_list_file = os.path.join(temp, "plugin_list.json")
                if os.path.exists(plugin_list_file):
                    with codecs.open(plugin_list_file, "r") as f:
                        plugins = json.load(f)

                known_plugins = []
                unknown_plugins = []
                if plugins:
                    if plugin_repo:
                        for plugin in plugins:
                            if plugin["key"] in plugin_manager.plugins:
                                # already installed
                                continue

                            if plugin["key"] in plugin_repo:
                                # not installed, can be installed from repository url
                                known_plugins.append(
                                    plugin_repo[plugin["key"]])
                            else:
                                # not installed, not installable
                                unknown_plugins.append(plugin)

                    else:
                        # no repo, all plugins are not installable
                        unknown_plugins = plugins

                    if callable(on_log_progress):
                        if known_plugins:
                            on_log_progress(
                                u"Known and installable plugins: {}".format(
                                    u", ".join(
                                        map(lambda x: x["id"],
                                            known_plugins))))
                        if unknown_plugins:
                            on_log_progress(u"Unknown plugins: {}".format(
                                u", ".join(
                                    map(lambda x: x["key"],
                                        unknown_plugins))))

                    if callable(on_install_plugins):
                        on_install_plugins(known_plugins)

                    if callable(on_report_unknown_plugins):
                        on_report_unknown_plugins(unknown_plugins)

                # move config data
                basedir_backup = basedir + ".bck"
                basedir_extracted = os.path.join(temp, "basedir")

                if callable(on_log_progress):
                    on_log_progress(u"Renaming {} to {}...".format(
                        basedir, basedir_backup))
                shutil.move(basedir, basedir_backup)

                try:
                    if callable(on_log_progress):
                        on_log_progress(u"Moving {} to {}...".format(
                            basedir_extracted, basedir))
                    shutil.move(basedir_extracted, basedir)
                except:
                    # Roll the original basedir back into place on failure.
                    if callable(on_log_error):
                        on_log_error(u"Error while restoring config data",
                                     exc_info=sys.exc_info())
                        on_log_error(u"Rolling back old config data")

                    shutil.move(basedir_backup, basedir)

                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                if unknown_plugins:
                    # Persist the unknown-plugin list so the UI can report
                    # them after the restart.
                    if callable(on_log_progress):
                        on_log_progress(
                            u"Writing info file about unknown plugins")

                    if not os.path.isdir(datafolder):
                        os.makedirs(datafolder)

                    unknown_plugins_path = os.path.join(
                        datafolder, UNKNOWN_PLUGINS_FILE)
                    try:
                        with codecs.open(unknown_plugins_path,
                                         mode="w",
                                         encoding="utf-8") as f:
                            json.dump(unknown_plugins, f)
                    except:
                        # Best effort only — a failure here must not abort
                        # the otherwise successful restore.
                        if callable(on_log_error):
                            on_log_error(
                                u"Could not persist list of unknown plugins to {}"
                                .format(unknown_plugins_path),
                                exc_info=sys.exc_info())

            finally:
                if callable(on_log_progress):
                    on_log_progress(u"Removing temporary unpacked folder")
                shutil.rmtree(temp)

    except:
        exc_info = sys.exc_info()
        try:
            if callable(on_log_error):
                on_log_error(u"Error while running restore",
                             exc_info=exc_info)
            if callable(on_restore_failed):
                on_restore_failed(path)
        finally:
            # Break the traceback reference cycle.
            del exc_info
        return False

    finally:
        # remove zip
        if callable(on_log_progress):
            on_log_progress(u"Removing temporary zip")
        os.remove(path)

    # restart server
    if restart_command:
        import sarge

        if callable(on_log_progress):
            on_log_progress(u"Restarting...")
        if callable(on_restore_done):
            on_restore_done(path)

        try:
            sarge.run(restart_command, async_=True)
        except:
            if callable(on_log_error):
                on_log_error(
                    u"Error while restarting via command {}".format(
                        restart_command),
                    exc_info=sys.exc_info())
                on_log_error(u"Please restart OctoPrint manually")
            return False

    else:
        if callable(on_restore_done):
            on_restore_done(path)

    return True
def _restore_backup(cls,
                    path,
                    settings=None,
                    plugin_manager=None,
                    datafolder=None,
                    on_install_plugins=None,
                    on_report_unknown_plugins=None,
                    on_invalid_backup=None,
                    on_log_progress=None,
                    on_log_error=None,
                    on_restore_start=None,
                    on_restore_done=None,
                    on_restore_failed=None):
    """Restore an OctoPrint backup zip at *path* over the current basedir.

    Validates the backup (metadata, config.yaml, users.yaml when access
    control is enabled), unpacks it to a temp folder, reinstalls plugins
    known to the configured repository, swaps the basedir (keeping a
    ``.bck`` copy for rollback) and finally restarts the server if a
    restart command is configured. All progress/error reporting happens
    through the optional ``on_*`` callbacks. The backup zip at *path* is
    always deleted, even on failure.

    :return: ``True`` on success, ``False`` on any failure
    """
    # Restore relies on atomically swapping directories, which is not
    # supported on Windows.
    if not is_os_compatible(["!windows"]):
        if callable(on_log_error):
            on_log_error(u"Restore is not supported on this operating system")
        if callable(on_restore_failed):
            on_restore_failed(path)
        return False

    restart_command = settings.global_get(["server", "commands", "serverRestartCommand"])

    basedir = settings._basedir
    cls._clean_dir_backup(basedir, on_log_progress=on_log_progress)

    plugin_repo = dict()
    repo_url = settings.global_get(["plugins", "pluginmanager", "repository"])
    if repo_url:
        plugin_repo = cls._get_plugin_repository_data(repo_url)

    if callable(on_restore_start):
        on_restore_start(path)

    try:
        with zipfile.ZipFile(path, "r") as zip:
            # read metadata
            try:
                metadata_zipinfo = zip.getinfo("metadata.json")
            except KeyError:
                if callable(on_invalid_backup):
                    on_invalid_backup(u"Not an OctoPrint backup, lacks metadata.json")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            metadata_bytes = zip.read(metadata_zipinfo)
            metadata = json.loads(metadata_bytes)

            # Refuse backups created by a newer OctoPrint than the one
            # running now.
            backup_version = get_comparable_version(metadata["version"], base=True)
            if backup_version > get_octoprint_version(base=True):
                if callable(on_invalid_backup):
                    on_invalid_backup(u"Backup is from a newer version of OctoPrint and cannot be applied")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            # unzip to temporary folder
            temp = tempfile.mkdtemp()
            try:
                if callable(on_log_progress):
                    on_log_progress(u"Unpacking backup to {}...".format(temp))
                abstemp = os.path.abspath(temp)
                for member in zip.infolist():
                    # Guard against zip path traversal: only extract members
                    # that resolve inside the temp folder.
                    abspath = os.path.abspath(os.path.join(temp, member.filename))
                    if abspath.startswith(abstemp):
                        zip.extract(member, temp)

                # sanity check
                configfile = os.path.join(temp, "basedir", "config.yaml")
                if not os.path.exists(configfile):
                    if callable(on_invalid_backup):
                        on_invalid_backup(u"Backup lacks config.yaml")
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                import yaml
                with codecs.open(configfile) as f:
                    configdata = yaml.safe_load(f)

                # If access control is enabled, a users file must exist too.
                if configdata.get("accessControl", dict()).get("enabled", True):
                    userfile = os.path.join(temp, "basedir", "users.yaml")
                    if not os.path.exists(userfile):
                        if callable(on_invalid_backup):
                            on_invalid_backup(u"Backup lacks users.yaml")
                        if callable(on_restore_failed):
                            on_restore_failed(path)
                        return False

                if callable(on_log_progress):
                    on_log_progress(u"Unpacked")

                # install available plugins; older backups may not contain a
                # plugin list at all, so guard against a missing file instead
                # of crashing with FileNotFoundError
                plugins = []
                plugin_list_file = os.path.join(temp, "plugin_list.json")
                if os.path.exists(plugin_list_file):
                    with codecs.open(plugin_list_file, "r") as f:
                        plugins = json.load(f)

                known_plugins = []
                unknown_plugins = []
                if plugins:
                    if plugin_repo:
                        for plugin in plugins:
                            if plugin["key"] in plugin_manager.plugins:
                                # already installed
                                continue

                            if plugin["key"] in plugin_repo:
                                # not installed, can be installed from repository url
                                known_plugins.append(plugin_repo[plugin["key"]])
                            else:
                                # not installed, not installable
                                unknown_plugins.append(plugin)

                    else:
                        # no repo, all plugins are not installable
                        unknown_plugins = plugins

                    if callable(on_log_progress):
                        if known_plugins:
                            on_log_progress(u"Known and installable plugins: {}".format(u", ".join(map(lambda x: x["id"], known_plugins))))
                        if unknown_plugins:
                            on_log_progress(u"Unknown plugins: {}".format(u", ".join(map(lambda x: x["key"], unknown_plugins))))

                    if callable(on_install_plugins):
                        on_install_plugins(known_plugins)

                    if callable(on_report_unknown_plugins):
                        on_report_unknown_plugins(unknown_plugins)

                # move config data; shutil.move also works when basedir and
                # the temp folder live on different filesystems, where a
                # plain os.rename would fail with EXDEV
                basedir_backup = basedir + ".bck"
                basedir_extracted = os.path.join(temp, "basedir")

                if callable(on_log_progress):
                    on_log_progress(u"Renaming {} to {}...".format(basedir, basedir_backup))
                shutil.move(basedir, basedir_backup)

                try:
                    if callable(on_log_progress):
                        on_log_progress(u"Moving {} to {}...".format(basedir_extracted, basedir))
                    shutil.move(basedir_extracted, basedir)
                except Exception:
                    # Roll the original basedir back into place on failure.
                    if callable(on_log_error):
                        on_log_error(u"Error while restoring config data",
                                     exc_info=sys.exc_info())
                        on_log_error(u"Rolling back old config data")

                    shutil.move(basedir_backup, basedir)

                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                if unknown_plugins:
                    # Persist the unknown-plugin list so the UI can report
                    # them after the restart.
                    if callable(on_log_progress):
                        on_log_progress(u"Writing info file about unknown plugins")

                    if not os.path.isdir(datafolder):
                        os.makedirs(datafolder)

                    unknown_plugins_path = os.path.join(datafolder, UNKNOWN_PLUGINS_FILE)
                    try:
                        with codecs.open(unknown_plugins_path, mode="w", encoding="utf-8") as f:
                            json.dump(unknown_plugins, f)
                    except Exception:
                        # Best effort only — a failure here must not abort
                        # the otherwise successful restore.
                        if callable(on_log_error):
                            on_log_error(u"Could not persist list of unknown plugins to {}".format(unknown_plugins_path),
                                         exc_info=sys.exc_info())

            finally:
                if callable(on_log_progress):
                    on_log_progress(u"Removing temporary unpacked folder")
                shutil.rmtree(temp)

    except Exception:
        exc_info = sys.exc_info()
        try:
            if callable(on_log_error):
                on_log_error(u"Error while running restore", exc_info=exc_info)
            if callable(on_restore_failed):
                on_restore_failed(path)
        finally:
            # Break the traceback reference cycle.
            del exc_info
        return False

    finally:
        # remove zip
        if callable(on_log_progress):
            on_log_progress(u"Removing temporary zip")
        os.remove(path)

    # restart server
    if restart_command:
        import sarge

        if callable(on_log_progress):
            on_log_progress(u"Restarting...")
        if callable(on_restore_done):
            on_restore_done(path)

        try:
            sarge.run(restart_command, async_=True)
        except Exception:
            if callable(on_log_error):
                on_log_error(u"Error while restarting via command {}".format(restart_command),
                             exc_info=sys.exc_info())
                on_log_error(u"Please restart OctoPrint manually")
            return False

    else:
        if callable(on_restore_done):
            on_restore_done(path)

    return True