def _is_current(release_information): if release_information["remote"]["value"] is None: return True local_version = get_comparable_version(release_information["local"]["value"]) remote_version = get_comparable_version(release_information["remote"]["value"]) return remote_version <= local_version
def _broken_version(self, line):
    """Return True if *line* reports a firmware version considered broken.

    The version string follows the ``self.VERSION`` prefix in *line*.
    NOTE(review): this variant treats a version equal to FIXED_VERSION as
    broken (``<=``) while sibling implementations use a strict ``<`` —
    confirm this is intentional.
    """
    reported = line[len(self.VERSION):]
    parsed = get_comparable_version(reported, base=True)
    return parsed is not None and parsed <= self.FIXED_VERSION
def on_event(self, event, payload):
    """Handle file events for .ufp uploads.

    FileAdded for a ufp file: the upload is renamed to ``*.gcode`` and
    queued for analysis. FileRemoved for a ``*.ufp.gcode`` file: the
    generated thumbnail and ufp leftovers are removed from the plugin's
    data folder.
    """
    if event == "FileAdded" and "ufp" in payload["type"]:
        # Add ufp file to analysisqueue
        old_name = self._settings.global_get_basefolder(
            "uploads") + "/" + payload["path"]
        new_name = old_name + ".gcode"
        # os.rename raises on Windows if the destination already exists,
        # so clear any stale target first (matches the newer revision of
        # this handler).
        if os.path.exists(new_name):
            os.remove(new_name)
        os.rename(old_name, new_name)
        printer_profile = self._printer_profile_manager.get("_default")
        # QueueEntry grew an additional argument after OctoPrint 1.3.9
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile,
                               None)
        else:
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], new_name, printer_profile)
        self._analysis_queue.enqueue(entry, high_priority=True)

    if event == "FileRemoved" and payload["name"].endswith(".ufp.gcode"):
        thumbnail = "%s/%s" % (self.get_plugin_data_folder(),
                               payload["name"].replace(".ufp.gcode", ".png"))
        ufp_file = "%s/%s" % (self.get_plugin_data_folder(),
                              payload["name"].replace(".ufp.gcode", ".ufp"))
        if os.path.exists(thumbnail):
            os.remove(thumbnail)
        if os.path.exists(ufp_file):
            os.remove(ufp_file)
def _broken_version(self, line):
    """Return True if *line* reports a firmware version older than the fix.

    The version string follows the ``self.VERSION`` prefix in *line*.
    """
    parsed = get_comparable_version(line[len(self.VERSION):], base=True)
    if parsed is None:
        return False
    return parsed < self.FIXED_VERSION
class RepetierBefore092Check(Check):
    """
    Repetier firmware prior to version 0.92

    Identified through firmware name "Repetier_x.y.z" with x.y.z < 0.92
    """

    name = "repetier_before_092"

    FIXED_VERSION = get_comparable_version("0.92")

    def m115(self, name, data):
        # only firmware names starting with "repetier" are of interest
        if name and name.lower().startswith("repetier"):
            detected = self._extract_repetier_version(name)
            self._triggered = detected is not None and detected < self.FIXED_VERSION
            self._active = False

    def _extract_repetier_version(self, name):
        """
        Extracts the Repetier version number from the firmware name.

        Example: "Repetier_0.91" => 0.91
        """
        if "_" not in name:
            return None
        _, raw = name.split("_", 1)
        return get_comparable_version(raw, base=True)
def can_perform_update(target, check, online=True):
    """Return True if *check* describes a pip-based update we can run now.

    Requires a usable pip caller of at least MINIMUM_PIP, and either an
    online connection or a check flagged as offline-capable.
    """
    from .. import MINIMUM_PIP
    pip_caller = _get_pip_caller(command=check.get("pip_command"))

    if "pip" not in check:
        return False
    if pip_caller is None or not pip_caller.available:
        return False
    if pip_caller.version < get_comparable_version(MINIMUM_PIP):
        return False
    return online or check.get("offline", False)
def can_perform_update(target, check, online=True):
    """Return whether the given pip-based check can currently be updated."""
    from .. import MINIMUM_PIP
    caller = _get_pip_caller(command=check.get("pip_command"))

    pip_usable = (
        "pip" in check
        and caller is not None
        and caller.available
        and caller.version >= get_comparable_version(MINIMUM_PIP)
    )
    # offline-capable checks may run without connectivity
    return pip_usable and (online or check.get("offline", False))
def _get_comparable_factory(compare_type, force_base=True): if compare_type in ("python", "python_unequal"): return lambda version: get_comparable_version(version, base=force_base) elif compare_type in ("semantic", "semantic_unequal"): return lambda version: _get_comparable_version_semantic( version, force_base=force_base) else: return lambda version: version
def extract_repetier_version(name):
    """
    Extracts the Repetier version number from the firmware name.

    Example: "Repetier_0.91" => 0.91
    """
    if "_" not in name:
        return None
    _, raw = name.split("_", 1)
    return get_comparable_version(raw, base=True)
class AnycubicCheck(Check):
    """
    Anycubic MEGA stock firmware

    Identified through "Author: (Jolly, xxxxxxxx.CO.)" or "| Author: (**Jolly,
    xxxxxxxx.CO.**)" in startup messages combined with "echo:Vx.y.z" in
    startup messages, with x.y.z < 1.1.2.
    """

    name = "anycubic"

    AUTHOR = "| Author: ".lower()
    VERSION = "echo:V"

    CRITICAL_AUTHOR1 = "| Author: (Jolly, xxxxxxxx.CO.)".lower()
    CRITICAL_AUTHOR2 = "| Author: (**Jolly, xxxxxxxx.CO.**)".lower()

    FIXED_VERSION = get_comparable_version("1.1.2")

    def __init__(self):
        Check.__init__(self)
        # tri-state: None = not seen yet, otherwise the match result
        self._author_matches = None
        self._version_matches = None

    def received(self, line):
        if not line:
            return

        lower_line = line.lower()
        if self.AUTHOR in lower_line:
            self._author_matches = (self.CRITICAL_AUTHOR1 in lower_line
                                    or self.CRITICAL_AUTHOR2 in lower_line)
        elif line.startswith(self.VERSION):
            self._version_matches = self._broken_version(line)
        else:
            # line carries neither author nor version info, nothing to do
            return

        self._evaluate()

    def _broken_version(self, line):
        """Return True if the reported version is older than FIXED_VERSION."""
        parsed = get_comparable_version(line[len(self.VERSION):], base=True)
        return parsed is not None and parsed < self.FIXED_VERSION

    def _evaluate(self):
        # only decide once both pieces of information have been observed
        if self._author_matches is None or self._version_matches is None:
            return
        self._triggered = self._author_matches and self._version_matches
        self._active = False

    def reset(self):
        Check.reset(self)
        self._author_matches = None
        self._version_matches = None
def extract_repetier_version(name):
    """
    Extracts the Repetier version number from the firmware name.

    Example: "Repetier_0.91" => 0.91
    """
    version = None
    if "_" in name:
        version = get_comparable_version(name.split("_", 1)[1], base=True)
    return version
def test_get_comparable_version(self, version, cut, expected):
    """Parametrized check of get_comparable_version.

    *expected* is either a version string to compare against or an exception
    type that the call is expected to raise.
    """
    from octoprint.util.version import get_comparable_version
    try:
        result = get_comparable_version(version, cut=cut)
    except Exception as exc:
        # a raised exception is only acceptable if it is the expected type
        if not (isinstance(expected, type) and isinstance(exc, expected)):
            raise
    else:
        self.assertEqual(result, pkg_resources.parse_version(expected))
class MalyanM200Check(Check):
    """
    Malyan M200 stock firmware prior to version 4.0

    Identified through firmware name "Malyan*", model "M200" and version < 4.0.
    """

    name = "malyan_m200"

    FIXED_VERSION = get_comparable_version("4.0")

    def m115(self, name, data):
        # keep the short-circuiting chain: the version is only parsed when
        # name and model already matched
        self._triggered = (name and name.lower().startswith("malyan")
                           and data.get("MODEL") == "M200"
                           and get_comparable_version(data.get("VER", "0"))
                           < self.FIXED_VERSION)
        self._active = False
def download_file(self, action):
    """Download a print file from MyMiniFactory and queue/select it.

    Fetches the gcode for ``action["task_id"]`` via the MyMiniFactory API,
    stores it in the uploads folder, enqueues it for analysis and, when the
    printer is idle, selects it (auto-starting the print if configured).
    On API failure the status code is forwarded to the frontend.
    """
    # Make API call to MyMiniFactory to download gcode file.
    url = "https://www.myminifactory.com/api/v2/print-file"
    payload = dict(task_id=action["task_id"],
                   printer_token=self._settings.get(["printer_token"]))
    headers = {'X-Api-Key': self._settings.get(["client_key"])}
    self._logger.debug("Sending parameters: %s with header: %s" %
                       (payload, headers))
    response = requests.get(url, params=payload, headers=headers)
    if response.status_code == 200:
        # Save file to uploads folder
        gcode_file_name = self._file_manager.sanitize_name(
            "local", action["filename"])
        gcode_download_file = "%s/%s" % (
            self._settings.global_get_basefolder("uploads"), gcode_file_name)
        self._logger.debug("Saving file: %s" % gcode_download_file)
        # Write the raw bytes: going through response.text plus a text-mode
        # file could corrupt the download (requests has to guess the
        # encoding, and text mode translates newlines on Windows).
        with open(gcode_download_file, 'wb') as f:
            f.write(response.content)
        # Add downloaded file to analysisqueue
        printer_profile = self._printer_profile_manager.get("_default")
        # QueueEntry grew an additional argument after OctoPrint 1.3.9
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(gcode_file_name, gcode_download_file, "gcode",
                               "local", gcode_download_file, printer_profile,
                               None)
        else:
            entry = QueueEntry(gcode_file_name, gcode_download_file, "gcode",
                               "local", gcode_download_file, printer_profile)
        self._analysis_queue.enqueue(entry, high_priority=True)
        # Select file downloaded and start printing if auto_start_print is
        # enabled and not already printing
        if self._printer.is_ready():
            self._mmf_print = True
            self._printer.select_file(
                gcode_file_name,
                False,
                printAfterSelect=self._settings.get_boolean(
                    ["auto_start_print"]))
        else:
            self._logger.debug(
                "Printer not ready, not selecting file to print.")
    else:
        self._logger.debug("API Error: %s" % response)
        self._plugin_manager.send_plugin_message(
            self._identifier, dict(error=response.status_code))
def on_settings_save(self, data):
    """Persist settings and push a reordered tab list to OctoPrint.

    When the combined visible + hidden tab list changed, the global
    appearance tab order is rewritten to match.
    """
    old_tabs = self._settings.get(["tabs"]) + self._settings.get(["hidden_tabs"])
    octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
    new_tabs = self._settings.get(["tabs"]) + self._settings.get(["hidden_tabs"])

    if old_tabs != new_tabs:
        # explicit kwargs instead of the fragile .format(**locals())
        self._logger.info("tabs changed from {old_tabs} to {new_tabs} reordering tabs.".format(
            old_tabs=old_tabs, new_tabs=new_tabs))
        # hoisted out of the loop: the OctoPrint version does not change
        # between iterations
        gcodeviewer_is_plugin = version.get_octoprint_version() > version.get_comparable_version("1.4.0")
        flattened_tabs = []
        for tab in new_tabs:
            if gcodeviewer_is_plugin and tab["name"] == "gcodeviewer":
                # after 1.4.0 the gcode viewer tab is registered as a plugin tab
                flattened_tabs.append("plugin_{}".format(tab["name"]))
            else:
                flattened_tabs.append(tab["name"])
        self._settings.global_set(["appearance", "components", "order", "tab"], flattened_tabs)
def on_event(self, event, payload):
    """React to file lifecycle events for .ufp uploads.

    FileAdded: rename the ufp upload to ``*.gcode`` and queue it for analysis.
    MetadataAnalysisFinished: attach the generated thumbnail URL as metadata.
    FileRemoved: delete the thumbnail and ufp leftovers from the data folder.
    """
    if event == "FileAdded" and "ufp" in payload["type"]:
        # Add ufp file to analysisqueue
        source = self._settings.global_get_basefolder(
            "uploads") + "/" + payload["path"]
        target = source + ".gcode"
        # clear a stale target so the rename cannot fail on Windows
        if os.path.exists(target):
            os.remove(target)
        os.rename(source, target)
        profile = self._printer_profile_manager.get("_default")
        # QueueEntry grew an additional argument after OctoPrint 1.3.9
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], target, profile, None)
        else:
            entry = QueueEntry(payload["name"] + ".gcode",
                               payload["path"] + ".gcode", "gcode",
                               payload["storage"], target, profile)
        self._analysis_queue.enqueue(entry, high_priority=True)

    if event == "MetadataAnalysisFinished" and "ufp" in payload["path"]:
        self._logger.info('Adding thumbnail url')
        thumbnail_url = ("/plugin/UltimakerFormatPackage/thumbnail/"
                         + payload["path"].replace(".ufp.gcode", ".png"))
        self._storage_interface = self._file_manager._storage(
            payload.get("origin", "local"))
        self._storage_interface.set_additional_metadata(
            payload.get("path"), "refs", dict(thumbnail=thumbnail_url),
            merge=True)

    if event == "FileRemoved" and payload["name"].endswith(".ufp.gcode"):
        data_folder = self.get_plugin_data_folder()
        thumbnail = "%s/%s" % (data_folder,
                               payload["path"].replace(".ufp.gcode", ".png"))
        ufp_file = "%s/%s" % (data_folder,
                              payload["path"].replace(".ufp.gcode", ".ufp"))
        for leftover in (thumbnail, ufp_file):
            if os.path.exists(leftover):
                os.remove(leftover)
def _check_environment(self):
    """Collect python/setuptools/pip versions and flag whether they meet the
    configured minimums, then signal the environment-ready event."""
    import pkg_resources

    local_pip = LocalPipCaller()

    # the versions we care about
    versions = dict(
        python=get_python_version_string(),
        setuptools=pkg_resources.get_distribution("setuptools").version,
        pip=local_pip.version_string,
    )

    minima = (("python", MINIMUM_PYTHON),
              ("setuptools", MINIMUM_SETUPTOOLS),
              ("pip", MINIMUM_PIP))
    supported = all(
        get_comparable_version(versions[key]) >= get_comparable_version(minimum)
        for key, minimum in minima)

    self._environment_supported = supported
    self._environment_versions = versions
    self._environment_ready.set()
def _flash_bossac(self, firmware=None, printer_port=None):
    # Flash *firmware* to a board on *printer_port* using the bossac tool.
    # Returns True on success, False on any failure (status is also pushed
    # to the frontend via _send_status).
    assert (firmware is not None)
    assert (printer_port is not None)

    bossac_path = self._settings.get(["bossac_path"])
    bossac_disableverify = self._settings.get(["bossac_disableverify"])

    working_dir = os.path.dirname(bossac_path)

    bossac_command = [
        bossac_path, "-i", "-p", printer_port, "-U", "false", "-e", "-w"
    ]
    if not bossac_disableverify:
        bossac_command += ["-v"]
    bossac_command += ["-b", firmware, "-R"]

    # the board has to be rebooted into its bootloader (SAM-BA) first
    self._logger.info(u"Attempting to reset the board to SAM-BA")
    if not self._reset_1200(printer_port):
        self._logger.error(u"Reset failed")
        return False

    import sarge
    self._logger.info(u"Running %r in %s" %
                      (' '.join(bossac_command), working_dir))
    self._console_logger.info(" ".join(bossac_command))
    try:
        # sarge renamed its "async" keyword argument to "async_"; which
        # spelling works is keyed off the bundled sarge of the OctoPrint
        # version here
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            p = sarge.run(bossac_command,
                          cwd=working_dir,
                          async_=True,
                          stdout=sarge.Capture(buffer_size=1),
                          stderr=sarge.Capture(buffer_size=1))
        else:
            p = sarge.run(bossac_command,
                          cwd=working_dir,
                          async=True,
                          stdout=sarge.Capture(buffer_size=1),
                          stderr=sarge.Capture(buffer_size=1))
        p.wait_events()

        # poll the tool's stdout until it terminates, forwarding progress
        # markers to the frontend and raising on known error markers
        while p.returncode is None:
            output = p.stdout.read(timeout=0.5)
            if not output:
                p.commands[0].poll()
                continue

            for line in output.split("\n"):
                if line.endswith("\r"):
                    line = line[:-1]
                self._console_logger.info(u"> {}".format(line))

                if self.BOSSAC_ERASING in line:
                    self._logger.info(u"Erasing memory...")
                    self._send_status("progress", subtype="erasing")
                elif self.BOSSAC_WRITING in line:
                    self._logger.info(u"Writing memory...")
                    self._send_status("progress", subtype="writing")
                elif self.BOSSAC_VERIFYING in line:
                    self._logger.info(u"Verifying memory...")
                    self._send_status("progress", subtype="verifying")
                elif self.AVRDUDE_TIMEOUT in line:
                    p.close()
                    raise FlashException(
                        "Timeout communicating with programmer")
                elif self.BOSSAC_NODEVICE in line:
                    raise FlashException("No device found")
                elif self.AVRDUDE_ERROR_VERIFICATION in line:
                    raise FlashException("Error verifying flash")
                elif self.AVRDUDE_ERROR in line:
                    # include everything after the error marker in the message
                    raise FlashException(
                        "bossac error: " +
                        output[output.find(self.AVRDUDE_ERROR) +
                               len(self.AVRDUDE_ERROR):].strip())

        if p.returncode == 0:
            return True
        else:
            raise FlashException(
                "bossac returned code {returncode}".format(
                    returncode=p.returncode))

    except FlashException as ex:
        self._logger.error(
            u"Flashing failed. {error}.".format(error=ex.reason))
        self._send_status("flasherror", message=ex.reason)
        return False
    except:
        self._logger.exception(u"Flashing failed. Unexpected error.")
        self._send_status("flasherror")
        return False
def m115(self, name, data):
    """Trigger when firmware is Malyan, model M200 and older than FIXED_VERSION."""
    # short-circuiting chain kept intact: the version is only parsed once
    # name and model already matched
    self._triggered = (name and name.lower().startswith("malyan")
                       and data.get("MODEL") == "M200"
                       and get_comparable_version(data.get("VER", "0"))
                       < self.FIXED_VERSION)
    self._active = False
def _flash_avrdude(self, firmware=None, printer_port=None):
    # Flash *firmware* to an AVR board on *printer_port* via avrdude.
    # Returns True on success, False otherwise (status is also pushed to
    # the frontend via _send_status).
    assert (firmware is not None)
    assert (printer_port is not None)

    avrdude_path = self._settings.get(["avrdude_path"])
    avrdude_conf = self._settings.get(["avrdude_conf"])
    avrdude_avrmcu = self._settings.get(["avrdude_avrmcu"])
    avrdude_programmer = self._settings.get(["avrdude_programmer"])
    avrdude_baudrate = self._settings.get(["avrdude_baudrate"])
    avrdude_disableverify = self._settings.get(["avrdude_disableverify"])

    working_dir = os.path.dirname(avrdude_path)

    avrdude_command = [
        avrdude_path, "-v", "-q", "-p", avrdude_avrmcu, "-c",
        avrdude_programmer, "-P", printer_port, "-D"
    ]
    if avrdude_conf is not None and avrdude_conf != "":
        avrdude_command += ["-C", avrdude_conf]
    if avrdude_baudrate is not None and avrdude_baudrate != "":
        avrdude_command += ["-b", avrdude_baudrate]
    if avrdude_disableverify:
        avrdude_command += ["-V"]
    avrdude_command += ["-U", "flash:w:" + firmware + ":i"]

    import sarge
    self._logger.info(u"Running %r in %s" %
                      (' '.join(avrdude_command), working_dir))
    self._console_logger.info(" ".join(avrdude_command))
    try:
        # sarge renamed its "async" keyword argument to "async_"; which
        # spelling works is keyed off the OctoPrint version here
        if version.get_octoprint_version() > version.get_comparable_version("1.3.9"):
            p = sarge.run(avrdude_command,
                          cwd=working_dir,
                          async_=True,
                          stdout=sarge.Capture(),
                          stderr=sarge.Capture())
        else:
            p = sarge.run(avrdude_command,
                          cwd=working_dir,
                          async=True,
                          stdout=sarge.Capture(),
                          stderr=sarge.Capture())
        p.wait_events()

        # avrdude reports progress on stderr; poll it until the tool exits
        while p.returncode is None:
            output = p.stderr.read(timeout=0.5)
            if not output:
                p.commands[0].poll()
                continue

            for line in output.split("\n"):
                if line.endswith("\r"):
                    line = line[:-1]
                self._console_logger.info(u"> {}".format(line))

                # NOTE(review): the checks below test the whole chunk
                # ("output") although this loop iterates per "line"; the
                # sibling bossac routine tests "line" instead — confirm
                # which is intended.
                if self.AVRDUDE_WRITING in output:
                    self._logger.info(u"Writing memory...")
                    self._send_status("progress", subtype="writing")
                elif self.AVRDUDE_VERIFYING in output:
                    self._logger.info(u"Verifying memory...")
                    self._send_status("progress", subtype="verifying")
                elif self.AVRDUDE_TIMEOUT in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException(
                        "Timeout communicating with programmer")
                elif self.AVRDUDE_ERROR_DEVICE in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException("Error opening serial device")
                elif self.AVRDUDE_ERROR_VERIFICATION in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException("Error verifying flash")
                elif self.AVRDUDE_ERROR_SYNC in output:
                    p.commands[0].kill()
                    p.close()
                    raise FlashException(
                        "Avrdude says: 'not in sync" +
                        output[output.find(self.AVRDUDE_ERROR_SYNC) +
                               len(self.AVRDUDE_ERROR_SYNC):].strip() + "'")
                elif self.AVRDUDE_ERROR in output:
                    raise FlashException(
                        "Avrdude error: " +
                        output[output.find(self.AVRDUDE_ERROR) +
                               len(self.AVRDUDE_ERROR):].strip())

        if p.returncode == 0:
            return True
        else:
            raise FlashException(
                "Avrdude returned code {returncode}".format(
                    returncode=p.returncode))

    except FlashException as ex:
        self._logger.error(
            u"Flashing failed. {error}.".format(error=ex.reason))
        self._send_status("flasherror", message=ex.reason)
        return False
    except:
        self._logger.exception(u"Flashing failed. Unexpected error.")
        self._send_status("flasherror")
        return False
def _restore_backup(cls,
                    path,
                    settings=None,
                    plugin_manager=None,
                    datafolder=None,
                    on_install_plugins=None,
                    on_report_unknown_plugins=None,
                    on_invalid_backup=None,
                    on_log_progress=None,
                    on_log_error=None,
                    on_restore_start=None,
                    on_restore_done=None,
                    on_restore_failed=None):
    # Restore an OctoPrint backup zip at *path* over the current base
    # folder. All on_* parameters are optional callbacks for progress and
    # error reporting. Returns True on success, False otherwise.
    # NOTE(review): first parameter is cls — presumably decorated as a
    # classmethod outside this view.
    if not is_os_compatible(["!windows"]):
        if callable(on_log_error):
            on_log_error(u"Restore is not supported on this operating system")
        if callable(on_restore_failed):
            on_restore_failed(path)
        return False

    restart_command = settings.global_get(["server", "commands", "serverRestartCommand"])

    basedir = settings._basedir
    cls._clean_dir_backup(basedir, on_log_progress=on_log_progress)

    plugin_repo = dict()
    repo_url = settings.global_get(["plugins", "pluginmanager", "repository"])
    if repo_url:
        plugin_repo = cls._get_plugin_repository_data(repo_url)

    if callable(on_restore_start):
        on_restore_start(path)

    try:
        with zipfile.ZipFile(path, "r") as zip:
            # read metadata
            try:
                metadata_zipinfo = zip.getinfo("metadata.json")
            except KeyError:
                if callable(on_invalid_backup):
                    on_invalid_backup(u"Not an OctoPrint backup, lacks metadata.json")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            metadata_bytes = zip.read(metadata_zipinfo)
            metadata = json.loads(metadata_bytes)

            # refuse backups created by a newer OctoPrint
            backup_version = get_comparable_version(metadata["version"], base=True)
            if backup_version > get_octoprint_version(base=True):
                if callable(on_invalid_backup):
                    on_invalid_backup(u"Backup is from a newer version of OctoPrint and cannot be applied")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            # unzip to temporary folder
            temp = tempfile.mkdtemp()
            try:
                if callable(on_log_progress):
                    on_log_progress(u"Unpacking backup to {}...".format(temp))
                abstemp = os.path.abspath(temp)
                for member in zip.infolist():
                    # only extract members that stay inside temp (zip-slip guard)
                    abspath = os.path.abspath(os.path.join(temp, member.filename))
                    if abspath.startswith(abstemp):
                        zip.extract(member, temp)

                # sanity check
                configfile = os.path.join(temp, "basedir", "config.yaml")
                if not os.path.exists(configfile):
                    if callable(on_invalid_backup):
                        on_invalid_backup(u"Backup lacks config.yaml")
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                import yaml
                with codecs.open(configfile) as f:
                    configdata = yaml.safe_load(f)

                # with access control enabled the user database must be present
                if configdata.get("accessControl", dict()).get("enabled", True):
                    userfile = os.path.join(temp, "basedir", "users.yaml")
                    if not os.path.exists(userfile):
                        if callable(on_invalid_backup):
                            on_invalid_backup(u"Backup lacks users.yaml")
                        if callable(on_restore_failed):
                            on_restore_failed(path)
                        return False

                if callable(on_log_progress):
                    on_log_progress(u"Unpacked")

                # install available plugins
                with codecs.open(os.path.join(temp, "plugin_list.json"), "r") as f:
                    plugins = json.load(f)

                known_plugins = []
                unknown_plugins = []
                if plugins:
                    if plugin_repo:
                        for plugin in plugins:
                            if plugin["key"] in plugin_manager.plugins:
                                # already installed
                                continue

                            if plugin["key"] in plugin_repo:
                                # not installed, can be installed from repository url
                                known_plugins.append(plugin_repo[plugin["key"]])
                            else:
                                # not installed, not installable
                                unknown_plugins.append(plugin)
                    else:
                        # no repo, all plugins are not installable
                        unknown_plugins = plugins

                if callable(on_log_progress):
                    if known_plugins:
                        on_log_progress(u"Known and installable plugins: {}".format(
                            u", ".join(map(lambda x: x["id"], known_plugins))))
                    if unknown_plugins:
                        on_log_progress(u"Unknown plugins: {}".format(
                            u", ".join(map(lambda x: x["key"], unknown_plugins))))

                if callable(on_install_plugins):
                    on_install_plugins(known_plugins)

                if callable(on_report_unknown_plugins):
                    on_report_unknown_plugins(unknown_plugins)

                # move config data: keep the old basedir as .bck for rollback
                basedir_backup = basedir + ".bck"
                basedir_extracted = os.path.join(temp, "basedir")

                if callable(on_log_progress):
                    on_log_progress(u"Renaming {} to {}...".format(basedir, basedir_backup))
                os.rename(basedir, basedir_backup)

                try:
                    if callable(on_log_progress):
                        on_log_progress(u"Moving {} to {}...".format(basedir_extracted, basedir))
                    os.rename(basedir_extracted, basedir)
                except:
                    # roll back the old base folder if the move failed
                    if callable(on_log_error):
                        on_log_error(u"Error while restoring config data",
                                     exc_info=sys.exc_info())
                        on_log_error(u"Rolling back old config data")
                    os.rename(basedir_backup, basedir)
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                if unknown_plugins:
                    # persist the unknown plugin list for later display
                    if callable(on_log_progress):
                        on_log_progress(u"Writing info file about unknown plugins")

                    if not os.path.isdir(datafolder):
                        os.makedirs(datafolder)

                    unknown_plugins_path = os.path.join(datafolder, UNKNOWN_PLUGINS_FILE)
                    try:
                        with codecs.open(unknown_plugins_path, mode="w", encoding="utf-8") as f:
                            json.dump(unknown_plugins, f)
                    except:
                        # best effort only - the restore itself succeeded
                        if callable(on_log_error):
                            on_log_error(u"Could not persist list of unknown plugins to {}".format(unknown_plugins_path),
                                         exc_info = sys.exc_info())
            finally:
                if callable(on_log_progress):
                    on_log_progress(u"Removing temporary unpacked folder")
                shutil.rmtree(temp)

    except:
        exc_info = sys.exc_info()
        try:
            if callable(on_log_error):
                on_log_error(u"Error while running restore", exc_info=exc_info)
            if callable(on_restore_failed):
                on_restore_failed(path)
        finally:
            # break the reference cycle created by holding exc_info
            del exc_info
        return False

    finally:
        # remove zip
        if callable(on_log_progress):
            on_log_progress(u"Removing temporary zip")
        os.remove(path)

    # restart server
    if restart_command:
        import sarge
        if callable(on_log_progress):
            on_log_progress(u"Restarting...")
        if callable(on_restore_done):
            on_restore_done(path)

        try:
            sarge.run(restart_command, async_=True)
        except:
            if callable(on_log_error):
                on_log_error(u"Error while restarting via command {}".format(restart_command),
                             exc_info=sys.exc_info())
                on_log_error(u"Please restart OctoPrint manually")
            return False

    else:
        if callable(on_restore_done):
            on_restore_done(path)

    return True
# iMe on Micro3D IME_M115_TEST = lambda name, data: name and name.lower().startswith("ime") # Malyan M200 aka Monoprice Select Mini MALYANM200_M115_TEST = lambda name, data: name and name.lower().startswith( "malyan") and data.get("MODEL") == "M200" # Stock Micro3D MICRO3D_M115_TEST = lambda name, data: name and name.lower().startswith( "micro3d") # Any Repetier versions < 0.92 REPETIER_BEFORE_092_M115_TEST = lambda name, data: name and name.lower( ).startswith("repetier") and extract_repetier_version( name) is not None and extract_repetier_version( name) < get_comparable_version("0.92") # THERMAL_PROTECTION capability reported as disabled THERMAL_PROTECTION_CAP_TEST = lambda cap, enabled: cap == "THERMAL_PROTECTION" and not enabled SAFETY_CHECKS = { "firmware-unsafe": dict( m115=(ANETA8_M115_TEST, IME_M115_TEST, MALYANM200_M115_TEST, MICRO3D_M115_TEST, REPETIER_BEFORE_092_M115_TEST), received=(ANYCUBIC_RECEIVED_TEST, CR10S_RECEIVED_TEST, ENDER3_RECEIVED_TEST), cap=(THERMAL_PROTECTION_CAP_TEST, ), message=gettext( u"Your printer's firmware is known to lack mandatory safety features (e.g. " u"thermal runaway protection). This is a fire risk."))
CR10S_RECEIVED_TEST = ("cr10s",
                       lambda line: line and CR10S_AUTHOR in line.lower())

# Creality Ender 3
ENDER3_AUTHOR = " | Author: (Ender3)".lower()
ENDER3_RECEIVED_TEST = ("ender3",
                        lambda line: line and ENDER3_AUTHOR in line.lower())

# iMe on Micro3D
IME_M115_TEST = ("ime",
                 lambda name, data: name and name.lower().startswith("ime"))

# Malyan M200 aka Monoprice Select Mini, versions less than 4.0
MALYANM200_M115_TEST = (
    "malyan_m200",
    lambda name, data: name and name.lower().startswith("malyan")
    and data.get("MODEL") == "M200"
    and get_comparable_version(data.get("VER", "0")) < get_comparable_version("4.0"))

# Stock Micro3D
MICRO3D_M115_TEST = (
    "micro3d",
    lambda name, data: name and name.lower().startswith("micro3d"))

# Any Repetier versions < 0.92
REPETIER_BEFORE_092_M115_TEST = (
    "repetier_before_092",
    lambda name, data: name and name.lower().startswith("repetier")
    and extract_repetier_version(name) is not None
    and extract_repetier_version(name) < get_comparable_version("0.92"))

# THERMAL_PROTECTION capability reported as disabled
THERMAL_PROTECTION_CAP_TEST = (
    "capability",
    lambda cap, enabled: cap == "THERMAL_PROTECTION" and not enabled)
def _restore_backup(cls,
                    path,
                    settings=None,
                    plugin_manager=None,
                    datafolder=None,
                    on_install_plugins=None,
                    on_report_unknown_plugins=None,
                    on_invalid_backup=None,
                    on_log_progress=None,
                    on_log_error=None,
                    on_restore_start=None,
                    on_restore_done=None,
                    on_restore_failed=None):
    # Restore an OctoPrint backup zip at *path* over the current base
    # folder. All on_* parameters are optional callbacks for progress and
    # error reporting. Returns True on success, False otherwise.
    # NOTE(review): first parameter is cls — presumably decorated as a
    # classmethod outside this view.
    if not is_os_compatible(["!windows"]):
        if callable(on_log_error):
            on_log_error(
                u"Restore is not supported on this operating system")
        if callable(on_restore_failed):
            on_restore_failed(path)
        return False

    restart_command = settings.global_get(
        ["server", "commands", "serverRestartCommand"])

    basedir = settings._basedir
    cls._clean_dir_backup(basedir, on_log_progress=on_log_progress)

    plugin_repo = dict()
    repo_url = settings.global_get(
        ["plugins", "pluginmanager", "repository"])
    if repo_url:
        plugin_repo = cls._get_plugin_repository_data(repo_url)

    if callable(on_restore_start):
        on_restore_start(path)

    try:
        with zipfile.ZipFile(path, "r") as zip:
            # read metadata
            try:
                metadata_zipinfo = zip.getinfo("metadata.json")
            except KeyError:
                if callable(on_invalid_backup):
                    on_invalid_backup(
                        u"Not an OctoPrint backup, lacks metadata.json")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            metadata_bytes = zip.read(metadata_zipinfo)
            metadata = json.loads(metadata_bytes)

            # refuse backups created by a newer OctoPrint
            backup_version = get_comparable_version(metadata["version"],
                                                    base=True)
            if backup_version > get_octoprint_version(base=True):
                if callable(on_invalid_backup):
                    on_invalid_backup(
                        u"Backup is from a newer version of OctoPrint and cannot be applied"
                    )
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            # unzip to temporary folder
            temp = tempfile.mkdtemp()
            try:
                if callable(on_log_progress):
                    on_log_progress(
                        u"Unpacking backup to {}...".format(temp))
                abstemp = os.path.abspath(temp)
                for member in zip.infolist():
                    # only extract members that stay inside temp (zip-slip guard)
                    abspath = os.path.abspath(
                        os.path.join(temp, member.filename))
                    if abspath.startswith(abstemp):
                        zip.extract(member, temp)

                # sanity check
                configfile = os.path.join(temp, "basedir", "config.yaml")
                if not os.path.exists(configfile):
                    if callable(on_invalid_backup):
                        on_invalid_backup(u"Backup lacks config.yaml")
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                import yaml
                with codecs.open(configfile) as f:
                    configdata = yaml.safe_load(f)

                # with access control enabled the user database must be present
                if configdata.get("accessControl", dict()).get("enabled", True):
                    userfile = os.path.join(temp, "basedir", "users.yaml")
                    if not os.path.exists(userfile):
                        if callable(on_invalid_backup):
                            on_invalid_backup(u"Backup lacks users.yaml")
                        if callable(on_restore_failed):
                            on_restore_failed(path)
                        return False

                if callable(on_log_progress):
                    on_log_progress(u"Unpacked")

                # install available plugins; the plugin list is optional
                plugins = []
                plugin_list_file = os.path.join(temp, "plugin_list.json")
                if os.path.exists(plugin_list_file):
                    with codecs.open(plugin_list_file, "r") as f:
                        plugins = json.load(f)

                known_plugins = []
                unknown_plugins = []
                if plugins:
                    if plugin_repo:
                        for plugin in plugins:
                            if plugin["key"] in plugin_manager.plugins:
                                # already installed
                                continue

                            if plugin["key"] in plugin_repo:
                                # not installed, can be installed from repository url
                                known_plugins.append(
                                    plugin_repo[plugin["key"]])
                            else:
                                # not installed, not installable
                                unknown_plugins.append(plugin)

                    else:
                        # no repo, all plugins are not installable
                        unknown_plugins = plugins

                if callable(on_log_progress):
                    if known_plugins:
                        on_log_progress(
                            u"Known and installable plugins: {}".format(
                                u", ".join(
                                    map(lambda x: x["id"], known_plugins))))
                    if unknown_plugins:
                        on_log_progress(u"Unknown plugins: {}".format(
                            u", ".join(
                                map(lambda x: x["key"], unknown_plugins))))

                if callable(on_install_plugins):
                    on_install_plugins(known_plugins)

                if callable(on_report_unknown_plugins):
                    on_report_unknown_plugins(unknown_plugins)

                # move config data: keep the old basedir as .bck for rollback
                basedir_backup = basedir + ".bck"
                basedir_extracted = os.path.join(temp, "basedir")

                if callable(on_log_progress):
                    on_log_progress(u"Renaming {} to {}...".format(
                        basedir, basedir_backup))
                shutil.move(basedir, basedir_backup)

                try:
                    if callable(on_log_progress):
                        on_log_progress(u"Moving {} to {}...".format(
                            basedir_extracted, basedir))
                    shutil.move(basedir_extracted, basedir)
                except:
                    # roll back the old base folder if the move failed
                    if callable(on_log_error):
                        on_log_error(u"Error while restoring config data",
                                     exc_info=sys.exc_info())
                        on_log_error(u"Rolling back old config data")
                    shutil.move(basedir_backup, basedir)
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                if unknown_plugins:
                    # persist the unknown plugin list for later display
                    if callable(on_log_progress):
                        on_log_progress(
                            u"Writing info file about unknown plugins")

                    if not os.path.isdir(datafolder):
                        os.makedirs(datafolder)

                    unknown_plugins_path = os.path.join(
                        datafolder, UNKNOWN_PLUGINS_FILE)
                    try:
                        with codecs.open(unknown_plugins_path,
                                         mode="w",
                                         encoding="utf-8") as f:
                            json.dump(unknown_plugins, f)
                    except:
                        # best effort only - the restore itself succeeded
                        if callable(on_log_error):
                            on_log_error(
                                u"Could not persist list of unknown plugins to {}"
                                .format(unknown_plugins_path),
                                exc_info=sys.exc_info())
            finally:
                if callable(on_log_progress):
                    on_log_progress(u"Removing temporary unpacked folder")
                shutil.rmtree(temp)

    except:
        exc_info = sys.exc_info()
        try:
            if callable(on_log_error):
                on_log_error(u"Error while running restore",
                             exc_info=exc_info)
            if callable(on_restore_failed):
                on_restore_failed(path)
        finally:
            # break the reference cycle created by holding exc_info
            del exc_info
        return False

    finally:
        # remove zip
        if callable(on_log_progress):
            on_log_progress(u"Removing temporary zip")
        os.remove(path)

    # restart server
    if restart_command:
        import sarge
        if callable(on_log_progress):
            on_log_progress(u"Restarting...")
        if callable(on_restore_done):
            on_restore_done(path)

        try:
            sarge.run(restart_command, async_=True)
        except:
            if callable(on_log_error):
                on_log_error(
                    u"Error while restarting via command {}".format(
                        restart_command),
                    exc_info=sys.exc_info())
                on_log_error(u"Please restart OctoPrint manually")
            return False

    else:
        if callable(on_restore_done):
            on_restore_done(path)

    return True
# Creality Ender 3 ENDER3_AUTHOR = " | Author: (Ender3)".lower() ENDER3_RECEIVED_TEST = lambda line: line and ENDER3_AUTHOR in line.lower() # iMe on Micro3D IME_M115_TEST = lambda name, data: name and name.lower().startswith("ime") # Malyan M200 aka Monoprice Select Mini MALYANM200_M115_TEST = lambda name, data: name and name.lower().startswith("malyan") and data.get("MODEL") == "M200" # Stock Micro3D MICRO3D_M115_TEST = lambda name, data: name and name.lower().startswith("micro3d") # Any Repetier versions < 0.92 REPETIER_BEFORE_092_M115_TEST = lambda name, data: name and name.lower().startswith("repetier") and extract_repetier_version(name) is not None and extract_repetier_version(name) < get_comparable_version("0.92") # THERMAL_PROTECTION capability reported as disabled THERMAL_PROTECTION_CAP_TEST = lambda cap, enabled: cap == "THERMAL_PROTECTION" and not enabled SAFETY_CHECKS = { "firmware-unsafe": dict(m115=(ANETA8_M115_TEST, IME_M115_TEST, MALYANM200_M115_TEST, MICRO3D_M115_TEST, REPETIER_BEFORE_092_M115_TEST), received=(ANYCUBIC_RECEIVED_TEST, CR10S_RECEIVED_TEST, ENDER3_RECEIVED_TEST), cap=(THERMAL_PROTECTION_CAP_TEST,), message=gettext(u"Your printer's firmware is known to lack mandatory safety features (e.g. " u"thermal runaway protection). This is a fire risk.")) } def extract_repetier_version(name): """
def test_get_comparable_version_base(self):
    """base=True should strip dev/local segments from the parsed version."""
    from octoprint.util.version import get_comparable_version

    stripped = get_comparable_version("1.6.0.dev303+g328853170.dirty",
                                      base=True)
    expected = pkg_resources.parse_version("1.6.0")
    self.assertEqual(stripped, expected)
def m115(self, name, data):
    """Flag M200 printers running Malyan firmware older than FIXED_VERSION."""
    # preserve short-circuit order: version parsing happens only after the
    # name and model checks have already passed
    self._triggered = (name
                       and name.lower().startswith("malyan")
                       and data.get("MODEL") == "M200"
                       and get_comparable_version(data.get("VER", "0")) < self.FIXED_VERSION)
    self._active = False