def setup_mock_popen(expected_stdout, expected_stderr):
    mock_popen = MockPopen()
    mock_popen.communicate = mock.Mock(
        return_value=(
            to_bytes(expected_stdout, encoding="utf-8"),
            to_bytes(expected_stderr, encoding="utf-8"),
        )
    )

    setattr(subprocess, "Popen", lambda *args, **kwargs: mock_popen)

    return mock_popen

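# Hedged usage sketch for setup_mock_popen: a hypothetical test (name and command
# are illustrative, not from the original code) showing how the patched
# subprocess.Popen hands back the canned stdout/stderr as bytes. Assumes the same
# module-level imports as the helper above (subprocess, mock, MockPopen, to_bytes).
def test_setup_mock_popen_example():
    setup_mock_popen("expected output", "")

    process = subprocess.Popen(["some", "command"])
    stdout, stderr = process.communicate()

    assert stdout == b"expected output"
    assert stderr == b""
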
def createPasswordHash(password, salt=None):
    if not salt:
        salt = settings().get(["accessControl", "salt"])
        if salt is None:
            import string
            from random import choice

            chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
            salt = "".join(choice(chars) for _ in range(32))
            settings().set(["accessControl", "salt"], salt)
            settings().save()

    return hashlib.sha512(
        to_bytes(password, encoding="utf-8", errors="replace") + to_bytes(salt)
    ).hexdigest()

def create_password_hash(password, salt=None):
    if not salt:
        salt = settings().get(["accessControl", "salt"])
        if salt is None:
            import string
            from random import choice

            chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
            salt = "".join(choice(chars) for _ in range(32))
            settings().set(["accessControl", "salt"], salt)
            settings().save()

    return hashlib.sha512(
        to_bytes(password, encoding="utf-8", errors="replace") + to_bytes(salt)
    ).hexdigest()

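# Hedged sketch: verifying a password by recomputing the salted hash and comparing
# it against a stored digest. check_password_hash is a hypothetical helper name, not
# part of the code above, and it assumes settings() is already initialized so the
# stored salt can be looked up. hmac.compare_digest is used to avoid leaking timing
# information through the comparison.
import hmac


def check_password_hash(password, stored_hash):
    return hmac.compare_digest(create_password_hash(password), stored_hash)
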
def get_systeminfo_bundle(systeminfo, logbase, printer=None):
    from octoprint.util import to_bytes

    systeminfotxt = []
    for k in sorted(systeminfo.keys()):
        systeminfotxt.append("{}: {}".format(k, systeminfo[k]))

    terminaltxt = None
    if printer and printer.is_operational():
        firmware_info = printer.firmware_info
        if firmware_info:
            systeminfo["printer.firmware"] = firmware_info["name"]

        if hasattr(printer, "_log"):
            terminaltxt = list(printer._log)

    try:
        import zlib  # noqa: F401

        compress_type = zipstream.ZIP_DEFLATED
    except ImportError:
        # no zlib, no compression
        compress_type = zipstream.ZIP_STORED

    z = zipstream.ZipFile()

    # add systeminfo
    z.writestr(
        "systeminfo.txt", to_bytes("\n".join(systeminfotxt)), compress_type=compress_type
    )

    # add terminal.txt, if available
    if terminaltxt:
        z.writestr(
            "terminal.txt", to_bytes("\n".join(terminaltxt)), compress_type=compress_type
        )

    # add logs
    for log in (
        "octoprint.log",
        "serial.log",
        "plugin_softwareupdate_console.log",
        "plugin_pluginmanager_console.log",
    ):
        logpath = os.path.join(logbase, log)
        if os.path.exists(logpath):
            z.write(logpath, arcname=log, compress_type=compress_type)

    return z

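# Hedged consumption sketch for the zipstream-based bundle above: the zipstream
# ZipFile is iterable and yields compressed chunks, so the bundle can be streamed to
# disk (or to an HTTP response) without first building it in memory.
# write_systeminfo_bundle is a hypothetical helper, not part of OctoPrint.
def write_systeminfo_bundle(systeminfo, logbase, target_path, printer=None):
    z = get_systeminfo_bundle(systeminfo, logbase, printer=printer)
    with open(target_path, "wb") as f:
        for chunk in z:
            f.write(chunk)
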
def write(self, data):
    data = to_bytes(data, errors="replace")
    u_data = to_unicode(data, errors="replace")

    if self._debug_awol:
        return len(data)

    if self._debug_drop_connection:
        self._logger.info(
            "Debug drop of connection requested, raising SerialTimeoutException"
        )
        raise SerialTimeoutException()

    with self._incoming_lock:
        if self.incoming is None or self.outgoing is None:
            return 0

        # data has been converted to bytes above, so check against a bytes literal
        if b"M112" in data and self._supportM112:
            self._seriallog.info(u"<<< {}".format(u_data))
            self._kill()
            return len(data)

        try:
            written = self.incoming.put(
                data, timeout=self._write_timeout, partial=True
            )
            self._seriallog.info(u"<<< {}".format(u_data))
            return written
        except queue.Full:
            self._logger.info(
                "Incoming queue is full, raising SerialTimeoutException"
            )
            raise SerialTimeoutException()

def clean_ansi(line: Union[str, bytes]) -> Union[str, bytes]:
    """
    Removes ANSI control codes from ``line``.

    Note: This function also still supports an input of ``bytes``, leading to an
    output of ``bytes``. This is for reasons of backwards compatibility only; it
    should no longer be used, is considered deprecated and will be removed in a
    future version of OctoPrint. A warning will be logged.

    Parameters:
        line (str or bytes): the line to process

    Returns:
        (str or bytes) The line without any ANSI control codes

    .. versionchanged:: 1.8.0

       Usage as ``clean_ansi(line: bytes) -> bytes`` is now deprecated and will be
       removed in a future version of OctoPrint.
    """
    # TODO: bytes support is deprecated, remove in 2.0.0
    if isinstance(line, bytes):
        warnings.warn(
            "Calling clean_ansi with bytes is deprecated, call with str instead",
            DeprecationWarning,
        )
        return to_bytes(_ANSI_REGEX.sub("", to_unicode(line)))
    return _ANSI_REGEX.sub("", line)

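# Illustrative call (not from the original code): applied to a str, clean_ansi
# removes ANSI escape sequences such as color codes, so a colored log line like
# "\x1b[31mError\x1b[0m" is expected to come back as plain "Error". The exact
# coverage depends on the module-level _ANSI_REGEX.
cleaned = clean_ansi("\x1b[31mError\x1b[0m")
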
def readline(self):
    if self._debug_awol:
        time.sleep(self._read_timeout)
        # return bytes to stay consistent with the bytes returned below
        return b""

    if self._debug_drop_connection:
        raise SerialTimeoutException()

    if self._debug_sleep > 0:
        # if we are supposed to sleep, we sleep not longer than the read timeout
        # (and then on the next call sleep again if there's time to sleep left)
        sleep_for = min(self._debug_sleep, self._read_timeout)
        self._debug_sleep -= sleep_for
        time.sleep(sleep_for)

        if self._debug_sleep > 0:
            # we slept the full read timeout, return an empty line
            return b""

        # otherwise our left over timeout is the read timeout minus what we already
        # slept for
        timeout = self._read_timeout - sleep_for

    else:
        # use the full read timeout as timeout
        timeout = self._read_timeout

    try:
        # fetch a line from the queue, wait no longer than timeout
        line = to_unicode(self.outgoing.get(timeout=timeout), errors="replace")
        self._seriallog.info(u">>> {}".format(line.strip()))
        self.outgoing.task_done()
        return to_bytes(line, errors="replace")
    except queue.Empty:
        # queue empty? return empty line
        return b""

def get_systeminfo_bundle(systeminfo, logbase, printer=None, plugin_manager=None):
    from octoprint.util import to_bytes

    try:
        z = ZipStream(compress_type=ZIP_DEFLATED)
    except RuntimeError:
        # no zlib support
        z = ZipStream(sized=True)

    if printer and printer.is_operational():
        firmware_info = printer.firmware_info
        if firmware_info:
            # add firmware to systeminfo so it's included in systeminfo.txt
            systeminfo["printer.firmware"] = firmware_info["name"]

        # add printer log, if available
        if hasattr(printer, "_log"):
            z.add(to_bytes("\n".join(printer._log)), arcname="terminal.txt")

    # add systeminfo
    systeminfotxt = []
    for k in sorted(systeminfo.keys()):
        systeminfotxt.append(f"{k}: {systeminfo[k]}")

    z.add(to_bytes("\n".join(systeminfotxt)), arcname="systeminfo.txt")

    # add logs
    for log in (
        "octoprint.log",
        "serial.log",
    ):
        logpath = os.path.join(logbase, log)
        if os.path.exists(logpath):
            z.add_path(logpath, arcname=log)

    # add additional bundle contents from bundled plugins
    if plugin_manager:
        for name, hook in plugin_manager.get_hooks(
            "octoprint.systeminfo.additional_bundle_files"
        ).items():
            try:
                plugin = plugin_manager.get_plugin_info(name)
                if not plugin.bundled:
                    # we only support this for bundled plugins because we don't want
                    # third party logs to blow up the bundles
                    continue

                logs = hook()
                for log, content in logs.items():
                    if isinstance(content, str):
                        # log path
                        if os.path.exists(content) and os.access(content, os.R_OK):
                            z.add_path(content, arcname=log)
                    elif callable(content):
                        # content generating callable
                        z.add(to_bytes(content()), arcname=log)
            except Exception:
                logging.getLogger(__name__).exception(
                    "Error while retrieving additional bundle contents for plugin {}".format(
                        name
                    ),
                    extra={"plugin": name},
                )

    return z

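# Hedged sketch of serving the ZipStream-based bundle over HTTP: zipstream-ng's
# ZipStream is iterable, so it can back a streamed Flask response. The helper below
# is illustrative only and not OctoPrint's actual systeminfo endpoint.
def systeminfo_bundle_response(systeminfo, logbase):
    from flask import Response

    z = get_systeminfo_bundle(systeminfo, logbase)
    return Response(
        z,
        mimetype="application/zip",
        headers={"Content-Disposition": "attachment; filename=systeminfo.zip"},
    )
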
def _restore_backup(
    cls,
    path,
    settings=None,
    plugin_manager=None,
    datafolder=None,
    on_install_plugins=None,
    on_report_unknown_plugins=None,
    on_invalid_backup=None,
    on_log_progress=None,
    on_log_error=None,
    on_restore_start=None,
    on_restore_done=None,
    on_restore_failed=None,
):
    if not is_os_compatible(["!windows"]):
        if callable(on_log_error):
            on_log_error("Restore is not supported on this operating system")
        if callable(on_restore_failed):
            on_restore_failed(path)
        return False

    restart_command = settings.global_get(
        ["server", "commands", "serverRestartCommand"]
    )

    basedir = settings._basedir
    cls._clean_dir_backup(basedir, on_log_progress=on_log_progress)

    plugin_repo = dict()
    repo_url = settings.global_get(["plugins", "pluginmanager", "repository"])
    if repo_url:
        plugin_repo = cls._get_plugin_repository_data(repo_url)

    if callable(on_restore_start):
        on_restore_start(path)

    try:
        with zipfile.ZipFile(path, "r") as zip:
            # read metadata
            try:
                metadata_zipinfo = zip.getinfo("metadata.json")
            except KeyError:
                if callable(on_invalid_backup):
                    on_invalid_backup("Not an OctoPrint backup, lacks metadata.json")
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            metadata_bytes = zip.read(metadata_zipinfo)
            metadata = json.loads(metadata_bytes)

            backup_version = get_comparable_version(metadata["version"], base=True)
            if backup_version > get_octoprint_version(base=True):
                if callable(on_invalid_backup):
                    on_invalid_backup(
                        "Backup is from a newer version of OctoPrint and cannot be applied"
                    )
                if callable(on_restore_failed):
                    on_restore_failed(path)
                return False

            # unzip to temporary folder
            temp = tempfile.mkdtemp()
            try:
                if callable(on_log_progress):
                    on_log_progress("Unpacking backup to {}...".format(temp))
                abstemp = os.path.abspath(temp)
                for member in zip.infolist():
                    abspath = os.path.abspath(os.path.join(temp, member.filename))
                    if abspath.startswith(abstemp):
                        zip.extract(member, temp)

                # sanity check
                configfile = os.path.join(temp, "basedir", "config.yaml")
                if not os.path.exists(configfile):
                    if callable(on_invalid_backup):
                        on_invalid_backup("Backup lacks config.yaml")
                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                import yaml

                with io.open(configfile, "rt", encoding="utf-8") as f:
                    configdata = yaml.safe_load(f)

                if configdata.get("accessControl", dict()).get("enabled", True):
                    userfile = os.path.join(temp, "basedir", "users.yaml")
                    if not os.path.exists(userfile):
                        if callable(on_invalid_backup):
                            on_invalid_backup("Backup lacks users.yaml")
                        if callable(on_restore_failed):
                            on_restore_failed(path)
                        return False

                if callable(on_log_progress):
                    on_log_progress("Unpacked")

                # install available plugins
                plugins = []
                plugin_list_file = os.path.join(temp, "plugin_list.json")
                if os.path.exists(plugin_list_file):
                    with io.open(os.path.join(temp, "plugin_list.json"), "rb") as f:
                        plugins = json.load(f)

                known_plugins = []
                unknown_plugins = []
                if plugins:
                    if plugin_repo:
                        for plugin in plugins:
                            if plugin["key"] in plugin_manager.plugins:
                                # already installed
                                continue

                            if plugin["key"] in plugin_repo:
                                # not installed, can be installed from repository url
                                known_plugins.append(plugin_repo[plugin["key"]])
                            else:
                                # not installed, not installable
                                unknown_plugins.append(plugin)

                    else:
                        # no repo, all plugins are not installable
                        unknown_plugins = plugins

                    if callable(on_log_progress):
                        if known_plugins:
                            on_log_progress(
                                "Known and installable plugins: {}".format(
                                    ", ".join(map(lambda x: x["id"], known_plugins))
                                )
                            )
                        if unknown_plugins:
                            on_log_progress(
                                "Unknown plugins: {}".format(
                                    ", ".join(map(lambda x: x["key"], unknown_plugins))
                                )
                            )

                    if callable(on_install_plugins):
                        on_install_plugins(known_plugins)

                    if callable(on_report_unknown_plugins):
                        on_report_unknown_plugins(unknown_plugins)

                # move config data
                basedir_backup = basedir + ".bck"
                basedir_extracted = os.path.join(temp, "basedir")

                if callable(on_log_progress):
                    on_log_progress(
                        "Renaming {} to {}...".format(basedir, basedir_backup)
                    )
                shutil.move(basedir, basedir_backup)

                try:
                    if callable(on_log_progress):
                        on_log_progress(
                            "Moving {} to {}...".format(basedir_extracted, basedir)
                        )
                    shutil.move(basedir_extracted, basedir)
                except Exception:
                    if callable(on_log_error):
                        on_log_error(
                            "Error while restoring config data",
                            exc_info=sys.exc_info(),
                        )
                        on_log_error("Rolling back old config data")

                    shutil.move(basedir_backup, basedir)

                    if callable(on_restore_failed):
                        on_restore_failed(path)
                    return False

                if unknown_plugins:
                    if callable(on_log_progress):
                        on_log_progress("Writing info file about unknown plugins")

                    if not os.path.isdir(datafolder):
                        os.makedirs(datafolder)

                    unknown_plugins_path = os.path.join(
                        datafolder, UNKNOWN_PLUGINS_FILE
                    )
                    try:
                        with io.open(unknown_plugins_path, mode="wb") as f:
                            f.write(to_bytes(json.dumps(unknown_plugins)))
                    except Exception:
                        if callable(on_log_error):
                            on_log_error(
                                "Could not persist list of unknown plugins to {}".format(
                                    unknown_plugins_path
                                ),
                                exc_info=sys.exc_info(),
                            )

            finally:
                if callable(on_log_progress):
                    on_log_progress("Removing temporary unpacked folder")
                shutil.rmtree(temp)

    except Exception:
        exc_info = sys.exc_info()
        try:
            if callable(on_log_error):
                on_log_error("Error while running restore", exc_info=exc_info)
            if callable(on_restore_failed):
                on_restore_failed(path)
        finally:
            del exc_info
        return False

    finally:
        # remove zip
        if callable(on_log_progress):
            on_log_progress("Removing temporary zip")
        os.remove(path)

    # restart server
    if restart_command:
        import sarge

        if callable(on_log_progress):
            on_log_progress("Restarting...")
        if callable(on_restore_done):
            on_restore_done(path)

        try:
            sarge.run(restart_command, async_=True)
        except Exception:
            if callable(on_log_error):
                on_log_error(
                    "Error while restarting via command {}".format(restart_command),
                    exc_info=sys.exc_info(),
                )
                on_log_error("Please restart OctoPrint manually")
            return False

    else:
        if callable(on_restore_done):
            on_restore_done(path)

    return True

def index():
    global _templates, _plugin_names, _plugin_vars

    preemptive_cache_enabled = settings().getBoolean(["devel", "cache", "preemptive"])
    locale = g.locale.language if g.locale else "en"

    # helper to check if wizards are active
    def wizard_active(templates):
        return templates is not None and bool(templates["wizard"]["order"])

    # we force a refresh if the client forces one or if we have wizards cached
    force_refresh = (
        util.flask.cache_check_headers()
        or "_refresh" in request.values
        or wizard_active(_templates.get(locale))
    )

    # if we need to refresh our template cache or it's not yet set, process it
    fetch_template_data(refresh=force_refresh)

    now = datetime.datetime.utcnow()

    enable_accesscontrol = userManager.enabled
    enable_gcodeviewer = settings().getBoolean(["gcodeViewer", "enabled"])
    enable_timelapse = settings().getBoolean(["webcam", "timelapseEnabled"])

    def default_template_filter(template_type, template_key):
        if template_type == "navbar":
            return template_key != "login" or enable_accesscontrol
        elif template_type == "tab":
            return (template_key != "gcodeviewer" or enable_gcodeviewer) and (
                template_key != "timelapse" or enable_timelapse
            )
        elif template_type == "settings":
            return template_key != "accesscontrol" or enable_accesscontrol
        elif template_type == "usersettings":
            return enable_accesscontrol
        else:
            return True

    default_additional_etag = [
        enable_accesscontrol,
        enable_gcodeviewer,
        enable_timelapse,
    ] + sorted(
        [
            "{}:{}".format(to_bytes(k, errors="replace"), to_bytes(v, errors="replace"))
            for k, v in _plugin_vars.items()
        ]
    )

    def get_preemptively_cached_view(
        key, view, data=None, additional_request_data=None, additional_unless=None
    ):
        if (data is None and additional_request_data is None) or g.locale is None:
            return view

        d = _preemptive_data(
            key, data=data, additional_request_data=additional_request_data
        )

        def unless():
            return _preemptive_unless(
                base_url=request.url_root, additional_unless=additional_unless
            )

        # finally decorate our view
        return util.flask.preemptively_cached(
            cache=preemptiveCache, data=d, unless=unless
        )(view)

    def get_cached_view(
        key,
        view,
        additional_key_data=None,
        additional_files=None,
        additional_etag=None,
        custom_files=None,
        custom_etag=None,
        custom_lastmodified=None,
    ):
        if additional_etag is None:
            additional_etag = []

        def cache_key():
            return _cache_key(key, additional_key_data=additional_key_data)

        def check_etag_and_lastmodified():
            files = collect_files()
            lastmodified = compute_lastmodified(files)
            lastmodified_ok = util.flask.check_lastmodified(lastmodified)
            etag_ok = util.flask.check_etag(
                compute_etag(
                    files=files,
                    lastmodified=lastmodified,
                    additional=[cache_key()] + additional_etag,
                )
            )
            return lastmodified_ok and etag_ok

        def validate_cache(cached):
            etag_different = (
                compute_etag(additional=[cache_key()] + additional_etag)
                != cached.get_etag()[0]
            )
            return force_refresh or etag_different

        def collect_files():
            if callable(custom_files):
                try:
                    files = custom_files()
                    if files:
                        return files
                except:
                    _logger.exception(
                        "Error while trying to retrieve tracked files for plugin {}".format(
                            key
                        )
                    )

            templates = _get_all_templates()
            assets = _get_all_assets()
            translations = _get_all_translationfiles(
                g.locale.language if g.locale else "en", "messages"
            )

            files = templates + assets + translations

            if callable(additional_files):
                try:
                    af = additional_files()
                    if af:
                        files += af
                except:
                    _logger.exception(
                        "Error while trying to retrieve additional tracked files for plugin {}".format(
                            key
                        )
                    )

            return sorted(set(files))

        def compute_lastmodified(files=None):
            if callable(custom_lastmodified):
                try:
                    lastmodified = custom_lastmodified()
                    if lastmodified:
                        return lastmodified
                except:
                    _logger.exception(
                        "Error while trying to retrieve custom LastModified value for plugin {}".format(
                            key
                        )
                    )

            if files is None:
                files = collect_files()
            return _compute_date(files)

        def compute_etag(files=None, lastmodified=None, additional=None):
            if callable(custom_etag):
                try:
                    etag = custom_etag()
                    if etag:
                        return etag
                except:
                    _logger.exception(
                        "Error while trying to retrieve custom ETag value for plugin {}".format(
                            key
                        )
                    )

            if files is None:
                files = collect_files()
            if lastmodified is None:
                lastmodified = compute_lastmodified(files)
            if lastmodified and not isinstance(lastmodified, basestring):
                from werkzeug.http import http_date

                lastmodified = http_date(lastmodified)
            if additional is None:
                additional = []

            import hashlib

            hash = hashlib.sha1()
            hash.update(octoprint.__version__)
            hash.update(",".join(sorted(files)))
            if lastmodified:
                hash.update(lastmodified)
            for add in additional:
                hash.update(str(add))

            return hash.hexdigest()

        decorated_view = view
        decorated_view = util.flask.lastmodified(lambda _: compute_lastmodified())(
            decorated_view
        )
        decorated_view = util.flask.etagged(
            lambda _: compute_etag(additional=[cache_key()] + additional_etag)
        )(decorated_view)
        decorated_view = util.flask.cached(
            timeout=-1,
            refreshif=validate_cache,
            key=cache_key,
            unless_response=lambda response: util.flask.cache_check_response_headers(
                response
            )
            or util.flask.cache_check_status_code(response, _valid_status_for_cache),
        )(decorated_view)
        decorated_view = util.flask.with_client_revalidation(decorated_view)
        decorated_view = util.flask.conditional(
            check_etag_and_lastmodified, NOT_MODIFIED
        )(decorated_view)
        return decorated_view

    def plugin_view(p):
        cached = get_cached_view(
            p._identifier,
            p.on_ui_render,
            additional_key_data=p.get_ui_additional_key_data_for_cache,
            additional_files=p.get_ui_additional_tracked_files,
            custom_files=p.get_ui_custom_tracked_files,
            custom_etag=p.get_ui_custom_etag,
            custom_lastmodified=p.get_ui_custom_lastmodified,
            additional_etag=p.get_ui_additional_etag(default_additional_etag),
        )

        if preemptive_cache_enabled and p.get_ui_preemptive_caching_enabled():
            view = get_preemptively_cached_view(
                p._identifier,
                cached,
                p.get_ui_data_for_preemptive_caching,
                p.get_ui_additional_request_data_for_preemptive_caching,
                p.get_ui_preemptive_caching_additional_unless,
            )
        else:
            view = cached

        template_filter = p.get_ui_custom_template_filter(default_template_filter)
        if template_filter is not None and callable(template_filter):
            filtered_templates = _filter_templates(_templates[locale], template_filter)
        else:
            filtered_templates = _templates[locale]

        render_kwargs = _get_render_kwargs(
            filtered_templates, _plugin_names, _plugin_vars, now
        )

        return view(now, request, render_kwargs)

    def default_view():
        filtered_templates = _filter_templates(
            _templates[locale], default_template_filter
        )

        wizard = wizard_active(filtered_templates)
        accesscontrol_active = enable_accesscontrol and userManager.hasBeenCustomized()

        render_kwargs = _get_render_kwargs(
            filtered_templates, _plugin_names, _plugin_vars, now
        )
        render_kwargs.update(
            dict(
                enableWebcam=settings().getBoolean(["webcam", "webcamEnabled"])
                and bool(settings().get(["webcam", "stream"])),
                enableTemperatureGraph=settings().get(["feature", "temperatureGraph"]),
                enableAccessControl=enable_accesscontrol,
                accessControlActive=accesscontrol_active,
                enableSdSupport=settings().get(["feature", "sdSupport"]),
                gcodeMobileThreshold=settings().get(
                    ["gcodeViewer", "mobileSizeThreshold"]
                ),
                gcodeThreshold=settings().get(["gcodeViewer", "sizeThreshold"]),
                wizard=wizard,
                now=now,
            )
        )

        # no plugin took an interest, we'll use the default UI
        def make_default_ui():
            r = make_response(render_template("index.jinja2", **render_kwargs))
            if wizard:
                # if we have active wizard dialogs, set non caching headers
                r = util.flask.add_non_caching_response_headers(r)
            return r

        cached = get_cached_view(
            "_default", make_default_ui, additional_etag=default_additional_etag
        )
        preemptively_cached = get_preemptively_cached_view(
            "_default", cached, dict(), dict()
        )
        return preemptively_cached()

    response = None

    forced_view = request.headers.get("X-Force-View", None)

    if forced_view:
        # we have view forced by the preemptive cache
        _logger.debug("Forcing rendering of view {}".format(forced_view))
        if forced_view != "_default":
            plugin = pluginManager.get_plugin_info(forced_view, require_enabled=True)
            if plugin is not None and isinstance(
                plugin.implementation, octoprint.plugin.UiPlugin
            ):
                response = plugin_view(plugin.implementation)
        else:
            response = default_view()

    else:
        # select view from plugins and fall back on default view if no plugin will handle it
        ui_plugins = pluginManager.get_implementations(
            octoprint.plugin.UiPlugin, sorting_context="UiPlugin.on_ui_render"
        )
        for plugin in ui_plugins:
            try:
                if plugin.will_handle_ui(request):
                    # plugin claims responsibility, let it render the UI
                    response = plugin_view(plugin)
                    if response is not None:
                        break
                    else:
                        _logger.warn(
                            "UiPlugin {} returned an empty response".format(
                                plugin._identifier
                            )
                        )
            except Exception:
                _logger.exception(
                    "Error while calling plugin {}, skipping it".format(
                        plugin._identifier
                    ),
                    extra=dict(plugin=plugin._identifier),
                )
        else:
            response = default_view()

    if response is None:
        return abort(404)

    return response

def get_systeminfo_bundle(systeminfo, logbase, printer=None, plugin_manager=None):
    from octoprint.util import to_bytes

    systeminfotxt = []
    for k in sorted(systeminfo.keys()):
        systeminfotxt.append("{}: {}".format(k, systeminfo[k]))

    terminaltxt = None
    if printer and printer.is_operational():
        firmware_info = printer.firmware_info
        if firmware_info:
            systeminfo["printer.firmware"] = firmware_info["name"]

        if hasattr(printer, "_log"):
            terminaltxt = list(printer._log)

    try:
        import zlib  # noqa: F401

        compress_type = zipstream.ZIP_DEFLATED
    except ImportError:
        # no zlib, no compression
        compress_type = zipstream.ZIP_STORED

    z = zipstream.ZipFile()

    # add systeminfo
    z.writestr(
        "systeminfo.txt", to_bytes("\n".join(systeminfotxt)), compress_type=compress_type
    )

    # add terminal.txt, if available
    if terminaltxt:
        z.writestr(
            "terminal.txt", to_bytes("\n".join(terminaltxt)), compress_type=compress_type
        )

    # add logs
    for log in (
        "octoprint.log",
        "serial.log",
    ):
        logpath = os.path.join(logbase, log)
        if os.path.exists(logpath):
            z.write(logpath, arcname=log, compress_type=compress_type)

    # add additional bundle contents from bundled plugins
    if plugin_manager:
        for name, hook in plugin_manager.get_hooks(
            "octoprint.systeminfo.additional_bundle_files"
        ).items():
            try:
                plugin = plugin_manager.get_plugin_info(name)
                if not plugin.bundled:
                    # we only support this for bundled plugins because we don't want
                    # third party logs to blow up the bundles
                    continue

                logs = hook()
                for log, content in logs.items():
                    if isinstance(content, str):
                        # log path
                        if os.path.exists(content) and os.access(content, os.R_OK):
                            z.write(content, arcname=log, compress_type=compress_type)
                    elif callable(content):
                        # content generating callable
                        z.writestr(log, to_bytes(content()), compress_type=compress_type)
            except Exception:
                logging.getLogger(__name__).exception(
                    "Error while retrieving additional bundle contents for plugin {}".format(
                        name
                    ),
                    extra={"plugin": name},
                )

    return z

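# Hedged example of an "octoprint.systeminfo.additional_bundle_files" hook handler,
# matching how the loop above consumes it: the hook returns a dict mapping bundle
# file names to either a readable path on disk or a callable producing the content.
# The handler name, file names and path below are purely illustrative.
def additional_bundle_files_hook(*args, **kwargs):
    return {
        "plugin_example_console.log": "/path/to/plugin_example_console.log",
        "plugin_example_state.txt": lambda: "example state dump",
    }
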