def _list_folder(self, path, filter=None, recursive=True):
    """List the files and folders within ``path``.

    Returns a dict mapping entry names to metadata dicts. Files get their
    stored metadata (created on the fly if missing) extended by name, type,
    size and date; folders are recursed into (when ``recursive``) and
    represented with name, type and children.

    ``filter`` (note: shadows the builtin) is an optional callable
    ``(name, metadata) -> bool`` that vetoes file entries. Newly created
    metadata is persisted before returning.
    """
    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()
    metadata_dirty = False

    result = dict()
    for entry in os.listdir(path):
        if is_hidden_path(entry):
            # no hidden files and folders
            continue

        entry_path = os.path.join(path, entry)

        # file handling
        if os.path.isfile(entry_path):
            file_type = octoprint.filemanager.get_file_type(entry)
            if not file_type:
                # only supported extensions
                continue
            else:
                # get_file_type returns a type path; the first element is
                # the top level type
                file_type = file_type[0]

            if entry in metadata and isinstance(metadata[entry], dict):
                entry_data = metadata[entry]
            else:
                # no (usable) metadata yet - create the basic entry and
                # remember to save afterwards
                entry_data = self._add_basic_metadata(path, entry, save=False, metadata=metadata)
                metadata_dirty = True

            # TODO extract model hash from source if possible to recreate link

            if not filter or filter(entry, entry_data):
                # only add files passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                extended_entry_data["name"] = entry
                extended_entry_data["type"] = file_type
                stat = os.stat(entry_path)
                if stat:
                    extended_entry_data["size"] = stat.st_size
                    extended_entry_data["date"] = int(stat.st_mtime)

                result[entry] = extended_entry_data

        # folder recursion
        elif os.path.isdir(entry_path) and recursive:
            # NOTE: the filter is propagated to the sub listing but not
            # applied to the folder entry itself
            sub_result = self._list_folder(entry_path, filter=filter)
            result[entry] = dict(
                name=entry,
                type="folder",
                children=sub_result
            )

    # TODO recreate links if we have metadata less entries

    # save metadata
    if metadata_dirty:
        self._save_metadata(path, metadata)

    return result
def delete_backup(self, filename):
    """Delete the named backup archive from the plugin's data folder.

    ``filename`` is resolved against the backup folder; the resolved real
    path must stay inside that folder, exist and not be hidden, otherwise
    the delete is silently skipped.

    Returns NO_CONTENT. Re-raises whatever ``os.remove`` raised if the
    deletion itself fails (after logging it).
    """
    backup_folder = self.get_plugin_data_folder()
    full_path = os.path.realpath(os.path.join(backup_folder, filename))

    # Compare against the folder path *including* the trailing separator:
    # a plain startswith(backup_folder) would also accept a sibling folder
    # sharing the prefix (e.g. ".../backup2/..."), opening a traversal hole.
    if full_path.startswith(os.path.join(backup_folder, "")) \
            and os.path.exists(full_path) \
            and not is_hidden_path(full_path):
        try:
            os.remove(full_path)
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate untouched
            self._logger.exception(u"Could not delete {}".format(filename))
            raise
    return NO_CONTENT
def _load_all_identifiers(self):
    """Map profile identifiers to their on-disk paths.

    Scans ``self._folder`` for regular, non-hidden ``*.profile`` files and
    returns ``{identifier: path}`` where the identifier is the file name
    minus its ``.profile`` suffix.
    """
    suffix = ".profile"
    found = {}
    for candidate in scandir(self._folder):
        name = candidate.name
        if not name.endswith(suffix) or is_hidden_path(name):
            continue
        if candidate.is_file():
            found[name[:-len(suffix)]] = candidate.path
    return found
def get_finished_timelapses():
    """Return metadata for all finished timelapse files.

    Lists every non-hidden, valid timelapse in the configured timelapse
    base folder as a dict with name, human readable size, raw byte size
    and formatted modification date.
    """
    files = []
    basedir = settings().getBaseFolder("timelapse", check_writable=False)
    for entry in scandir(basedir):
        if util.is_hidden_path(entry.path) or not valid_timelapse(entry.path):
            continue
        # stat once and reuse the result instead of calling entry.stat()
        # once per field
        stat = entry.stat()
        files.append({
            "name": entry.name,
            "size": util.get_formatted_size(stat.st_size),
            "bytes": stat.st_size,
            "date": util.get_formatted_datetime(datetime.datetime.fromtimestamp(stat.st_mtime))
        })
    return files
def _load_all_identifiers(self):
    """Collect ``{identifier: path}`` for all stored ``*.profile`` files.

    The reserved ``_default`` identifier is always present and maps to
    None; an on-disk ``_default.profile`` file is ignored, as are hidden
    entries and anything that is not a regular file.
    """
    suffix = ".profile"
    results = {"_default": None}
    for name in os.listdir(self._folder):
        if name == "_default.profile" or not name.endswith(suffix) or is_hidden_path(name):
            continue
        candidate = os.path.join(self._folder, name)
        if os.path.isfile(candidate):
            results[name[:-len(suffix)]] = candidate
    return results
def deleteTimelapse(filename):
    """Delete a single timelapse file and return the updated listing.

    ``filename`` is resolved against the timelapse base folder; the
    resolved path must be a valid timelapse, stay inside the folder,
    exist and not be hidden, otherwise the delete is silently skipped.
    Returns a 500 response if the removal itself fails.
    """
    timelapse_folder = settings().getBaseFolder("timelapse")
    full_path = os.path.realpath(os.path.join(timelapse_folder, filename))

    # Require the separator after the folder name so a sibling directory
    # such as ".../timelapse2" cannot satisfy the containment check
    # (plain startswith(timelapse_folder) would).
    if octoprint.timelapse.valid_timelapse(full_path) \
            and full_path.startswith(os.path.join(timelapse_folder, "")) \
            and os.path.exists(full_path) \
            and not util.is_hidden_path(full_path):
        try:
            os.remove(full_path)
        except Exception as ex:
            logging.getLogger(__file__).exception("Error deleting timelapse file {}".format(full_path))
            return make_response("Unexpected error: {}".format(ex), 500)

    return getTimelapseData()
def _load_all_identifiers(self):
    """Return a mapping of profile identifier -> profile file path.

    Always contains the reserved ``_default`` key (mapped to None).
    Hidden entries, non-files and a literal ``_default.profile`` file
    are skipped.
    """
    ext = ".profile"
    identifiers = dict(_default=None)
    for filename in os.listdir(self._folder):
        skip = (is_hidden_path(filename)
                or not filename.endswith(ext)
                or filename == "_default" + ext)
        if skip:
            continue
        full = os.path.join(self._folder, filename)
        if not os.path.isfile(full):
            continue
        identifiers[filename[:-len(ext)]] = full
    return identifiers
def _get_backups(self):
    """List the available backup archives in the plugin data folder.

    Returns a list of dicts with name, modification time, size and
    download url for every non-hidden ``*.zip`` regular file.
    """
    backups = []
    for entry in scandir(self.get_plugin_data_folder()):
        if is_hidden_path(entry.path):
            continue
        if not entry.is_file():
            continue
        if not entry.name.endswith(".zip"):
            continue

        # fetch the stat result once for both date and size
        stat = entry.stat()
        backups.append(dict(name=entry.name,
                            date=stat.st_mtime,
                            size=stat.st_size,
                            url=flask.url_for("index") + "plugin/backup/download/" + entry.name))
    return backups
def route_hook(self, *args, **kwargs):
    """Register the tornado download route for this plugin's data folder.

    Serves ``/download/<path>`` as an attachment, answers 404 for hidden
    paths and restricts access to admins.
    """
    from octoprint.server.util.tornado import LargeResponseHandler, path_validation_factory
    from octoprint.util import is_hidden_path
    from octoprint.server import app
    from octoprint.server.util.tornado import access_validation_factory
    from octoprint.server.util.flask import admin_validator

    hide_hidden = path_validation_factory(lambda path: not is_hidden_path(path),
                                          status_code=404)
    admins_only = access_validation_factory(app, admin_validator)

    download_route = (r"/download/(.*)",
                      LargeResponseHandler,
                      dict(path=self.get_plugin_data_folder(),
                           as_attachment=True,
                           path_validation=hide_hidden,
                           access_validation=admins_only))
    return [download_route]
def deleteTimelapse(filename):
    """Delete a timelapse file, then return the refreshed listing.

    Aborts with a 500 if removal fails; silently skips the delete when
    the resolved path is invalid, outside the timelapse folder, missing
    or hidden.
    """
    timelapse_folder = settings().getBaseFolder("timelapse")
    full_path = os.path.realpath(os.path.join(timelapse_folder, filename))

    # Include the trailing separator in the prefix so sibling folders
    # (e.g. ".../timelapse2") can't slip past the containment check.
    if (octoprint.timelapse.valid_timelapse(full_path)
            and full_path.startswith(os.path.join(timelapse_folder, ""))
            and os.path.exists(full_path)
            and not util.is_hidden_path(full_path)):
        try:
            os.remove(full_path)
        except Exception as ex:
            logging.getLogger(__file__).exception(
                "Error deleting timelapse file {}".format(full_path))
            abort(500, description="Unexpected error: {}".format(ex))

    return getTimelapseData()
def _get_backups(self):
    """Collect metadata for all backup archives in the data folder.

    Only non-hidden regular ``*.zip`` files count; each is reported with
    its name, mtime, size and download url.
    """
    backups = []
    for entry in scandir(self.get_plugin_data_folder()):
        if is_hidden_path(entry.path):
            continue
        if not entry.is_file():
            continue
        if not entry.name.endswith(".zip"):
            continue

        # single stat call feeding both the date and the size field
        stat = entry.stat()
        backups.append(dict(name=entry.name,
                            date=stat.st_mtime,
                            size=stat.st_size,
                            url=flask.url_for("index") + "plugin/backup/download/" + entry.name))
    return backups
def get_finished_timelapses():
    """Collect metadata for every finished timelapse file.

    Hidden entries and anything not recognized as a valid timelapse are
    skipped; the rest is reported with name, formatted size, raw byte
    size and formatted modification date.
    """
    files = []
    basedir = settings().getBaseFolder("timelapse", check_writable=False)
    for entry in scandir(basedir):
        if util.is_hidden_path(entry.path) or not valid_timelapse(entry.path):
            continue

        # stat once and reuse instead of three separate entry.stat() calls
        stat = entry.stat()
        files.append(
            {
                "name": entry.name,
                "size": util.get_formatted_size(stat.st_size),
                "bytes": stat.st_size,
                "date": util.get_formatted_datetime(
                    datetime.datetime.fromtimestamp(stat.st_mtime)
                ),
            }
        )
    return files
def _analysis_backlog_generator(self, path=None):
    """Yield ``(name, path, printer_profile_id)`` for files lacking analysis data.

    Recursively walks ``path`` (defaulting to the storage's base folder)
    and yields every supported file whose stored metadata has no
    "analysis" entry yet; entries from sub folders get their names
    prefixed via ``join_path``.
    """
    if path is None:
        path = self.basefolder

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()

    for entry in scandir(path):
        if is_hidden_path(entry.name):
            continue

        if entry.is_file():
            # Check the file type only for actual files - applying it as a
            # shared pre-filter would also prune whole sub trees whose
            # *folder* names don't look like supported file names.
            if not octoprint.filemanager.valid_file_type(entry.name):
                continue
            if entry.name not in metadata or not isinstance(metadata[entry.name], dict) or "analysis" not in metadata[entry.name]:
                printer_profile_rels = self.get_link(entry.path, "printerprofile")
                if printer_profile_rels:
                    printer_profile_id = printer_profile_rels[0]["id"]
                else:
                    printer_profile_id = None
                yield entry.name, entry.path, printer_profile_id
        elif os.path.isdir(entry.path):
            for sub_entry in self._analysis_backlog_generator(entry.path):
                yield self.join_path(entry.name, sub_entry[0]), sub_entry[1], sub_entry[2]
def _analysis_backlog_generator(self, path=None):
    """Yield ``(name, path, printer_profile_id)`` for files still missing analysis metadata.

    Walks ``path`` (defaulting to the storage's base folder) recursively;
    files of a supported type whose metadata lacks an "analysis" entry are
    yielded, sub folder results are re-yielded with their names prefixed
    via ``join_path``.
    """
    if path is None:
        path = self.basefolder

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()

    for entry in scandir(path):
        # NOTE(review): only the entry *name* is passed here, so this
        # presumably only catches basename-level hiding - confirm against
        # is_hidden_path's contract.
        if is_hidden_path(entry.name):
            continue

        if entry.is_file() and octoprint.filemanager.valid_file_type(entry.name):
            # no usable analysis data stored yet -> part of the backlog
            if not entry.name in metadata or not isinstance(metadata[entry.name], dict) or not "analysis" in metadata[entry.name]:
                printer_profile_rels = self.get_link(entry.path, "printerprofile")
                if printer_profile_rels:
                    printer_profile_id = printer_profile_rels[0]["id"]
                else:
                    printer_profile_id = None
                yield entry.name, entry.path, printer_profile_id
        elif os.path.isdir(entry.path):
            # recurse and prefix sub entries with this folder's name
            for sub_entry in self._analysis_backlog_generator(entry.path):
                yield self.join_path(entry.name, sub_entry[0]), sub_entry[1], sub_entry[2]
def perform_restore(self):
    """Restore a backup, either from an uploaded file or an existing archive.

    Expects either an uploaded file (under the configured upload path
    suffix) or a JSON body with a "path" pointing at an existing backup
    inside the plugin's data folder. The actual restore runs on a daemon
    thread; progress and errors are pushed to the client via plugin
    messages.

    Returns JSON ``started=True`` once the thread is spawned, 400 for
    invalid requests or an unsupported OS, 404 when the referenced backup
    doesn't exist (or fails validation).
    """
    if not is_os_compatible(["!windows"]):
        return flask.make_response(u"Invalid request, the restores are not supported on the underlying operating system", 400)

    input_name = "file"
    input_upload_path = input_name + "." + self._settings.global_get(["server", "uploads", "pathSuffix"])

    if input_upload_path in flask.request.values:
        # file to restore was uploaded
        path = flask.request.values[input_upload_path]

    elif flask.request.json and "path" in flask.request.json:
        # existing backup is supposed to be restored
        backup_folder = self.get_plugin_data_folder()
        path = os.path.realpath(os.path.join(backup_folder, flask.request.json["path"]))
        # Compare against the folder *with* trailing separator - a plain
        # startswith(backup_folder) would also accept a sibling directory
        # sharing the prefix, allowing traversal outside the backup folder.
        if not path.startswith(os.path.join(backup_folder, "")) \
                or not os.path.exists(path) \
                or is_hidden_path(path):
            return flask.abort(404)

    else:
        return flask.make_response(u"Invalid request, neither a file nor a path of a file to restore provided", 400)

    def on_install_plugins(plugins):
        # reinstall the plugins recorded in the backup, skipping incompatible ones
        force_user = self._settings.global_get_boolean(["plugins", "pluginmanager", "pip_force_user"])
        pip_args = self._settings.global_get(["plugins", "pluginmanager", "pip_args"])

        def on_log(line):
            self._logger.info(line)
            self._send_client_message("logline", dict(line=line, type="stdout"))

        for plugin in plugins:
            octoprint_compatible = is_octoprint_compatible(*plugin["compatibility"]["octoprint"])
            os_compatible = is_os_compatible(plugin["compatibility"]["os"])
            compatible = octoprint_compatible and os_compatible
            if not compatible:
                if not octoprint_compatible and not os_compatible:
                    self._logger.warn(u"Cannot install plugin {}, it is incompatible to this version "
                                      u"of OctoPrint and the underlying operating system".format(plugin["id"]))
                elif not octoprint_compatible:
                    self._logger.warn(u"Cannot install plugin {}, it is incompatible to this version "
                                      u"of OctoPrint".format(plugin["id"]))
                elif not os_compatible:
                    self._logger.warn(u"Cannot install plugin {}, it is incompatible to the underlying "
                                      u"operating system".format(plugin["id"]))
                self._send_client_message("plugin_incompatible",
                                          dict(plugin=plugin["id"],
                                               octoprint_compatible=octoprint_compatible,
                                               os_compatible=os_compatible))
                continue

            self._logger.info(u"Installing plugin {}".format(plugin["id"]))
            self._send_client_message("installing_plugin", dict(plugin=plugin["id"]))
            self.__class__._install_plugin(plugin, force_user=force_user, pip_args=pip_args, on_log=on_log)

    def on_report_unknown_plugins(plugins):
        self._send_client_message("unknown_plugins", payload=dict(plugins=plugins))

    def on_log_progress(line):
        self._logger.info(line)
        self._send_client_message("logline", payload=dict(line=line, stream="stdout"))

    def on_log_error(line, exc_info=None):
        self._logger.error(line, exc_info=exc_info)
        self._send_client_message("logline", payload=dict(line=line, stream="stderr"))

        if exc_info is not None:
            exc_type, exc_value, exc_tb = exc_info
            output = traceback.format_exception(exc_type, exc_value, exc_tb)
            for line in output:
                self._send_client_message("logline", payload=dict(line=line.rstrip(), stream="stderr"))

    def on_restore_start(path):
        self._send_client_message("restore_started")

    def on_restore_done(path):
        self._send_client_message("restore_done")

    def on_restore_failed(path):
        self._send_client_message("restore_failed")

    def on_invalid_backup(line):
        on_log_error(line)

    # work on a stable temporary copy of the archive - presumably so the
    # original upload/backup can be cleaned up independently while the
    # restore thread is still running
    archive = tempfile.NamedTemporaryFile(delete=False)
    archive.close()
    shutil.copy(path, archive.name)
    path = archive.name

    # noinspection PyTypeChecker
    thread = threading.Thread(target=self._restore_backup,
                              args=(path,),
                              kwargs=dict(settings=self._settings,
                                          plugin_manager=self._plugin_manager,
                                          datafolder=self.get_plugin_data_folder(),
                                          on_install_plugins=on_install_plugins,
                                          on_report_unknown_plugins=on_report_unknown_plugins,
                                          on_invalid_backup=on_invalid_backup,
                                          on_log_progress=on_log_progress,
                                          on_log_error=on_log_error,
                                          on_restore_start=on_restore_start,
                                          on_restore_done=on_restore_done,
                                          on_restore_failed=on_restore_failed))
    thread.daemon = True
    thread.start()

    return flask.jsonify(started=True)
def _init_script_templating(self):
    """Build the Jinja environment used for rendering stored scripts.

    Combines scripts stored on disk (scripts base folder, hidden paths
    filtered out) with scripts defined in the settings, addressable via
    the prefixes "file:" and "bundled:" respectively, and wires in a
    custom ``{% snippet %}`` tag.
    """
    from jinja2 import Environment, BaseLoader, ChoiceLoader, TemplateNotFound
    from jinja2.nodes import Include
    from jinja2.ext import Extension

    from octoprint.util.jinja import FilteredFileSystemLoader

    class SnippetExtension(Extension):
        # {% snippet "name" %} behaves like an include, with relative
        # names resolved against the snippets/ folder
        tags = {"snippet"}
        fields = Include.fields

        def parse(self, parser):
            node = parser.parse_include()
            if not node.template.value.startswith("/"):
                node.template.value = "snippets/" + node.template.value
            return node

    class SettingsScriptLoader(BaseLoader):
        # resolves "a/b/c" template names against the nested "scripts"
        # settings tree
        def __init__(self, s):
            self._settings = s

        def get_source(self, environment, template):
            parts = template.split("/")
            if not len(parts):
                raise TemplateNotFound(template)

            script = self._settings.get(["scripts"], merged=True)
            for part in parts:
                if isinstance(script, dict) and part in script:
                    script = script[part]
                else:
                    raise TemplateNotFound(template)
            source = script
            if source is None:
                raise TemplateNotFound(template)
            # uptodate callable: template stays valid as long as the
            # settings have not been modified since load
            mtime = self._settings._mtime
            return source, None, lambda: mtime == self._settings.last_modified

        def list_templates(self):
            scripts = self._settings.get(["scripts"], merged=True)
            return self._get_templates(scripts)

        def _get_templates(self, scripts):
            # NOTE(review): "basestring" implies this targets Python 2 or
            # relies on a compat alias defined elsewhere in the file - confirm
            templates = []
            for key in scripts:
                if isinstance(scripts[key], dict):
                    templates += map(lambda x: key + "/" + x, self._get_templates(scripts[key]))
                elif isinstance(scripts[key], basestring):
                    templates.append(key)
            return templates

    class SelectLoader(BaseLoader):
        # dispatches "prefix:name" lookups to the loader mapped for that
        # prefix, everything else to the default loader
        def __init__(self, default, mapping, sep=":"):
            self._default = default
            self._mapping = mapping
            self._sep = sep

        def get_source(self, environment, template):
            if self._sep in template:
                prefix, name = template.split(self._sep, 1)
                if not prefix in self._mapping:
                    raise TemplateNotFound(template)
                return self._mapping[prefix].get_source(environment, name)
            return self._default.get_source(environment, template)

        def list_templates(self):
            return self._default.list_templates()

    class RelEnvironment(Environment):
        # environment that resolves relative includes against the
        # including template, keeping any loader prefix intact
        def __init__(self, prefix_sep=":", *args, **kwargs):
            Environment.__init__(self, *args, **kwargs)
            self._prefix_sep = prefix_sep

        def join_path(self, template, parent):
            prefix, name = self._split_prefix(template)

            if name.startswith("/"):
                # absolute within the loader -> strip the leading slash
                return self._join_prefix(prefix, name[1:])
            else:
                # relative -> resolve against the parent template's folder
                _, parent_name = self._split_prefix(parent)
                parent_base = parent_name.split("/")[:-1]
                return self._join_prefix(prefix, "/".join(parent_base) + "/" + name)

        def _split_prefix(self, template):
            if self._prefix_sep in template:
                return template.split(self._prefix_sep, 1)
            else:
                return "", template

        def _join_prefix(self, prefix, template):
            if len(prefix):
                return prefix + self._prefix_sep + template
            else:
                return template

    path_filter = lambda path: not is_hidden_path(path)
    file_system_loader = FilteredFileSystemLoader(self.getBaseFolder("scripts"),
                                                 path_filter=path_filter)
    settings_loader = SettingsScriptLoader(self)
    choice_loader = ChoiceLoader([file_system_loader, settings_loader])
    select_loader = SelectLoader(choice_loader,
                                 dict(bundled=settings_loader, file=file_system_loader))
    return RelEnvironment(loader=select_loader, extensions=[SnippetExtension])
def _init_script_templating(self):
    """Set up the Jinja environment for script rendering.

    Scripts come from two sources - the scripts base folder on disk
    (hidden paths excluded) and the "scripts" settings tree - reachable
    explicitly via the "file:" and "bundled:" prefixes, with a combined
    fallback lookup. Also registers the custom ``{% snippet %}`` tag.
    """
    from jinja2 import Environment, BaseLoader, ChoiceLoader, TemplateNotFound
    from jinja2.nodes import Include
    from jinja2.ext import Extension

    from octoprint.util.jinja import FilteredFileSystemLoader

    class SnippetExtension(Extension):
        # include-like tag; relative names are looked up under snippets/
        tags = {"snippet"}
        fields = Include.fields

        def parse(self, parser):
            node = parser.parse_include()
            if not node.template.value.startswith("/"):
                node.template.value = "snippets/" + node.template.value
            return node

    class SettingsScriptLoader(BaseLoader):
        # walks the nested "scripts" settings dict using the "/"-separated
        # template name as the key path
        def __init__(self, s):
            self._settings = s

        def get_source(self, environment, template):
            parts = template.split("/")
            if not len(parts):
                raise TemplateNotFound(template)

            script = self._settings.get(["scripts"], merged=True)
            for part in parts:
                if isinstance(script, dict) and part in script:
                    script = script[part]
                else:
                    raise TemplateNotFound(template)
            source = script
            if source is None:
                raise TemplateNotFound(template)
            # cached source stays valid while the settings are unmodified
            mtime = self._settings._mtime
            return source, None, lambda: mtime == self._settings.last_modified

        def list_templates(self):
            scripts = self._settings.get(["scripts"], merged=True)
            return self._get_templates(scripts)

        def _get_templates(self, scripts):
            # NOTE(review): "basestring" is a Python 2 name (or a compat
            # alias defined elsewhere in this file) - confirm
            templates = []
            for key in scripts:
                if isinstance(scripts[key], dict):
                    templates += map(lambda x: key + "/" + x, self._get_templates(scripts[key]))
                elif isinstance(scripts[key], basestring):
                    templates.append(key)
            return templates

    class SelectLoader(BaseLoader):
        # routes "prefix:name" to the mapped loader, plain names to the default
        def __init__(self, default, mapping, sep=":"):
            self._default = default
            self._mapping = mapping
            self._sep = sep

        def get_source(self, environment, template):
            if self._sep in template:
                prefix, name = template.split(self._sep, 1)
                if not prefix in self._mapping:
                    raise TemplateNotFound(template)
                return self._mapping[prefix].get_source(environment, name)
            return self._default.get_source(environment, template)

        def list_templates(self):
            return self._default.list_templates()

    class RelEnvironment(Environment):
        # makes template includes relative to the including template,
        # preserving any loader prefix
        def __init__(self, prefix_sep=":", *args, **kwargs):
            Environment.__init__(self, *args, **kwargs)
            self._prefix_sep = prefix_sep

        def join_path(self, template, parent):
            prefix, name = self._split_prefix(template)

            if name.startswith("/"):
                # loader-absolute name - drop the leading slash
                return self._join_prefix(prefix, name[1:])
            else:
                # relative name - anchor at the parent template's folder
                _, parent_name = self._split_prefix(parent)
                parent_base = parent_name.split("/")[:-1]
                return self._join_prefix(prefix, "/".join(parent_base) + "/" + name)

        def _split_prefix(self, template):
            if self._prefix_sep in template:
                return template.split(self._prefix_sep, 1)
            else:
                return "", template

        def _join_prefix(self, prefix, template):
            if len(prefix):
                return prefix + self._prefix_sep + template
            else:
                return template

    path_filter = lambda path: not is_hidden_path(path)
    file_system_loader = FilteredFileSystemLoader(self.getBaseFolder("scripts"),
                                                 path_filter=path_filter)
    settings_loader = SettingsScriptLoader(self)
    choice_loader = ChoiceLoader([file_system_loader, settings_loader])
    select_loader = SelectLoader(choice_loader,
                                 dict(bundled=settings_loader, file=file_system_loader))
    return RelEnvironment(loader=select_loader, extensions=[SnippetExtension])
def perform_restore(self):
    """Kick off restoring a backup from an upload or an existing archive.

    Accepts either an uploaded file (configured upload path suffix) or a
    JSON body with a "path" referencing an archive inside the plugin data
    folder. The restore itself runs on a daemon thread that reports
    progress via client messages.

    Returns JSON ``started=True`` once the thread is running, 400 for bad
    requests or an unsupported OS, 404 for a missing/invalid backup path.
    """
    if not is_os_compatible(["!windows"]):
        return flask.make_response(
            u"Invalid request, the restores are not supported on the underlying operating system",
            400)

    input_name = "file"
    input_upload_path = input_name + "." + self._settings.global_get(
        ["server", "uploads", "pathSuffix"])

    if input_upload_path in flask.request.values:
        # file to restore was uploaded
        path = flask.request.values[input_upload_path]

    elif flask.request.json and "path" in flask.request.json:
        # existing backup is supposed to be restored
        backup_folder = self.get_plugin_data_folder()
        path = os.path.realpath(
            os.path.join(backup_folder, flask.request.json["path"]))
        # Containment check including the trailing separator - a plain
        # startswith(backup_folder) would also match sibling directories
        # that merely share the name prefix.
        if not path.startswith(os.path.join(backup_folder, "")) \
                or not os.path.exists(path) \
                or is_hidden_path(path):
            return flask.abort(404)

    else:
        return flask.make_response(
            u"Invalid request, neither a file nor a path of a file to restore provided",
            400)

    def on_install_plugins(plugins):
        # reinstall plugins recorded in the backup, skipping incompatible ones
        force_user = self._settings.global_get_boolean(
            ["plugins", "pluginmanager", "pip_force_user"])
        pip_args = self._settings.global_get(
            ["plugins", "pluginmanager", "pip_args"])

        def on_log(line):
            self._logger.info(line)
            self._send_client_message("logline", dict(line=line, type="stdout"))

        for plugin in plugins:
            octoprint_compatible = is_octoprint_compatible(
                *plugin["compatibility"]["octoprint"])
            os_compatible = is_os_compatible(plugin["compatibility"]["os"])
            compatible = octoprint_compatible and os_compatible
            if not compatible:
                if not octoprint_compatible and not os_compatible:
                    self._logger.warn(
                        u"Cannot install plugin {}, it is incompatible to this version "
                        u"of OctoPrint and the underlying operating system"
                        .format(plugin["id"]))
                elif not octoprint_compatible:
                    self._logger.warn(
                        u"Cannot install plugin {}, it is incompatible to this version "
                        u"of OctoPrint".format(plugin["id"]))
                elif not os_compatible:
                    self._logger.warn(
                        u"Cannot install plugin {}, it is incompatible to the underlying "
                        u"operating system".format(plugin["id"]))
                self._send_client_message(
                    "plugin_incompatible",
                    dict(plugin=plugin["id"],
                         octoprint_compatible=octoprint_compatible,
                         os_compatible=os_compatible))
                continue

            self._logger.info(u"Installing plugin {}".format(plugin["id"]))
            self._send_client_message("installing_plugin", dict(plugin=plugin["id"]))
            self.__class__._install_plugin(plugin,
                                           force_user=force_user,
                                           pip_args=pip_args,
                                           on_log=on_log)

    def on_report_unknown_plugins(plugins):
        self._send_client_message("unknown_plugins", payload=dict(plugins=plugins))

    def on_log_progress(line):
        self._logger.info(line)
        self._send_client_message("logline", payload=dict(line=line, stream="stdout"))

    def on_log_error(line, exc_info=None):
        self._logger.error(line, exc_info=exc_info)
        self._send_client_message("logline", payload=dict(line=line, stream="stderr"))

        if exc_info is not None:
            exc_type, exc_value, exc_tb = exc_info
            output = traceback.format_exception(exc_type, exc_value, exc_tb)
            for line in output:
                self._send_client_message("logline",
                                          payload=dict(line=line.rstrip(), stream="stderr"))

    def on_restore_start(path):
        self._send_client_message("restore_started")

    def on_restore_done(path):
        self._send_client_message("restore_done")

    def on_restore_failed(path):
        self._send_client_message("restore_failed")

    def on_invalid_backup(line):
        on_log_error(line)

    # work on a stable temporary copy of the archive - presumably so the
    # original upload/backup can be cleaned up independently while the
    # restore thread is still running
    archive = tempfile.NamedTemporaryFile(delete=False)
    archive.close()
    shutil.copy(path, archive.name)
    path = archive.name

    # noinspection PyTypeChecker
    thread = threading.Thread(
        target=self._restore_backup,
        args=(path, ),
        kwargs=dict(settings=self._settings,
                    plugin_manager=self._plugin_manager,
                    datafolder=self.get_plugin_data_folder(),
                    on_install_plugins=on_install_plugins,
                    on_report_unknown_plugins=on_report_unknown_plugins,
                    on_invalid_backup=on_invalid_backup,
                    on_log_progress=on_log_progress,
                    on_log_error=on_log_error,
                    on_restore_start=on_restore_start,
                    on_restore_done=on_restore_done,
                    on_restore_failed=on_restore_failed))
    thread.daemon = True
    thread.start()

    return flask.jsonify(started=True)
def route_hook(self, *args, **kwargs):
    """Expose the plugin data folder via an admin-only download route.

    Hidden paths are rejected with a 404; everything else is served as
    an attachment.
    """
    from octoprint.server.util.tornado import LargeResponseHandler, path_validation_factory
    from octoprint.util import is_hidden_path
    from octoprint.server import app
    from octoprint.server.util.tornado import access_validation_factory
    from octoprint.server.util.flask import admin_validator

    handler_config = dict(
        path=self.get_plugin_data_folder(),
        as_attachment=True,
        path_validation=path_validation_factory(lambda path: not is_hidden_path(path),
                                                status_code=404),
        access_validation=access_validation_factory(app, admin_validator)
    )
    return [(r"/download/(.*)", LargeResponseHandler, handler_config)]
def route_hook(self, server_routes, *args, **kwargs):
    """Serve files from the configured path under /download/ as attachments."""
    from octoprint.server.util.tornado import LargeResponseHandler, path_validation_factory
    from octoprint.util import is_hidden_path

    # NOTE(review): sibling hooks reject hidden paths with a 404; this one
    # answers 401 - confirm whether that difference is intentional.
    reject_hidden = path_validation_factory(lambda path: not is_hidden_path(path),
                                            status_code=401)
    return [
        (r"/download/(.*)",
         LargeResponseHandler,
         dict(path=self._settings.get(["path"]),
              as_attachment=True,
              path_validation=reject_hidden))
    ]
def route_hook(self, server_routes, *args, **kwargs):
    """Register the uploads download route and the webcam snapshot proxy."""
    from octoprint.server.util.tornado import LargeResponseHandler, UrlProxyHandler, path_validation_factory
    from octoprint.util import is_hidden_path

    uploads_route = (r"/download/(.*)",
                     LargeResponseHandler,
                     dict(path=self._settings.global_get_basefolder("uploads"),
                          as_attachment=True,
                          path_validation=path_validation_factory(
                              lambda path: not is_hidden_path(path),
                              status_code=404)))
    snapshot_route = (r"forward",
                      UrlProxyHandler,
                      dict(url=self._settings.global_get(["webcam", "snapshot"]),
                           as_attachment=True))
    return [uploads_route, snapshot_route]
def _list_folder(self, path, base="", entry_filter=None, recursive=True, **kwargs):
    """Recursively list the contents of ``path``.

    Returns a dict mapping entry names to metadata dicts: files carry
    their stored metadata plus name/path/type/typePath/size/date, folders
    carry name/path/type/type_path plus - when ``recursive`` - children
    and accumulated size. ``entry_filter`` (legacy keyword alias
    "filter") may veto entries; ``base`` is the path prefix of ``path``
    within the storage location. Dirty metadata is saved back before
    returning.
    """
    if entry_filter is None:
        # backwards compatibility for callers still using "filter"
        entry_filter = kwargs.get("filter", None)

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()
    metadata_dirty = False

    result = dict()
    for entry in scandir(path):
        if is_hidden_path(entry.name):
            # no hidden files and folders
            continue

        try:
            # Fetch all DirEntry derived state inside a try - the entry may
            # be moved or deleted while we are still iterating, in which
            # case we skip it instead of aborting the whole listing.
            entry_name = entry.name
            entry_path = entry.path
            entry_is_file = entry.is_file()
            entry_is_dir = entry.is_dir()
            entry_stat = entry.stat()
        except Exception:
            continue

        try:
            # may rename the entry on disk to its sanitized form
            new_entry_name, new_entry_path = self._sanitize_entry(entry_name, path, entry_path)
            if entry_name != new_entry_name or entry_path != new_entry_path:
                entry_name = new_entry_name
                entry_path = new_entry_path
                entry_stat = os.stat(entry_path)
        except Exception:
            # error while trying to rename the file, we'll continue here and ignore it
            continue

        path_in_location = entry_name if not base else base + entry_name

        # file handling
        if entry_is_file:
            type_path = octoprint.filemanager.get_file_type(entry_name)
            if not type_path:
                # only supported extensions
                continue
            else:
                # first element of the type path is the top level type
                file_type = type_path[0]

            if entry_name in metadata and isinstance(metadata[entry_name], dict):
                entry_data = metadata[entry_name]
            else:
                # no (usable) metadata yet - create it and save later
                entry_data = self._add_basic_metadata(path, entry_name, save=False, metadata=metadata)
                metadata_dirty = True

            # TODO extract model hash from source if possible to recreate link

            if not entry_filter or entry_filter(entry_name, entry_data):
                # only add files passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                extended_entry_data["name"] = entry_name
                extended_entry_data["path"] = path_in_location
                extended_entry_data["type"] = file_type
                extended_entry_data["typePath"] = type_path

                stat = entry_stat
                if stat:
                    extended_entry_data["size"] = stat.st_size
                    extended_entry_data["date"] = int(stat.st_mtime)

                result[entry_name] = extended_entry_data

        # folder recursion
        elif entry_is_dir:
            entry_data = dict(name=entry_name,
                              path=path_in_location,
                              type="folder",
                              type_path=["folder"])
            if recursive:
                sub_result = self._list_folder(entry_path,
                                               base=path_in_location + "/",
                                               entry_filter=entry_filter,
                                               recursive=recursive)
                entry_data["children"] = sub_result

            if not entry_filter or entry_filter(entry_name, entry_data):
                def get_size():
                    # sum of the sizes of all children that carry one
                    total_size = 0
                    for element in entry_data["children"].values():
                        if "size" in element:
                            total_size += element["size"]
                    return total_size

                # only add folders passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                if recursive:
                    extended_entry_data["size"] = get_size()

                result[entry_name] = extended_entry_data

    # TODO recreate links if we have metadata less entries

    # save metadata
    if metadata_dirty:
        self._save_metadata(path, metadata)

    return result
def _list_folder(self, path, entry_filter=None, recursive=True, **kwargs):
    """List the files and folders within ``path``.

    Returns a dict of entry name to metadata dict; ``entry_filter``
    (legacy keyword alias "filter") optionally vetoes file entries,
    folders are recursed into when ``recursive``. Entries are sanitized
    (possibly renamed on disk) on the fly; freshly created metadata is
    persisted before returning.
    """
    if entry_filter is None:
        # backwards compatibility for callers still using "filter"
        entry_filter = kwargs.get("filter", None)

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()
    metadata_dirty = False

    result = dict()
    for entry in os.listdir(path):
        if is_hidden_path(entry):
            # no hidden files and folders
            continue

        entry_path = os.path.join(path, entry)

        try:
            # may rename the entry on disk to its sanitized form
            entry, entry_path = self._sanitize_entry(entry, path, entry_path)
        except:
            # error while trying to rename the file, we'll continue here and ignore it
            continue

        # file handling
        if os.path.isfile(entry_path):
            file_type = octoprint.filemanager.get_file_type(entry)
            if not file_type:
                # only supported extensions
                continue
            else:
                # first element of the type path is the top level type
                file_type = file_type[0]

            if entry in metadata and isinstance(metadata[entry], dict):
                entry_data = metadata[entry]
            else:
                # no (usable) metadata yet - create it and save later
                entry_data = self._add_basic_metadata(path, entry, save=False, metadata=metadata)
                metadata_dirty = True

            # TODO extract model hash from source if possible to recreate link

            if not entry_filter or entry_filter(entry, entry_data):
                # only add files passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                extended_entry_data["name"] = entry
                extended_entry_data["type"] = file_type
                stat = os.stat(entry_path)
                if stat:
                    extended_entry_data["size"] = stat.st_size
                    extended_entry_data["date"] = int(stat.st_mtime)

                result[entry] = extended_entry_data

        # folder recursion
        elif os.path.isdir(entry_path) and recursive:
            sub_result = self._list_folder(entry_path, entry_filter=entry_filter)
            result[entry] = dict(
                name=entry,
                type="folder",
                children=sub_result
            )

    # TODO recreate links if we have metadata less entries

    # save metadata
    if metadata_dirty:
        self._save_metadata(path, metadata)

    return result
def _list_folder(self, path, base="", entry_filter=None, recursive=True, **kwargs):
    """Recursively list the contents of ``path``.

    Files are returned with their stored metadata extended by name, path
    (relative to the location via ``base``), type, typePath, size and
    date; folders with name, path, type, type_path and - when
    ``recursive`` - children plus their accumulated size.
    ``entry_filter`` (legacy keyword alias "filter") may veto entries.
    Dirty metadata is saved back before returning.
    """
    if entry_filter is None:
        # backwards compatibility for callers still using "filter"
        entry_filter = kwargs.get("filter", None)

    metadata = self._get_metadata(path)
    if not metadata:
        metadata = dict()
    metadata_dirty = False

    result = dict()
    for entry in scandir(path):
        if is_hidden_path(entry.name):
            # no hidden files and folders
            continue

        try:
            # capture all DirEntry derived state up front
            entry_name = entry.name
            entry_path = entry.path
            entry_is_file = entry.is_file()
            entry_is_dir = entry.is_dir()
            entry_stat = entry.stat()
        except:
            # error while trying to fetch file metadata, that might be thanks to file already having
            # been moved or deleted - ignore it and continue
            continue

        try:
            # may rename the entry on disk to its sanitized form
            new_entry_name, new_entry_path = self._sanitize_entry(entry_name, path, entry_path)
            if entry_name != new_entry_name or entry_path != new_entry_path:
                entry_name = new_entry_name
                entry_path = new_entry_path
                entry_stat = os.stat(entry_path)
        except:
            # error while trying to rename the file, we'll continue here and ignore it
            continue

        path_in_location = entry_name if not base else base + entry_name

        # file handling
        if entry_is_file:
            type_path = octoprint.filemanager.get_file_type(entry_name)
            if not type_path:
                # only supported extensions
                continue
            else:
                # first element of the type path is the top level type
                file_type = type_path[0]

            if entry_name in metadata and isinstance(metadata[entry_name], dict):
                entry_data = metadata[entry_name]
            else:
                # no (usable) metadata yet - create it and save later
                entry_data = self._add_basic_metadata(path, entry_name, save=False, metadata=metadata)
                metadata_dirty = True

            # TODO extract model hash from source if possible to recreate link

            if not entry_filter or entry_filter(entry_name, entry_data):
                # only add files passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                extended_entry_data["name"] = entry_name
                extended_entry_data["path"] = path_in_location
                extended_entry_data["type"] = file_type
                extended_entry_data["typePath"] = type_path

                stat = entry_stat
                if stat:
                    extended_entry_data["size"] = stat.st_size
                    extended_entry_data["date"] = int(stat.st_mtime)

                result[entry_name] = extended_entry_data

        # folder recursion
        elif entry_is_dir:
            entry_data = dict(
                name=entry_name,
                path=path_in_location,
                type="folder",
                type_path=["folder"]
            )
            if recursive:
                sub_result = self._list_folder(entry_path, base=path_in_location + "/",
                                               entry_filter=entry_filter,
                                               recursive=recursive)
                entry_data["children"] = sub_result

            if not entry_filter or entry_filter(entry_name, entry_data):
                def get_size():
                    # sum of the sizes of all children that carry one
                    total_size = 0
                    for element in entry_data["children"].values():
                        if "size" in element:
                            total_size += element["size"]
                    return total_size

                # only add folders passing the optional filter
                extended_entry_data = dict()
                extended_entry_data.update(entry_data)
                if recursive:
                    extended_entry_data["size"] = get_size()

                result[entry_name] = extended_entry_data

    # TODO recreate links if we have metadata less entries

    # save metadata
    if metadata_dirty:
        self._save_metadata(path, metadata)

    return result