def handle_plugins_loaded(startup=False, initialize_implementations=True, force_reload=None):
    """Process overlay-defined plugin disabling, only during initial startup."""
    if not startup:
        # overlay processing is only relevant for the initial load
        return

    from octoprint.util import sv

    # (name, addons, order) triples; entries without an explicit order sort
    # last, then by order and name
    overlay_entries = sorted(
        ((key, value[0], value[1]) for key, value in disabled_from_overlays.items()),
        key=lambda entry: (entry[2] is None, sv(entry[2]), sv(entry[0])),
    )

    disabled_list = pm.plugin_disabled_list
    seen = []
    for name, addons, _ in overlay_entries:
        if name in disabled_list or name.endswith("disabled"):
            # the disabling plugin is itself disabled, ignore its wishes
            continue

        for addon in addons:
            if addon in disabled_list:
                continue

            if addon in seen:
                logger.info(
                    "Plugin {} wants to disable plugin {}, but that was already processed".format(
                        name, addon
                    )
                )

            if addon not in seen and addon not in disabled_list:
                disabled_list.append(addon)
                logger.info(
                    "Disabling plugin {} as defined by plugin {}".format(addon, name)
                )

        seen.append(name)
def _create_etag(path, filter, recursive, lm=None):
    """Compute a SHA1 based ETag for a files API response.

    The hash covers the last modified value, the filter and recursive flags,
    and - for SD card relevant paths - the current SD file listing.

    Args:
        path: the request path, expected to start with ``/api/files``
        filter: active filter parameter, mixed into the hash verbatim
        recursive: whether the listing is recursive, mixed into the hash
        lm: optional precomputed last modified value; if ``None`` it is
            computed via ``_create_lastmodified``

    Returns:
        hex digest string, or ``None`` if no last modified value is available
    """
    if lm is None:
        lm = _create_lastmodified(path, recursive)

    if lm is None:
        return None

    # named "hasher" instead of "hash" to avoid shadowing the builtin
    hasher = hashlib.sha1()

    def hash_update(value):
        value = value.encode("utf-8")
        hasher.update(value)

    hash_update(str(lm))
    hash_update(str(filter))
    hash_update(str(recursive))

    # strip the endpoint prefix so only the storage-specific part remains
    path = path[len("/api/files"):]
    if path.startswith("/"):
        path = path[1:]
    if "/" in path:
        storage, _ = path.split("/", 1)
    else:
        storage = path

    if path == "" or storage == FileDestinations.SDCARD:
        # include sd data in etag
        hash_update(
            repr(sorted(printer.get_sd_files(), key=lambda x: sv(x["name"]))))

    hash_update(
        _DATA_FORMAT_VERSION)  # increment version if we change the API format

    return hasher.hexdigest()
def get_unrendered_timelapses():
    """Collect metadata about all unrendered timelapse jobs.

    Scans the timelapse temp folder for capture frames (``*.jpg``), groups
    them by job prefix and returns a list of job dicts (name, count, size,
    date, recording/rendering/processing flags), sorted by job name.
    """
    global _job_lock
    global current

    delete_old_unrendered_timelapses()

    basedir = settings().getBaseFolder("timelapse_tmp", check_writable=False)
    jobs = collections.defaultdict(
        lambda: {"count": 0, "size": None, "bytes": 0, "date": None, "timestamp": None}
    )

    for entry in scandir(basedir):
        if not fnmatch.fnmatch(entry.name, "*.jpg"):
            continue

        prefix = _extract_prefix(entry.name)
        if prefix is None:
            continue

        # hoisted: the original called entry.stat() three times per entry
        entry_stat = entry.stat()

        jobs[prefix]["count"] += 1
        jobs[prefix]["bytes"] += entry_stat.st_size
        if (
            jobs[prefix]["timestamp"] is None
            or entry_stat.st_mtime < jobs[prefix]["timestamp"]
        ):
            # track the oldest frame's mtime as the job timestamp
            jobs[prefix]["timestamp"] = entry_stat.st_mtime

    with _job_lock:
        global current_render_job

        def finalize_fields(prefix, job):
            # derive display fields and processing flags, drop raw timestamp
            currently_recording = current is not None and current.prefix == prefix
            currently_rendering = (
                current_render_job is not None and current_render_job["prefix"] == prefix
            )

            job["size"] = util.get_formatted_size(job["bytes"])
            job["date"] = util.get_formatted_datetime(
                datetime.datetime.fromtimestamp(job["timestamp"])
            )
            job["recording"] = currently_recording
            job["rendering"] = currently_rendering
            job["processing"] = currently_recording or currently_rendering
            del job["timestamp"]

            return job

        return sorted(
            [
                util.dict_merge({"name": key}, finalize_fields(key, value))
                for key, value in jobs.items()
            ],
            key=lambda x: sv(x["name"]),
        )
def _create_etag(path, filter, recursive, lm=None):
    """Compute a SHA1 based ETag for a files API response.

    The hash covers the last modified value, the filter and recursive flags,
    and - for SD card relevant paths - the current SD file listing.

    Args:
        path: the request path the ETag is computed for
        filter: active filter parameter, mixed into the hash verbatim
        recursive: whether the listing is recursive, mixed into the hash
        lm: optional precomputed last modified value; if ``None`` it is
            computed via ``_create_lastmodified``

    Returns:
        hex digest string, or ``None`` if no last modified value is available
    """
    if lm is None:
        lm = _create_lastmodified(path, recursive)

    if lm is None:
        return None

    # named "hasher" instead of "hash" to avoid shadowing the builtin
    hasher = hashlib.sha1()

    def hash_update(value):
        value = value.encode('utf-8')
        hasher.update(value)

    hash_update(str(lm))
    hash_update(str(filter))
    hash_update(str(recursive))

    # endswith accepts a tuple of suffixes, replacing the chained "or"
    if path.endswith(("/files", "/files/sdcard")):
        # include sd data in etag
        hash_update(repr(sorted(printer.get_sd_files(), key=lambda x: sv(x[0]))))

    hash_update(_DATA_FORMAT_VERSION)  # increment version if we change the API format

    return hasher.hexdigest()
def _print_list(users):
    """Print a name-sorted, human readable listing of all given users.

    Updated to f-strings for consistency with the sibling implementation
    of this helper elsewhere in the codebase.
    """
    click.echo(f"{len(users)} users registered in the system:")
    for user in sorted(
        map(lambda x: x.as_dict(), users), key=lambda x: sv(x.get("name"))
    ):
        click.echo(f"\t{_user_to_line(user)}")
def _print_list(users):
    """Echo a header plus one indented line per user, sorted by user name."""
    click.echo(f"{len(users)} users registered in the system:")

    # serialize first, then sort the dict representations by name
    as_dicts = (user.as_dict() for user in users)
    for entry in sorted(as_dicts, key=lambda d: sv(d.get("name"))):
        click.echo(f"\t{_user_to_line(entry)}")
def _filter_out_latest(releases, sort_key=None, include_prerelease=False, commitish=None): """ Filters out the newest of all matching releases. Tests: >>> release_1_2_15 = dict(name="1.2.15", tag_name="1.2.15", html_url="some_url", published_at="2016-07-29T19:53:29Z", prerelease=False, draft=False, target_commitish="prerelease") >>> release_1_2_16rc1 = dict(name="1.2.16rc1", tag_name="1.2.16rc1", html_url="some_url", published_at="2016-08-29T12:00:00Z", prerelease=True, draft=False, target_commitish="rc/maintenance") >>> release_1_2_16rc2 = dict(name="1.2.16rc2", tag_name="1.2.16rc2", html_url="some_url", published_at="2016-08-30T12:00:00Z", prerelease=True, draft=False, target_commitish="rc/maintenance") >>> release_1_2_17rc1 = dict(name="1.2.17rc1", tag_name="1.2.17rc1", html_url="some_url", published_at="2016-08-31T12:00:00Z", prerelease=True, draft=True, target_commitish="rc/maintenance") >>> release_1_3_0rc1 = dict(name="1.3.0rc1", tag_name="1.3.0rc1", html_url="some_url", published_at="2016-12-12T12:00:00Z", prerelease=True, draft=False, target_commitish="rc/devel") >>> release_1_3_5rc1 = dict(name="1.3.5rc1", tag_name="1.3.5rc1", html_url="some_url", published_at="2017-06-14T10:00:00Z", prerelease=True, draft=False, target_commitish="rc/maintenance") >>> release_1_2_18 = dict(name="1.2.18", tag_name="1.2.18", html_url="some_url", published_at="2016-12-13T12:00:00Z", prerelease=False, draft=False, target_commitish="master") >>> release_1_4_0rc1 = dict(name="1.4.0rc1", tag_name="1.4.0rc1", html_url="some_url", published_at="2017-12-12T12:00:00Z", prerelease=True, draft=False, target_commitish="rc/future") >>> release_1_4_0rc1_devel = dict(name="1.4.0rc1", tag_name="1.4.0rc1", html_url="some_url", published_at="2017-12-12T12:00:00Z", prerelease=True, draft=False, target_commitish="rc/devel") >>> releases = [release_1_2_15, release_1_2_16rc1, release_1_2_16rc2, release_1_2_17rc1, release_1_3_0rc1, release_1_4_0rc1] >>> _filter_out_latest(releases, 
include_prerelease=False, commitish=None) ('1.2.15', '1.2.15', 'some_url') >>> _filter_out_latest(releases, include_prerelease=True, commitish=["rc/maintenance"]) ('1.2.16rc2', '1.2.16rc2', 'some_url') >>> _filter_out_latest(releases, include_prerelease=True, commitish=["rc/devel"]) ('1.3.0rc1', '1.3.0rc1', 'some_url') >>> _filter_out_latest(releases, include_prerelease=True, commitish=None) ('1.4.0rc1', '1.4.0rc1', 'some_url') >>> _filter_out_latest(releases, include_prerelease=True, commitish=["rc/doesntexist"]) ('1.2.15', '1.2.15', 'some_url') >>> _filter_out_latest([release_1_2_17rc1]) (None, None, None) >>> _filter_out_latest([release_1_2_16rc1, release_1_2_16rc2]) (None, None, None) >>> comparable_factory = _get_comparable_factory("python", force_base=True) >>> sort_key = lambda release: comparable_factory(_get_sanitized_version(release["tag_name"])) >>> _filter_out_latest(releases + [release_1_2_18], include_prerelease=False, commitish=None, sort_key=sort_key) ('1.2.18', '1.2.18', 'some_url') >>> _filter_out_latest(releases + [release_1_2_18], include_prerelease=True, commitish=["rc/maintenance"], sort_key=sort_key) ('1.2.18', '1.2.18', 'some_url') >>> _filter_out_latest(releases + [release_1_2_18], include_prerelease=True, commitish=["rc/devel"], sort_key=sort_key) ('1.3.0rc1', '1.3.0rc1', 'some_url') >>> _filter_out_latest([release_1_2_18, release_1_3_5rc1], include_prerelease=True, commitish=["rc/maintenance"]) ('1.3.5rc1', '1.3.5rc1', 'some_url') >>> _filter_out_latest([release_1_2_18, release_1_3_5rc1], include_prerelease=True, commitish=["rc/maintenance", "rc/devel"]) ('1.3.5rc1', '1.3.5rc1', 'some_url') >>> _filter_out_latest([release_1_2_18, release_1_3_5rc1, release_1_4_0rc1_devel], include_prerelease=True, commitish=["rc/maintenance"]) ('1.3.5rc1', '1.3.5rc1', 'some_url') >>> _filter_out_latest([release_1_2_18, release_1_3_5rc1, release_1_4_0rc1_devel], include_prerelease=True, commitish=["rc/maintenance", "rc/devel"]) ('1.4.0rc1', '1.4.0rc1', 
'some_url') """ nothing = None, None, None if sort_key is None: sort_key = lambda release: sv(release.get("published_at", None)) # filter out prereleases and drafts filter_function = lambda rel: not rel["prerelease"] and not rel["draft"] if include_prerelease: if commitish: filter_function = lambda rel: not rel["draft"] and (not rel[ "prerelease"] or rel["target_commitish"] in commitish) else: filter_function = lambda rel: not rel["draft"] releases = list(filter(filter_function, releases)) if not releases: return nothing # sort by sort_key releases = sorted(releases, key=sort_key) # latest release = last in list latest = releases[-1] return latest["name"], latest["tag_name"], latest.get("html_url", None)