def test_path_prefix():
    """Tests correct package importing - critical test!

    If this one fails, it cancels the whole run.
    """
    result = True

    # Default: no instance, no environment, no prefix
    set_instance('', None, '')
    default = get_path('lib', '', ensure=False)
    result &= default == '/var/lib/isomer'

    # Instance + environment, prefix left unset
    set_instance(pytest.INSTANCENAME, 'green')
    unset_prefix = get_path('lib', '', ensure=False)
    result &= unset_prefix == '/var/lib/isomer/' + pytest.INSTANCENAME + '/green'

    # Instance + environment with an explicit filesystem prefix
    set_instance(pytest.INSTANCENAME, 'green', prefix='/foo/bar/')
    prefixed = get_path('lib', '', ensure=False)
    result &= prefixed == '/foo/bar/var/lib/isomer/' + pytest.INSTANCENAME + '/green'

    if result is False:
        # Fix: the original message ran "…Set:<path>Path prefixing…" together
        # without a separating space.
        pytest.exit('Default:' + default +
                    ' Unset:' + unset_prefix +
                    ' Set:' + prefixed +
                    ' Path prefixing is broken! Not continuing until '
                    'you fix "isomer.misc.path"!')
def _create_backup(self):
    """Write a timestamped json dump of all data into the backup location."""
    self.log("Backing up all data")

    backup_file = join(
        get_path("local", "backup", ensure=True),
        time.strftime("%Y-%m-%d_%H%M%S.json"),
    )
    backup(None, None, None, "json", backup_file, False, True, [])
def cli_locations(self, *args):
    """Display all locations of running instance"""
    self.log("All locations for this instance:")

    from isomer.misc.path import locations, get_path

    for location in locations:
        self.log(get_path(location, ""), pretty=True)
def __init__(self, default_tile=None, **kwargs):
    """
    :param tile_path: Caching directory structure target path
    :param default_tile: Used, when no tile can be cached
    :param kwargs:
    """
    super(MaptileService, self).__init__('MTS', **kwargs)

    self.worker = Worker(
        process=False, workers=2, channel="tcworkers"
    ).register(self)

    # Make sure every cache subdirectory exists before serving tiles
    for subdir in ('tilecache', 'rastertiles', 'rastercache'):
        get_path('cache', subdir, ensure=True)

    self.cache_path = get_path('cache', '')
    self.default_tile = default_tile
    self._tiles = []
def test_path_tools():
    """Check instance/environment state and path construction helpers."""
    path.set_instance('TESTING', 'MAUVE', '/tmp/isomer-test')

    assert path.INSTANCE == 'TESTING'
    assert path.ENVIRONMENT == 'MAUVE'

    for location in ('cache', 'local', 'lib'):
        assert location in path.locations

    base = '/tmp/isomer-test/var/%s/isomer/TESTING/MAUVE'
    assert path.get_path('cache', '') == base % 'cache'
    assert path.get_path('local', 'foo') == base % 'local' + '/foo'
    assert path.get_path('lib', 'bar/qux') == base % 'lib' + '/bar/qux'
def _install_backend(ctx):
    """Installs the backend into an environment"""

    instance_name = ctx.obj["instance"]
    # Installation always targets the next (inactive) environment
    env = get_next_environment(ctx)

    set_instance(instance_name, env)

    log("Installing backend on", env, lvl=debug)

    env_path = get_path("lib", "")
    user = ctx.obj["instance_configuration"]["user"]

    # Run "setup.py develop" with the environment's own interpreter
    success, result = run_process(
        os.path.join(env_path, "repository"),
        [
            os.path.join(env_path, "venv", "bin", "python3"),
            "setup.py",
            "develop"
        ],
        sudo=user,
    )
    if not success:
        output = str(result)

        if "was unable to detect version" in output:
            log(
                "Installing from dirty repository. This might result in dependency "
                "version problems!",
                lvl=hilight,
            )
        else:
            log(
                "Something unexpected happened during backend installation:\n",
                result,
                lvl=hilight,
            )
        # TODO: Another fault might be an unclean package path.
        #  But i forgot the log message to check for.
        # log('This might be a problem due to unclean installations of Python'
        #     ' libraries. Please check your path.')

    log("Installing requirements")
    # Install the repository's requirements into the environment's venv
    success, result = run_process(
        os.path.join(env_path, "repository"),
        [
            os.path.join(env_path, "venv", "bin", "pip3"),
            "install",
            "-r",
            "requirements.txt",
        ],
        sudo=user,
    )
    if not success:
        log(format_result(result), lvl=error)

    # NOTE(review): failures above are only logged and True is returned
    # unconditionally — confirm that this best-effort behavior is intended.
    return True
def _create_folders(ctx):
    """Generate required folders for an instance.

    Creates all configured locations, the module storage and the instance
    logfile, chowning each to the instance's user/group when that account
    exists.
    """

    log("Generating instance directories", emitter="MANAGE")

    instance_configuration = ctx.obj["instance_configuration"]

    try:
        uid = pwd.getpwnam(instance_configuration["user"]).pw_uid
        gid = grp.getgrnam(instance_configuration["group"]).gr_gid
    except KeyError:
        log("User account for instance not found!", lvl=warn)
        uid = gid = None

    def _chown(target):
        """Best-effort ownership change; skipped when the account is unknown."""
        if uid is None or gid is None:
            # Fix: os.chown(path, None, None) raises TypeError, which the
            # original code did not catch — skip instead.
            return
        try:
            os.chown(target, uid, gid)
        except PermissionError:
            log("Could not change ownership:", target, lvl=warn, exc=True)

    logfile = os.path.join(get_log_path(),
                           "isomer." + ctx.obj["instance"] + ".log")

    for item in locations:
        path = get_path(item, "", ensure=True)
        log("Created path: " + path, lvl=debug)
        _chown(path)

    module_path = get_path("lib", "modules", ensure=True)
    _chown(module_path)
    log("Module storage created:", module_path, lvl=debug)

    if not os.path.exists(logfile):
        open(logfile, "w").close()
        _chown(logfile)

    finish(ctx)
def _check_free_space(self):
    """Checks used filesystem storage sizes"""

    def get_folder_size(path):
        """Aggregates used size of a specified path, recursively"""
        total_size = 0
        for item in walk(path):
            for file in item[2]:
                try:
                    # item[0] is the directory, item[2] its plain files
                    total_size = total_size + getsize(join(item[0], file))
                except (OSError, PermissionError) as folder_size_e:
                    self.log("error with file: " + join(item[0], file),
                             folder_size_e)
        return total_size

    total = 0

    for name, checkpoint in self.config.locations.items():
        try:
            stats = statvfs(get_path(name, ""))
        except (OSError, PermissionError, KeyError) as e:
            # Location may not exist (yet) or be unreadable — skip it
            self.log("Location unavailable:", name, e, type(e),
                     lvl=warn, exc=True)
            continue
        # Free space in bytes; used space converted to MB
        free_space = stats.f_frsize * stats.f_bavail
        used_space = get_folder_size(get_path(name, "")) / 1024.0 / 1024
        total += used_space

        self.log("Location %s uses %.2f MB" % (name, used_space))

        if free_space < checkpoint["minimum"]:
            # NOTE(review): the message says MB but free_space is divided by
            # 1024^3 (GB) — confirm which unit is intended.
            self.log(
                "Short of free space on %s: %.2f MB left"
                % (name, free_space / 1024.0 / 1024 / 1024),
                lvl=warn,
            )

    self.log("Total space consumption: %.2f MB" % total)
def __init__(self, *args, **kwargs):
    """
    :param tile_path: Caching directory structure target path
    :param default_tile: Used, when no tile can be cached
    :param kwargs:
    """
    super(MaptileLoader, self).__init__('MTL', *args, **kwargs)

    # Ensure every cache subdirectory exists up front
    for subdir in ('tilecache', 'rastertiles', 'rastercache'):
        get_path('cache', subdir, ensure=True)

    self.cache_path = get_path('cache', '')

    self.worker = Worker(
        process=False, workers=2, channel="tclworkers"
    ).register(self)

    self.cancelled = []
    self.requests = {}

    self.fire(cli_register_event('test_maploader', cli_test_maptile_Loader))
def _create_system_folders(use_sudo=False):
    """Create system-wide isomer folders and hand ownership to 'isomer'."""
    target_paths = [
        "/var/www/challenges",  # For LetsEncrypt acme certificate challenges
        "/var/backups/isomer",
        "/var/log/isomer",
        "/var/run/isomer",
    ]
    target_paths.extend(get_path(location, "") for location in locations)
    target_paths.append(get_log_path())

    for target in target_paths:
        run_process("/", ["sudo", "mkdir", "-p", target], sudo=use_sudo)
        run_process("/", ["sudo", "chown", "isomer", target], sudo=use_sudo)

    # TODO: The group/ownership should be assigned per instance.user/group
    run_process("/", ["sudo", "chgrp", "isomer", "/var/log/isomer"],
                sudo=use_sudo)
    run_process("/", ["sudo", "chmod", "g+w", "/var/log/isomer"],
                sudo=use_sudo)
def _install_provisions(ctx, import_file=None, skip_provisions=False):
    """Load provisions into database

    :param ctx: Click context (instance configuration, dbhost/dbname)
    :param import_file: Optional backup file to import after provisioning
    :param skip_provisions: Skip the provisioning subprocess entirely
    :returns: True on success, False when provisioning failed
    """
    instance_configuration = ctx.obj["instance_configuration"]
    env = get_next_environment(ctx)
    env_path = get_path("lib", "")

    log("Installing provisioning data")

    if not skip_provisions:
        # Run "iso install provisions" inside the environment's venv
        success, result = run_process(
            os.path.join(env_path, "repository"),
            [
                os.path.join(env_path, "venv", "bin", "python3"),
                "./iso",
                "-nc",
                "--flog", "5",
                "--config-path", get_etc_path(),
                "-i", instance_configuration["name"],
                "-e", env,
                "install", "provisions",
            ],
            # Note: no sudo necessary as long as we do not enforce
            # authentication on databases
        )
        if not success:
            log("Could not provision data:", lvl=error)
            log(format_result(result), lvl=error)
            return False

    if import_file is not None:
        log("Importing backup")
        log(ctx.obj, pretty=True)
        # dbhost is expected as "host:port" — assumes a port is present
        host, port = ctx.obj["dbhost"].split(":")
        load(host, int(port), ctx.obj["dbname"], import_file)

    return True
def _install_frontend(ctx):
    """Install and build the frontend"""
    env = get_next_environment(ctx)
    env_path = get_path("lib", "")
    instance_configuration = ctx.obj["instance_configuration"]
    user = instance_configuration["user"]

    log("Building frontend")

    command = [
        os.path.join(env_path, "venv", "bin", "python3"),
        "./iso",
        "-nc",
        "--config-path", get_etc_path(),
        "--prefix-path", get_prefix_path(),
        "-i", instance_configuration["name"],
        "-e", env,
        "--clog", "10",
        "install", "frontend", "--rebuild",
    ]

    success, result = run_process(
        os.path.join(env_path, "repository"), command, sudo=user
    )

    if success:
        return True

    log(format_result(result), lvl=error)
    return False
def __init__(self, name, instance, **kwargs):
    """Set up the core component.

    :param name: Instance name
    :param instance: Instance configuration dict (flat web_* keys)
    :param kwargs: insecure, dev, web_address, web_port, blacklist, ...
    """
    super(Core, self).__init__("CORE", **kwargs)
    self.log("Starting system (channel ", self.channel, ")")

    self.insecure = kwargs["insecure"]
    self.development = kwargs["dev"]

    self.instance = name

    host = kwargs.get("web_address", None)
    port = kwargs.get("web_port", None)
    # self.log(instance, pretty=True, lvl=verbose)

    # Explicit kwargs override the instance configuration
    self.host = instance["web_address"] if host is None else host
    self.port = instance["web_port"] if port is None else port
    self.log("Web configuration: %s:%i" % (self.host, int(self.port)),
             lvl=debug)

    # Empty string means "no certificate configured"
    self.certificate = certificate = (instance["web_certificate"]
                                      if instance["web_certificate"] != ""
                                      else None)

    if certificate:
        if not os.path.exists(certificate):
            self.log(
                "SSL certificate usage requested but certificate "
                "cannot be found!",
                lvl=error,
            )
            abort(EXIT_NO_CERTIFICATE)

    # TODO: Find a way to synchronize this with the paths in i.u.builder
    if self.development:
        # Development mode serves the frontend from the source checkout
        self.frontend_root = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)) + "/../frontend")
        self.frontend_target = get_path("lib", "frontend-dev")
        self.module_root = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)) + "/../modules")
    else:
        self.frontend_root = get_path("lib", "repository/frontend")
        self.frontend_target = get_path("lib", "frontend")
        self.module_root = ""

    self.log(
        "Frontend & module paths:",
        self.frontend_root,
        self.frontend_target,
        self.module_root,
        lvl=verbose,
    )

    self.loadable_components = {}
    self.running_components = {}

    self.frontend_running = False
    self.frontend_watcher = None
    self.frontend_watch_manager = None

    self.static = None
    self.websocket = None

    # TODO: Cleanup
    # Components that are never loaded; commented entries kept for reference
    self.component_blacklist = [
        # 'camera',
        # 'logger',
        # 'debugger',
        "recorder",
        "playback",
        # 'sensors',
        # 'navdatasim'
        # 'ldap',
        # 'navdata',
        # 'nmeaparser',
        # 'objectmanager',
        # 'wiki',
        # 'clientmanager',
        # 'library',
        # 'nmeaplayback',
        # 'alert',
        # 'tilecache',
        # 'schemamanager',
        # 'chat',
        # 'debugger',
        # 'rcmanager',
        # 'auth',
        # 'machineroom'
    ]
    self.component_blacklist += kwargs["blacklist"]

    self.update_components()
    self._write_config()

    self.server = None

    if self.insecure:
        self.log("Not dropping privileges - this may be insecure!",
                 lvl=warn)
def upgrade(ctx, release, upgrade_modules, restart, handle_cache, source, url):
    """Upgrades an instance on its other environment and turns over on success.

    \b
    1. Test if other environment is empty
    1.1. No - archive and clear it
    2. Copy current environment to other environment
    3. Clear old bits (venv, frontend)
    4. Fetch updates in other environment repository
    5. Select a release
    6. Checkout that release and its submodules
    7. Install release
    8. Copy database
    9. Migrate data (WiP)
    10. Turnover
    """

    instance_config = ctx.obj["instance_configuration"]

    repository = get_path("lib", "repository")

    # CLI arguments override the instance's stored source/url
    installation_source = source if source is not None else instance_config[
        'source']
    installation_url = url if url is not None else instance_config['url']

    environments = instance_config["environments"]
    active = instance_config["environment"]  # name of the active environment

    next_environment = get_next_environment(ctx)

    # 1./1.1. Archive and clear the other environment if it is occupied
    if environments[next_environment]["installed"] is True:
        _clear_environment(ctx, clear_env=next_environment)

    # 2. Copy lib and local data over to the other environment
    source_paths = [
        get_path("lib", "", environment=active),
        get_path("local", "", environment=active)
    ]
    destination_paths = [
        get_path("lib", "", environment=next_environment),
        get_path("local", "", environment=next_environment)
    ]

    log(source_paths, destination_paths, pretty=True)

    for source, destination in zip(source_paths, destination_paths):
        log("Copying to new environment:", source, destination)
        copy_directory_tree(source, destination)

    if handle_cache != "ignore":
        log("Handling cache")
        move = handle_cache == "move"
        copy_directory_tree(get_path("cache", "", environment=active),
                            get_path("cache", "",
                                     environment=next_environment),
                            move=move)

    # 3. Clear generated bits that will be rebuilt
    rmtree(get_path("lib", "venv"), ignore_errors=True)
    # TODO: This potentially leaves frontend-dev:
    rmtree(get_path("lib", "frontend"), ignore_errors=True)

    # 4./5. Fetch available releases and select one
    releases = _get_versions(ctx, source=installation_source,
                             url=installation_url, fetch=True)
    releases_keys = sorted_alphanumerical(releases.keys())

    if release is None:
        release = releases_keys[-1]
    else:
        if release not in releases_keys:
            log("Unknown release. Maybe try a different release or source.")
            abort(50100)

    log("Choosing release", release)

    # 6./7. Check out and install the selected release
    _install_environment(ctx, installation_source, installation_url,
                         upgrade=True, release=release)

    # 8. Copy the active environment's database for the new environment.
    # Fix: `active` is the environment *name*; the database name lives in
    # that environment's configuration (the previous `active['database']`
    # indexed a str with a str and raised TypeError).
    new_database_name = instance_config["name"] + "_" + next_environment
    copy_database(ctx.obj["dbhost"], environments[active]["database"],
                  new_database_name)

    # 9. Apply data migrations (work in progress)
    apply_migrations(ctx)

    finish(ctx)
def _clear_environment(ctx, force=False, clear_env=None, clear=False,
                       no_archive=False):
    """Tests an environment for usage, then clears it

    :param ctx: Click Context
    :param force: Irrefutably destroy environment content
    :param clear_env: Environment to clear (Green/Blue)
    :param clear: Also destroy generated folders
    :param no_archive: Don't attempt to archive instance
    """

    instance_name = ctx.obj["instance"]

    if clear_env is None:
        next_environment = get_next_environment(ctx)
    else:
        next_environment = clear_env

    log("Clearing environment:", next_environment)
    set_instance(instance_name, next_environment)

    # log('Testing', environment, 'for usage')

    env = ctx.obj["instance_configuration"]["environments"][next_environment]

    if not no_archive:
        # Archive first; a failed archive only passes when forced
        if not (_archive(ctx, force) or force):
            log("Archival failed, stopping.")
            abort(5000)

    log("Clearing env:", env, lvl=debug)

    # Remove every configured location of this environment
    for item in locations:
        path = get_path(item, "")
        log("Clearing [%s]: %s" % (item, path), lvl=debug)
        try:
            shutil.rmtree(path)
        except FileNotFoundError:
            log("Path not found:", path, lvl=debug)
        except PermissionError:
            log("No permission to clear environment", lvl=error)
            return False

    if not clear:
        # Recreate the (now empty) directory skeleton
        _create_folders(ctx)

    try:
        delete_database(ctx.obj["dbhost"],
                        "%s_%s" % (instance_name, next_environment),
                        force=True)
    except pymongo.errors.ServerSelectionTimeoutError:
        log("No database available")
    except Exception as e:
        log("Could not delete database:", e, lvl=warn, exc=True)

    # Reset the environment's configuration back to the pristine template
    ctx.obj["instance_configuration"]["environments"][
        next_environment] = environment_template
    write_instance(ctx.obj["instance_configuration"])

    return True
def _install_environment(
    ctx,
    source=None,
    url=None,
    import_file=None,
    no_sudo=False,
    force=False,
    release=None,
    upgrade=False,
    skip_modules=False,
    skip_data=False,
    skip_frontend=False,
    skip_test=False,
    skip_provisions=False,
):
    """Internal function to perform environment installation

    Fetches Isomer, creates a virtualenv and runs the backend / modules /
    provisions / migration / frontend / health-check steps, recording
    progress flags in the environment's configuration.
    """

    # Normalize the url: default source url, expand a leading "." to cwd
    if url is None:
        url = source_url
    elif url[0] == '.':
        url = url.replace(".", os.getcwd(), 1)

    if url[0] == '/':
        url = os.path.abspath(url)

    instance_name = ctx.obj["instance"]
    instance_configuration = ctx.obj["instance_configuration"]

    next_environment = get_next_environment(ctx)

    set_instance(instance_name, next_environment)

    # Work on a copy so partial progress does not corrupt the stored config
    env = copy(instance_configuration["environments"][next_environment])
    env["database"] = instance_name + "_" + next_environment

    env_path = get_path("lib", "")

    user = instance_configuration["user"]
    if no_sudo:
        user = None

    log("Installing new other environment for %s on %s from %s in %s" %
        (instance_name, next_environment, source, env_path))

    try:
        result = get_isomer(source, url, env_path, upgrade=upgrade,
                            sudo=user, release=release)
        if result is False:
            log("Getting Isomer failed", lvl=critical)
            abort(50011, ctx)
    except FileExistsError:
        if not force:
            log(
                "Isomer already present, please safely clear or "
                "inspect the environment before continuing! Use --force to ignore.",
                lvl=warn,
            )
            abort(50012, ctx)
        else:
            log("Isomer already present, forcing through anyway.")

    # Determine the version from git, falling back to the package version
    try:
        repository = Repo(os.path.join(env_path, "repository"))

        log("Repo:", repository, lvl=debug)
        env["version"] = str(repository.git.describe())
    except (exc.InvalidGitRepositoryError, exc.NoSuchPathError,
            exc.GitCommandError):
        env["version"] = version
        log(
            "Not running from a git repository; Using isomer.version:",
            version,
            lvl=warn,
        )

    ctx.obj["instance_configuration"]["environments"][next_environment] = env

    # TODO: Does it make sense to early-write the configuration and then again later?
    write_instance(ctx.obj["instance_configuration"])

    log("Creating virtual environment")
    success, result = run_process(
        env_path,
        [
            "virtualenv", "-p", "/usr/bin/python3",
            "--system-site-packages", "venv"
        ],
        sudo=user,
    )
    if not success:
        log(format_result(result), lvl=error)

    # Each completed step flags its success in the environment record;
    # any exception stops the sequence but keeps the flags gathered so far.
    try:
        if _install_backend(ctx):
            log("Backend installed")
            env["installed"] = True
        if not skip_modules and _install_modules(ctx):
            log("Modules installed")
            # env['installed_modules'] = True
        if not skip_provisions and _install_provisions(
                ctx, import_file=import_file):
            log("Provisions installed")
            env["provisioned"] = True
        if not skip_data and _migrate(ctx):
            log("Data migrated")
            env["migrated"] = True
        if not skip_frontend and _install_frontend(ctx):
            log("Frontend installed")
            env["frontend"] = True
        if not skip_test and _check_environment(ctx):
            log("Environment tested")
            env["tested"] = True
    except Exception:
        log("Error during installation:", exc=True, lvl=critical)

    log("Environment status now:", env)

    ctx.obj["instance_configuration"]["environments"][next_environment] = env

    write_instance(ctx.obj["instance_configuration"])
def _install_module(source, url, store_url=DEFAULT_STORE_URL, auth=None,
                    force=False, user=None):
    """Actually installs a module into an environment

    :param source: One of develop/git/link/copy/store
    :param url: Module location (path, repository url or store package name)
    :param store_url: Package store index url
    :param auth: Store authentication
    :param force: Overwrite an already installed version
    :param user: Account to sudo subprocesses as
    :returns: (package_name, package_version) on success, False on failure
    """

    package_name = package_version = success = output = ""

    def get_module_info(directory):
        """Read name and version via "setup.py --name/--version"."""
        log("Getting name")
        success, result = run_process(
            directory, ["python3", "setup.py", "--name"], sudo=user)
        if not success:
            log(format_result(result), pretty=True, lvl=error)
            return False
        package_name = str(result.output, encoding="utf8").rstrip("\n")

        log("Getting version")
        success, result = run_process(
            directory, ["python3", "setup.py", "--version"], sudo=user)
        if not success:
            log(format_result(result), pretty=True, lvl=error)
            return False
        package_version = str(result.output, encoding="utf8").rstrip("\n")

        log("Package name:", package_name, "version:", package_version)

        return package_name, package_version

    if source == "develop":
        # Development install straight from the given path
        log("Installing module for development")
        success, output = run_process(
            url,
            [
                os.path.join(get_path("lib", "venv"), "bin", "python3"),
                "setup.py",
                "develop",
            ],
            sudo=user,
        )
        if not success:
            log(output, lvl=verbose)
            return False
        else:
            return get_module_info(url)

    module_path = get_path("lib", "modules", ensure=True)
    module_info = False

    if source not in ("git", "link", "copy", "store"):
        abort(EXIT_INVALID_SOURCE)

    # Stage the module under a random temporary name first
    uuid = std_uuid()
    temporary_path = os.path.join(module_path, "%s" % uuid)

    log("Installing module: %s [%s]" % (url, source))

    if source in ("link", "copy") and url.startswith("/"):
        absolute_path = url
    else:
        absolute_path = os.path.abspath(url)

    if source == "git":
        log("Cloning repository from", url)
        success, output = run_process(
            module_path, ["git", "clone", url, temporary_path], sudo=user)
        if not success:
            log("Error:", output, lvl=error)
    elif source == "link":
        log("Linking repository from", absolute_path)
        success, output = run_process(
            module_path, ["ln", "-s", absolute_path, temporary_path],
            sudo=user)
        if not success:
            log("Error:", output, lvl=error)
    elif source == "copy":
        log("Copying repository from", absolute_path)
        success, output = run_process(
            module_path, ["cp", "-a", absolute_path, temporary_path],
            sudo=user)
        if not success:
            log("Error:", output, lvl=error)
    elif source == "store":
        # Store installs resolve name/version from the store metadata
        log("Installing wheel from store", absolute_path)
        log(store_url, auth)
        store = get_store(store_url, auth)

        if url not in store["packages"]:
            abort(EXIT_STORE_PACKAGE_NOT_FOUND)
        meta = store["packages"][url]

        package_name = meta['name']
        package_version = meta['version']

        venv_path = os.path.join(get_path("lib", "venv"), "bin")

        # NOTE(review): pip's success is not checked here — confirm whether
        # a failed store install should abort.
        success, output = run_process(
            venv_path,
            ["pip3", "install", "--extra-index-url", store_url,
             package_name])

    if source != "store":
        module_info = get_module_info(temporary_path)
        if module_info is False:
            log("Could not get name and version information from module.",
                lvl=error)
            return False
        package_name, package_version = module_info

    final_path = os.path.join(module_path, package_name)

    if os.path.exists(final_path):
        log("Module exists.", lvl=warn)
        if force:
            log("Removing previous version.")
            success, result = run_process(
                module_path, ["rm", "-rf", final_path], sudo=user)
            if not success:
                log("Could not remove previous version!", lvl=error)
                abort(50000)
        else:
            log("Not overwriting previous version without --force",
                lvl=error)
            abort(50000)

    # Move the staged module into place and install it into the venv
    log("Renaming to", final_path)
    os.rename(temporary_path, final_path)

    log("Installing module")
    success, output = run_process(
        final_path,
        [
            os.path.join(get_path("lib", "venv"), "bin", "python3"),
            "setup.py",
            "develop",
        ],
        sudo=user,
    )
    if not success:
        log(output, lvl=verbose)
        return False
    else:
        return package_name, package_version
def _archive(ctx, force=False, dynamic=False):
    """Archive the next environment (database dump + file tree) to a tarball.

    :param ctx: Click context
    :param force: Archive even when not installed/tested; ignore dump errors
    :param dynamic: Skip archiving the on-disk locations
    :returns: Archive filename on success, False otherwise
    """
    instance_configuration = ctx.obj["instance_configuration"]

    next_environment = get_next_environment(ctx)

    env = instance_configuration["environments"][next_environment]

    log("Instance info:", instance_configuration, next_environment,
        pretty=True, lvl=debug)
    log("Installed:", env["installed"], "Tested:", env["tested"], lvl=debug)

    # Only archive environments that were fully set up (unless forced)
    if (not env["installed"] or not env["tested"]) and not force:
        log("Environment has not been installed - not archiving.", lvl=warn)
        return False

    log("Archiving environment:", next_environment)
    set_instance(ctx.obj["instance"], next_environment)

    # Filesystem-safe timestamp for the archive name
    timestamp = std_now().replace(":", "-").replace(".", "-")

    temp_path = mkdtemp(prefix="isomer_backup")

    log("Archiving database")
    if not dump(
        instance_configuration["database_host"],
        instance_configuration["database_port"],
        env["database"],
        os.path.join(temp_path, "db_" + timestamp + ".json"),
    ):
        if not force:
            log("Could not archive database.")
            return False

    archive_filename = os.path.join(
        "/var/backups/isomer/",
        "%s_%s_%s.tgz" % (ctx.obj["instance"], next_environment, timestamp),
    )

    try:
        # Bundle the instance configuration alongside the database dump
        shutil.copy(
            os.path.join(get_etc_instance_path(),
                         ctx.obj["instance"] + ".conf"),
            temp_path,
        )

        with tarfile.open(archive_filename, "w:gz") as f:
            if not dynamic:
                # Static archive: include every known location on disk
                for item in locations:
                    path = get_path(item, "")
                    log("Archiving [%s]: %s" % (item, path))
                    f.add(path)
            f.add(temp_path, "db_etc")
    except (PermissionError, FileNotFoundError) as e:
        log("Could not archive environment:", e, lvl=error)
        if not force:
            return False
    finally:
        log("Clearing temporary backup target")
        shutil.rmtree(temp_path)

    # Record the archived environment in the instance configuration
    ctx.obj["instance_configuration"]["environments"]["archive"][
        timestamp] = env

    log(ctx.obj["instance_configuration"])

    return archive_filename
def __init__(self, name, instance, **kwargs):
    """Set up the core component.

    :param name: Instance name
    :param instance: Instance configuration dict (nested "web" section,
        "environments"/"environment" for the blacklist)
    :param kwargs: insecure, dev, web_address, web_port, blacklist, ...
    """
    super(Core, self).__init__("CORE", **kwargs)
    self.log("Starting system (channel ", self.channel, ")")

    self.insecure = kwargs["insecure"]
    self.development = kwargs["dev"]

    self.instance = name

    host = kwargs.get("web_address", None)
    port = kwargs.get("web_port", None)
    # self.log(instance, pretty=True, lvl=verbose)

    # Explicit kwargs override the instance configuration
    self.host = instance["web"]["address"] if host is None else host
    self.port = instance["web"]["port"] if port is None else port
    self.log("Web configuration: %s:%i" % (self.host, int(self.port)),
             lvl=debug)

    # Empty string means "no certificate configured"
    self.certificate = certificate = (instance["web"]["certificate"]
                                      if instance["web"]["certificate"] != ""
                                      else None)

    if certificate:
        if not os.path.exists(certificate):
            self.log(
                "SSL certificate usage requested but certificate "
                "cannot be found!",
                lvl=error,
            )
            abort(EXIT_NO_CERTIFICATE)

    # TODO: Find a way to synchronize this with the paths in i.u.builder
    if self.development:
        # Development mode serves the frontend from the source checkout
        self.frontend_root = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)) + "/../frontend")
        self.frontend_target = get_path("lib", "frontend-dev")
        self.module_root = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)) + "/../modules")
    else:
        self.frontend_root = get_path("lib", "repository/frontend")
        self.frontend_target = get_path("lib", "frontend")
        self.module_root = ""

    self.log(
        "Frontend & module paths:",
        self.frontend_root,
        self.frontend_target,
        self.module_root,
        lvl=verbose,
    )

    self.modules_loaded = {}
    self.loadable_components = {}
    self.loaded_components = {}

    self.frontend_running = False
    self.frontend_watcher = None
    self.frontend_watch_manager = None

    self.static = None
    self.websocket = None

    # Blacklist comes from the active environment's configuration,
    # extended by any blacklist passed via kwargs
    self.component_blacklist = instance["environments"][
        instance["environment"]]["blacklist"]
    self.component_blacklist += list(kwargs.get("blacklist", []))

    self._check_provisions()
    self.update_components()
    self._write_config()

    self.server = None

    if self.insecure:
        self.log("Not dropping privileges - this may be insecure!",
                 lvl=warn)
def _check_environment(ctx, env=None, dev=False):
    """General fitness tests of the built environment

    :param ctx: Click context
    :param env: Environment name to check (defaults to the next environment)
    :param dev: Check the development frontend locations
    :returns: True when the environment looks healthy, False otherwise
    """

    if env is None:
        env = get_next_environment(ctx)

    log("Health checking the environment '%s'" % env)

    # Frontend
    not_enough_files = False
    html_missing = False
    loader_missing = False
    size_too_small = False

    # Backend
    repository_missing = False
    modules_missing = False
    venv_missing = False
    local_missing = False
    cache_missing = False

    # Backend checks: required directories must exist
    if not os.path.exists(os.path.join(get_path('lib', 'repository'))):
        log("Repository is missing", lvl=warn)
        repository_missing = True
    if not os.path.exists(os.path.join(get_path('lib', 'modules'))):
        log("Modules folder is missing", lvl=warn)
        modules_missing = True
    if not os.path.exists(os.path.join(get_path('lib', 'venv'))):
        log("Virtual environment is missing", lvl=warn)
        venv_missing = True
    if not os.path.exists(os.path.join(get_path('local', ''))):
        log("Local data folder is missing", lvl=warn)
        local_missing = True
    if not os.path.exists(os.path.join(get_path('cache', ''))):
        log("Cache folder is missing", lvl=warn)
        cache_missing = True

    # Frontend checks: compiled artifacts must be present and plausible
    _, frontend_target = get_frontend_locations(dev)

    if not os.path.exists(os.path.join(frontend_target, 'index.html')):
        log("A compiled frontend html seems to be missing", lvl=warn)
        html_missing = True
    if not glob.glob(frontend_target + '/main.*.js'):
        log("A compiled frontend loader seems to be missing", lvl=warn)
        loader_missing = True

    size_sum = 0
    amount_files = 0
    for file in glob.glob(os.path.join(frontend_target, '*.gz')):
        size_sum += os.stat(file).st_size
        amount_files += 1

    if amount_files < 4:
        log("The frontend probably did not compile completely", lvl=warn)
        not_enough_files = True
    if size_sum < 2 * 1024 * 1024:
        log("The compiled frontend seems exceptionally small", lvl=warn)
        size_too_small = True

    # Fix: the aggregate flags were previously swapped ("frontend" held the
    # backend problems and vice versa); the overall result was unaffected,
    # but the names now match what they aggregate.
    backend_broken = (repository_missing or modules_missing or venv_missing
                      or local_missing or cache_missing)
    frontend_broken = (not_enough_files or loader_missing or size_too_small
                       or html_missing)

    result = not (frontend_broken or backend_broken)

    if result is False:
        log("Health check failed", lvl=error)

    return result