def update_indexes(dao, pkgstore, channel_name, subdirs=None):
    jinjaenv = _jinjaenv()
    channeldata = channel_data.export(dao, channel_name)

    if subdirs is None:
        subdirs = sorted(channeldata["subdirs"], key=_subdir_key)

    # Generate channeldata.json and its compressed version
    chandata_json = json.dumps(channeldata, indent=2, sort_keys=True)

    pkgstore.add_file(
        bz2.compress(chandata_json.encode("utf-8")),
        channel_name,
        "channeldata.json.bz2",
    )
    pkgstore.add_file(chandata_json, channel_name, "channeldata.json")

    # Generate index.html for the "root" directory
    channel_template = jinjaenv.get_template("channeldata-index.html.j2")
    pkgstore.add_file(
        channel_template.render(
            title=channel_name,
            packages=channeldata["packages"],
            subdirs=subdirs,
            current_time=datetime.now(timezone.utc),
        ),
        channel_name,
        "index.html",
    )

    # NB. No rss.xml is being generated here

    subdir_template = jinjaenv.get_template("subdir-index.html.j2")

    for dir in subdirs:
        logger.debug(f"creating indexes for subdir {dir} of channel {channel_name}")

        raw_repodata = repo_data.export(dao, channel_name, dir)
        repodata = json.dumps(raw_repodata, indent=2, sort_keys=True)

        # Checksums for the plain repodata
        md5_repodata = hashlib.md5()
        md5_repodata.update(repodata.encode("utf-8"))
        sha_repodata = hashlib.sha256()
        sha_repodata.update(repodata.encode("utf-8"))

        # Checksums for the bz2-compressed repodata
        compressed_repodata = bz2.compress(repodata.encode("utf-8"))
        md5_compressed = hashlib.md5()
        md5_compressed.update(compressed_repodata)
        sha_compressed = hashlib.sha256()
        sha_compressed.update(compressed_repodata)

        add_files = []

        for fname in ("repodata.json", "current_repodata.json"):
            pkgstore.add_file(compressed_repodata, channel_name, f"{dir}/{fname}.bz2")
            pkgstore.add_file(repodata, channel_name, f"{dir}/{fname}")

            add_files.append(
                {
                    "name": f"{fname}",
                    "size": len(repodata),
                    "timestamp": datetime.now(timezone.utc),
                    "md5": md5_repodata.hexdigest(),
                    "sha256": sha_repodata.hexdigest(),
                }
            )
            add_files.append(
                {
                    "name": f"{fname}.bz2",
                    "size": len(compressed_repodata),
                    "timestamp": datetime.now(timezone.utc),
                    "md5": md5_compressed.hexdigest(),
                    "sha256": sha_compressed.hexdigest(),
                }
            )

        # Generate subdir index.html
        pkgstore.add_file(
            subdir_template.render(
                title=f"{channel_name}/{dir}",
                packages=raw_repodata["packages"],
                current_time=datetime.now(timezone.utc),
                add_files=add_files,
            ),
            channel_name,
            f"{dir}/index.html",
        )

    pm = get_plugin_manager()
    pm.hook.post_package_indexing(
        pkgstore=pkgstore, channel_name=channel_name, subdirs=subdirs
    )
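
# update_indexes finishes by dispatching the post_package_indexing hook, so a
# plugin can react once fresh indexes are in the package store. The block
# below is a minimal sketch of such a hook implementation, not the actual
# implementation of any existing plugin: it assumes the pluggy marker is
# importable as `quetz.hookimpl` and relies only on the keyword arguments
# visible in the call above (pkgstore, channel_name, subdirs). The marker
# file name ".last_indexed" is purely illustrative.
#
# from quetz import hookimpl
#
#
# @hookimpl
# def post_package_indexing(pkgstore, channel_name, subdirs):
#     # Drop one small marker file per subdir so the effect of the hook is
#     # visible in the package store.
#     for subdir in subdirs:
#         pkgstore.add_file(
#             f"indexed {channel_name}/{subdir}\n",
#             channel_name,
#             f"{subdir}/.last_indexed",
#         )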
plugin_authenticators: List[Type[BaseAuthenticator]] = [
    ep.load() for ep in pkg_resources.iter_entry_points('quetz.authenticator')
]

auth_registry = AuthenticatorRegistry()
auth_registry.set_router(app)

for auth_cls in builtin_authenticators + plugin_authenticators:
    auth_obj = auth_cls(config)
    if auth_obj.is_enabled:
        auth_registry.register(auth_obj)

# other routers

pm = get_plugin_manager()

api_router = APIRouter()

plugin_routers = pm.hook.register_router()
for router in plugin_routers:
    app.include_router(router)

app.include_router(jobs_api.get_router())
app.include_router(metrics_api.get_router())


# helper functions


async def check_token_revocation(session):
    valid = True
    identity_provider = session.get("identity_provider")
    if identity_provider is None:
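
# The loop above only requires that a plugin authenticator class can be
# constructed with `config` and exposes an `is_enabled` flag; discovery
# happens through the 'quetz.authenticator' entry-point group. The sketch
# below illustrates a plugin-provided authenticator under those assumptions:
# the class name, the `provider` string, and the entry-point declaration are
# hypothetical, and a real authenticator would also implement whatever login
# handling BaseAuthenticator defines (omitted here).
#
# class DummyAuthenticator(BaseAuthenticator):
#     provider = "dummy"      # illustrative provider name
#     is_enabled = True       # could instead be derived from a config section
#
#
# A plugin would expose it via an entry point such as (setup.cfg syntax):
#
#   [options.entry_points]
#   quetz.authenticator =
#       dummy = my_quetz_plugin:DummyAuthenticator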
def plugin_manager():
    return get_plugin_manager()
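
# Usage sketch: if this helper is exposed as a pytest fixture (an assumption;
# no decorator is shown above), a test can receive the pluggy manager
# directly and check that the hooks used elsewhere in this section are
# available. The test name below is illustrative.
#
# def test_plugin_manager_exposes_indexing_hook(plugin_manager):
#     # post_package_indexing is the hook dispatched at the end of
#     # update_indexes; it should be registered on the manager.
#     assert hasattr(plugin_manager.hook, "post_package_indexing")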