async def load_collections_extra_docs(collection_paths: t.Mapping[str, str]
                                      ) -> t.Mapping[str, CollectionExtraDocsInfoT]:
    '''Load extra docs data.

    :arg collection_paths: Mapping of collection_name to the collection's path.
    :returns: A mapping of collection_name to CollectionExtraDocsInfoT.
    '''
    flog = mlog.fields(func='load_collections_extra_docs')
    flog.debug('Enter')

    lib_ctx = app_context.lib_ctx.get()
    tasks = {}
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for name, path in collection_paths.items():
            tasks[name] = await pool.spawn(load_collection_extra_docs(name, path))
        extra_docs = await asyncio.gather(*tasks.values())

    # Note: Python dicts preserve insertion order as long as the dict isn't
    # modified, so the keys of ``tasks`` line up one-to-one with the gathered
    # results.
    result = dict(zip(tasks, extra_docs))

    flog.debug('Leave')
    return result
async def get_collection_versions(deps: t.Mapping[str, str],
                                  galaxy_url: str,
                                  ) -> t.Dict[str, SemVer]:
    """
    Retrieve the latest version of each collection.

    :arg deps: Mapping of collection name to a version specification.
    :arg galaxy_url: The url for the galaxy server to use.
    :returns: Dict mapping collection name to latest version.
    """
    requestors = {}
    async with aiohttp.ClientSession() as aio_session:
        lib_ctx = app_context.lib_ctx.get()
        async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
            client = GalaxyClient(aio_session, galaxy_server=galaxy_url)
            for collection_name, version_spec in deps.items():
                # Pre-releases are included when resolving the version spec.
                requestors[collection_name] = await pool.spawn(
                    client.get_latest_matching_version(
                        collection_name, version_spec, pre=True))
            responses = await asyncio.gather(*requestors.values())

    # Note: Python dicts have a stable sort order and since we haven't modified the dict since we
    # used requestors.values() to generate responses, requestors and responses therefore have
    # a matching order.  Pair them directly instead of a manual accumulation loop.
    return dict(zip(requestors, responses))
async def get_version_info(collections, pypi_server_url):
    """
    Return the versions of all the collections and ansible-base/ansible-core.

    :arg collections: Iterable of collection names whose versions should be
        retrieved from galaxy.
    :arg pypi_server_url: URL of the PyPI server to query for the
        ansible-base/ansible-core versions.
    :returns: Dict mapping collection name to its known versions.
        ansible-base's versions are stored under the special key
        ``_ansible_base``.
    """
    loop = asyncio.get_running_loop()
    # Make sure exceptions raised inside the spawned tasks get reported.
    loop.set_exception_handler(display_exception)

    requestors = {}
    async with aiohttp.ClientSession() as aio_session:
        lib_ctx = app_context.lib_ctx.get()
        async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
            pypi_client = AnsibleBasePyPiClient(
                aio_session, pypi_server_url=pypi_server_url)
            requestors['_ansible_base'] = await pool.spawn(
                pypi_client.get_versions())

            galaxy_client = GalaxyClient(aio_session)
            for collection in collections:
                requestors[collection] = await pool.spawn(
                    galaxy_client.get_versions(collection))

            responses = await asyncio.gather(*requestors.values())

    # Note: Python dicts preserve insertion order, and requestors wasn't
    # modified after generating responses from requestors.values(), so the
    # keys and responses pair up directly (no index bookkeeping needed).
    return dict(zip(requestors, responses))
async def retrieve(collections: t.List[str],
                   tmp_dir: str,
                   galaxy_server: str,
                   ansible_base_source: t.Optional[str] = None,
                   collection_cache: t.Optional[str] = None
                   ) -> t.Dict[str, 'semver.Version']:
    """
    Download ansible-core and the latest versions of the collections.

    :arg collections: List of collection names to download.
    :arg tmp_dir: The directory to download into.
    :arg galaxy_server: URL to the galaxy server.
    :kwarg ansible_base_source: If given, a path to an ansible-core checkout or expanded sdist.
        This will be used instead of downloading an ansible-core package if the version matches
        with ``ansible_base_version``.
    :kwarg collection_cache: If given, a path to a directory containing collection tarballs.
        These tarballs will be used instead of downloading new tarballs provided that the
        versions match the criteria (latest compatible version known to galaxy).
    :returns: Map of collection name to directory it is in.  ansible-core will
        use the special key, `_ansible_base`.
    """
    # Collections land in their own subdirectory, restricted to this user.
    collection_dir = os.path.join(tmp_dir, 'collections')
    os.mkdir(collection_dir, mode=0o700)

    requestors = {}
    lib_ctx = app_context.lib_ctx.get()
    async with aiohttp.ClientSession() as aio_session:
        async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
            # ansible-core is fetched from the devel branch unless a local
            # source was given via ansible_base_source.
            requestors['_ansible_base'] = await pool.spawn(
                get_ansible_base(aio_session, '@devel', tmp_dir,
                                 ansible_base_source=ansible_base_source))
            downloader = CollectionDownloader(aio_session, collection_dir,
                                              galaxy_server=galaxy_server,
                                              collection_cache=collection_cache)
            for collection in collections:
                # '*' means: any version the galaxy server knows about.
                requestors[collection] = await pool.spawn(
                    downloader.download_latest_matching(collection, '*'))

            responses = await asyncio.gather(*requestors.values())

    # Collection downloads yield DownloadResults objects; normalize everything
    # to a plain path.
    responses = [
        data.download_path if isinstance(data, DownloadResults) else data
        for data in responses
    ]
    # Note: Python dicts have always had a stable order as long as you don't modify the dict.
    # So requestors (implicitly, the keys) and responses have a matching order here.
    return dict(zip(requestors, responses))
async def make_collection_dists(dest_dir: str, collection_dirs: t.List[str]) -> None:
    """Build a distribution archive for every collection checkout.

    :arg dest_dir: Directory the dists are written into.
    :arg collection_dirs: Collection checkout directories; each basename is
        presumably ``<x>-<y>-<name>-<version>`` — the last two dash-separated
        fields are used as the collection name and version.
    """
    lib_ctx = app_context.lib_ctx.get()
    dist_creators = []
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_dir in collection_dirs:
            basename = os.path.basename(collection_dir)
            # Only the trailing name and version fields matter here.
            _ignored, _ignored, name, version = basename.split('-', 3)
            dist_creators.append(await pool.spawn(
                make_collection_dist(name, version, collection_dir, dest_dir)))

        await asyncio.gather(*dist_creators)
async def output_indexes(collection_to_plugin_info: CollectionInfoT,
                         dest_dir: str,
                         collection_metadata: t.Mapping[str, AnsibleCollectionMetadata],
                         squash_hierarchy: bool = False,
                         ) -> None:
    """
    Generate collection-level index pages for the collections.

    :arg collection_to_plugin_info: Mapping of collection_name to Mapping of plugin_type to
        Mapping of plugin_name to short_description.
    :arg dest_dir: The directory to place the documentation in.
    :arg collection_metadata: Dictionary mapping collection names to collection metadata objects.
    :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Undefined behavior if documentation for multiple collections are created.
    """
    flog = mlog.fields(func='output_indexes')
    flog.debug('Enter')

    # Defensive: tolerate a None collection_metadata even though the
    # annotation declares a Mapping.
    if collection_metadata is None:
        collection_metadata = {}

    env = doc_environment(('antsibull.data', 'docsite'))
    # Get the templates
    collection_plugins_tmpl = env.get_template('plugins_by_collection.rst.j2')

    writers = []
    lib_ctx = app_context.lib_ctx.get()

    if not squash_hierarchy:
        collection_toplevel = os.path.join(dest_dir, 'collections')
        flog.fields(toplevel=collection_toplevel,
                    exists=os.path.isdir(collection_toplevel)).debug(
                        'collection_toplevel exists?')
        # This is only safe because we made sure that the top of the directory tree we're
        # writing to (docs/docsite/rst) is only writable by us.
        os.makedirs(collection_toplevel, mode=0o755, exist_ok=True)
    else:
        collection_toplevel = dest_dir

    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, plugin_maps in collection_to_plugin_info.items():
            if not squash_hierarchy:
                # e.g. community.general -> <toplevel>/community/general
                collection_dir = os.path.join(collection_toplevel,
                                              *(collection_name.split('.')))
            else:
                collection_dir = collection_toplevel
            writers.append(await pool.spawn(
                write_plugin_lists(collection_name, plugin_maps,
                                   collection_plugins_tmpl,
                                   collection_dir,
                                   collection_metadata[collection_name])))

        await asyncio.gather(*writers)

    flog.debug('Leave')
async def output_all_plugin_rst(collection_to_plugin_info: CollectionInfoT,
                                plugin_info: t.Dict[str, t.Any],
                                nonfatal_errors: PluginErrorsT,
                                dest_dir: str,
                                collection_metadata: t.Mapping[str, AnsibleCollectionMetadata],
                                squash_hierarchy: bool = False) -> None:
    """
    Output rst files for each plugin.

    :arg collection_to_plugin_info: Mapping of collection_name to Mapping of plugin_type to
        Mapping of plugin_name to short_description.
    :arg plugin_info: Documentation information for all of the plugins.
    :arg nonfatal_errors: Mapping of plugins to nonfatal errors.  Using this to note on the
        docs pages when documentation wasn't formatted such that we could use it.
    :arg dest_dir: The directory to place the documentation in.
    :arg collection_metadata: Dictionary mapping collection names to collection metadata objects.
    :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Undefined behavior if documentation for multiple collections are created.
    """
    # Set up the jinja environment and load both templates once up front.
    env = doc_environment(('antsibull.data', 'docsite'))
    plugin_tmpl = env.get_template('plugin.rst.j2')
    error_tmpl = env.get_template('plugin-error.rst.j2')

    lib_ctx = app_context.lib_ctx.get()
    writers = []
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, plugins_by_type in collection_to_plugin_info.items():
            for plugin_type, plugins in plugins_by_type.items():
                # Only the plugin names matter here; the short descriptions
                # in the values are not needed.
                for plugin_short_name in plugins:
                    plugin_name = '.'.join((collection_name, plugin_short_name))
                    writers.append(await pool.spawn(write_plugin_rst(
                        collection_name,
                        collection_metadata[collection_name],
                        plugin_short_name,
                        plugin_type,
                        plugin_info[plugin_type].get(plugin_name),
                        nonfatal_errors[plugin_type][plugin_name],
                        plugin_tmpl,
                        error_tmpl,
                        dest_dir,
                        squash_hierarchy=squash_hierarchy)))

        # Write docs for each plugin
        await asyncio.gather(*writers)
async def download_collections(versions: t.Mapping[str, SemVer],
                               galaxy_url: str,
                               download_dir: str,
                               collection_cache: t.Optional[str] = None,
                               ) -> None:
    """Download the given version of every collection from galaxy.

    :arg versions: Mapping of collection name to the exact version to fetch.
    :arg galaxy_url: URL of the galaxy server to download from.
    :arg download_dir: Directory the collections are downloaded into.
    :kwarg collection_cache: If given, a path to a directory containing
        collection tarballs that may be used instead of downloading anew.
    """
    async with aiohttp.ClientSession() as aio_session:
        lib_ctx = app_context.lib_ctx.get()
        async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
            downloader = CollectionDownloader(aio_session, download_dir,
                                              collection_cache=collection_cache,
                                              galaxy_server=galaxy_url)
            downloads = [
                await pool.spawn(downloader.download(name, version))
                for name, version in versions.items()
            ]
            await asyncio.gather(*downloads)
async def output_extra_docs(dest_dir: str,
                            extra_docs_data: t.Mapping[str, CollectionExtraDocsInfoT],
                            squash_hierarchy: bool = False) -> None:
    """
    Generate collection-level index pages for the collections.

    :arg dest_dir: The directory to place the documentation in.
    :arg extra_docs_data: Dictionary mapping collection names to CollectionExtraDocsInfoT.
    :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Undefined behavior if documentation for multiple collections are created.
    """
    flog = mlog.fields(func='output_extra_docs')
    flog.debug('Enter')

    lib_ctx = app_context.lib_ctx.get()
    writers = []

    if squash_hierarchy:
        collection_toplevel = dest_dir
    else:
        collection_toplevel = os.path.join(dest_dir, 'collections')

    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, (dummy, documents) in extra_docs_data.items():
            if squash_hierarchy:
                collection_dir = collection_toplevel
            else:
                # e.g. community.general -> <toplevel>/community/general
                collection_dir = os.path.join(collection_toplevel,
                                              *collection_name.split('.'))
            for source_path, rel_path in documents:
                full_path = os.path.join(collection_dir, rel_path)
                os.makedirs(os.path.dirname(full_path), mode=0o755, exist_ok=True)
                writers.append(await pool.spawn(copy_file(source_path, full_path)))

        await asyncio.gather(*writers)

    flog.debug('Leave')
async def output_all_plugin_stub_rst(
        stubs_info: t.Mapping[str, t.Mapping[str, t.Mapping[str, t.Any]]],
        dest_dir: str,
        collection_metadata: t.Mapping[str, AnsibleCollectionMetadata],
        squash_hierarchy: bool = False) -> None:
    """
    Output rst files for each plugin stub.

    :arg stubs_info: Mapping of collection_name to Mapping of plugin_type to
        Mapping of plugin_name to routing information.
    :arg dest_dir: The directory to place the documentation in.
    :arg collection_metadata: Dictionary mapping collection names to collection metadata objects.
    :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Undefined behavior if documentation for multiple collections are created.
    """
    # Set up the jinja environment and load the stub templates once up front.
    env = doc_environment(('antsibull.data', 'docsite'))
    redirect_tmpl = env.get_template('plugin-redirect.rst.j2')
    tombstone_tmpl = env.get_template('plugin-tombstone.rst.j2')

    lib_ctx = app_context.lib_ctx.get()
    writers = []
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, plugins_by_type in stubs_info.items():
            for plugin_type, plugins in plugins_by_type.items():
                for plugin_short_name, routing_data in plugins.items():
                    writers.append(await pool.spawn(write_stub_rst(
                        collection_name,
                        collection_metadata[collection_name],
                        plugin_short_name,
                        plugin_type,
                        routing_data,
                        redirect_tmpl,
                        tombstone_tmpl,
                        dest_dir,
                        squash_hierarchy=squash_hierarchy)))

        # Write docs for each plugin
        await asyncio.gather(*writers)
async def collect_changelogs(collectors: t.List[CollectionChangelogCollector],
                             base_collector: AnsibleBaseChangelogCollector,
                             collection_cache: t.Optional[str]):
    """
    Download changelog data for ansible-base/ansible-core and all collections.

    :arg collectors: Changelog collectors, one per collection; each downloads
        its collection's data through the shared CollectionDownloader.
    :arg base_collector: Collector that downloads the ansible-base changelog
        and porting guide.
    :arg collection_cache: If given, a path to a directory containing
        collection tarballs that may be used instead of downloading anew.
    """
    lib_ctx = app_context.lib_ctx.get()
    # All downloads go into a temporary directory that is removed once the
    # collectors have finished.
    with tempfile.TemporaryDirectory() as tmp_dir:
        async with aiohttp.ClientSession() as aio_session:
            async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
                downloader = CollectionDownloader(aio_session, tmp_dir,
                                                  collection_cache=collection_cache)

                async def base_downloader(version):
                    # Callback handed to the base collector: fetch
                    # ansible-base at the requested version into tmp_dir.
                    return await get_ansible_base(aio_session, version, tmp_dir)

                requestors = [
                    await pool.spawn(collector.download(downloader))
                    for collector in collectors]
                requestors.append(
                    await pool.spawn(base_collector.download_changelog(base_downloader)))
                requestors.append(
                    await pool.spawn(base_collector.download_porting_guide(aio_session)))
                await asyncio.gather(*requestors)
async def output_collection_namespace_indexes(
        collection_namespaces: t.Mapping[str, t.List[str]],
        dest_dir: str,
        breadcrumbs: bool = True) -> None:
    """
    Generate collection namespace index pages for the collections.

    :arg collection_namespaces: Mapping from collection namespaces to list of collection names.
    :arg dest_dir: The directory to place the documentation in.
    :kwarg breadcrumbs: Default True.  Set to False if breadcrumbs for collections should be
        disabled.  This will disable breadcrumbs but save on memory usage.
    """
    flog = mlog.fields(func='output_collection_namespace_indexes')
    flog.debug('Enter')

    # Load the namespace index template once up front.
    env = doc_environment(('antsibull.data', 'docsite'))
    collection_list_tmpl = env.get_template('list_of_collections_by_namespace.rst.j2')

    lib_ctx = app_context.lib_ctx.get()
    writers = []
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for namespace, collection_names in collection_namespaces.items():
            namespace_dir = os.path.join(dest_dir, 'collections', namespace)
            # This is only safe because we made sure that the top of the directory tree we're
            # writing to (docs/docsite/rst) is only writable by us.
            os.makedirs(namespace_dir, mode=0o755, exist_ok=True)

            writers.append(await pool.spawn(write_collection_namespace_index(
                namespace, collection_names, collection_list_tmpl,
                namespace_dir, breadcrumbs=breadcrumbs)))

        await asyncio.gather(*writers)

    flog.debug('Leave')
async def load_all_collection_routing(
        collection_metadata: t.Mapping[str, AnsibleCollectionMetadata]
) -> t.Dict[str, t.Dict[str, t.Dict[str, t.Any]]]:
    """Load routing data for every collection and merge it into one table.

    :arg collection_metadata: Mapping of collection name to its metadata.
    :returns: Mapping of plugin_type to plugin_name to routing data, merged
        across all collections.
    """
    # Load each collection's routing data concurrently.
    lib_ctx = app_context.lib_ctx.get()
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        requestors = [
            await pool.spawn(load_collection_routing(collection, metadata))
            for collection, metadata in collection_metadata.items()]
        responses = await asyncio.gather(*requestors)

    # Merge per-collection routing into one big routing table, keyed by
    # plugin type.
    global_plugin_routing: t.Dict[str, t.Dict[str, t.Dict[str, t.Any]]] = {}
    for plugin_type in DOCUMENTABLE_PLUGINS:
        merged: t.Dict[str, t.Any] = {}
        for collection_plugin_routing in responses:
            merged.update(collection_plugin_routing[plugin_type])
        global_plugin_routing[plugin_type] = merged

    return global_plugin_routing
async def output_plugin_indexes(plugin_info: PluginCollectionInfoT,
                                dest_dir: str) -> None:
    """
    Generate top-level plugin index pages for all plugins of a type in all collections.

    :arg plugin_info: Mapping of plugin_type to Mapping of collection_name to Mapping of
        plugin_name to short_description.
    :arg dest_dir: The directory to place the documentation in.
    """
    flog = mlog.fields(func='output_plugin_indexes')
    flog.debug('Enter')

    # Fetch the index template once up front.
    env = doc_environment(('antsibull.data', 'docsite'))
    plugin_list_tmpl = env.get_template('list_of_plugins.rst.j2')

    collection_toplevel = os.path.join(dest_dir, 'collections')
    flog.fields(toplevel=collection_toplevel,
                exists=os.path.isdir(collection_toplevel)).debug(
                    'collection_toplevel exists?')
    # This is only safe because we made sure that the top of the directory tree we're writing to
    # (docs/docsite/rst) is only writable by us.
    os.makedirs(collection_toplevel, mode=0o755, exist_ok=True)

    lib_ctx = app_context.lib_ctx.get()
    writers = []
    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for plugin_type, per_collection_data in plugin_info.items():
            filename = os.path.join(collection_toplevel,
                                    'index_{type}.rst'.format(type=plugin_type))
            writers.append(await pool.spawn(write_plugin_type_index(
                plugin_type, per_collection_data, plugin_list_tmpl, filename)))

        await asyncio.gather(*writers)

    flog.debug('Leave')