def generate_recipes(app): """ Go through every folder in the `bioconda-recipes/recipes` dir, have a README.rst file generated and generate a recipes.rst from the collected data. """ renderer = Renderer(app) load_config(os.path.join(os.path.dirname(RECIPE_DIR), "config.yml")) repodata = RepoData() repodata.set_cache(op.join(app.env.doctreedir, 'RepoDataCache.csv')) # force loading repodata to avoid duplicate loads from threads repodata.df # pylint: disable=pointless-statement recipes: List[Dict[str, Any]] = [] recipe_dirs = os.listdir(RECIPE_DIR) if parallel_available and len(recipe_dirs) > 5: nproc = app.parallel else: nproc = 1 if nproc == 1: for folder in status_iterator(recipe_dirs, 'Generating package READMEs...', "purple", len(recipe_dirs), app.verbosity): if not op.isdir(op.join(RECIPE_DIR, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue recipes.extend(generate_readme(folder, repodata, renderer)) else: tasks = ParallelTasks(nproc) chunks = make_chunks(recipe_dirs, nproc) def process_chunk(chunk): _recipes: List[Dict[str, Any]] = [] for folder in chunk: if not op.isdir(op.join(RECIPE_DIR, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue _recipes.extend(generate_readme(folder, repodata, renderer)) return _recipes def merge_chunk(_chunk, res): recipes.extend(res) for chunk in status_iterator( chunks, 'Generating package READMEs with {} threads...'.format(nproc), "purple", len(chunks), app.verbosity): tasks.add_task(process_chunk, chunk, merge_chunk) logger.info("waiting for workers...") tasks.join()
def printRootNodes(config_path, recipe_folder, sinceNDays, missing, rootNodes):
    config = utils.load_config(config_path)
    blacklist = utils.get_blacklist(config, recipe_folder)
    recipes = utils.get_recipes(recipe_folder)

    if sinceNDays:
        timeStamp = datetime.timestamp(datetime.now() - timedelta(sinceNDays))
        linux, noarch, osx = getRepoData(timeStamp)
        arch = linux.intersection(osx)
        ready = noarch.union(arch)
        print(
            "{} built in noarch and both archs combined: {} noarch, {} linux-64, {} osx-64"
            .format(len(ready), len(noarch), len(linux), len(osx)))

    dag, name2recipes = graph.build(recipes, config=config_path,
                                    blacklist=blacklist)
    if not rootNodes:
        root_nodes = sorted([
            (len(nx.algorithms.descendants(dag, k)), k)
            for k, v in dag.in_degree().items()
            if (k.startswith('bioconductor') or k.startswith('r-'))
        ])
    else:
        root_nodes = sorted([
            (len(nx.algorithms.descendants(dag, k)), k)
            for k, v in dag.in_degree().items()
            if v == 0 and (k.startswith('bioconductor') or k.startswith('r-'))
        ])

    print("Package\tNumber of dependent packages")
    for n in root_nodes:
        # blacklisted packages also show up as root nodes with in-degree 0
        if n[1] in blacklist:
            continue
        if sinceNDays:
            if n[1] in ready:
                if not missing:
                    print("recipes/{}\t{}".format(n[1], n[0]))
            elif missing:
                print("recipes/{}\t{}".format(n[1], n[0]))
        else:
            print("recipes/{}\t{}".format(n[1], n[0]))
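# getRepoData is not defined in this excerpt. Judging from its call sites (a
# 3-tuple of per-platform sets when given a timestamp above, a single set of
# package names in printMissingCRAN below), its assumed contract looks roughly
# like this hypothetical stub:
def getRepoData_stub(timestamp=None):
    # The real helper would populate these from repository data, keeping only
    # packages built after `timestamp` when one is given.
    linux, noarch, osx = set(), set(), set()
    if timestamp is None:
        return linux | noarch | osx
    return linux, noarch, osx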
def printMissingCRAN(config_path, recipe_folder):
    config = utils.load_config(config_path)
    recipes = utils.get_recipes(recipe_folder)
    repo = getRepoData()

    # Construct a set of all dependencies (ignoring versions)
    dependencies = set()
    for r in recipes:
        if "bioconductor" not in r:
            continue
        # first element is a dictionary with keys requirements, build, etc.
        d = utils.load_meta_fast(r)[0]
        for dep in d['requirements']['run']:
            if dep.startswith('r-'):
                dependencies.add(dep.split(' ')[0])

    missing = dependencies - repo
    print("Missing {} packages!".format(len(missing)))
    for m in missing:
        print(m)
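# Hypothetical invocation of printMissingCRAN (the paths are placeholders):
# report every r- run dependency of a bioconductor recipe that is absent from
# the repository data.
if __name__ == "__main__":
    printMissingCRAN("bioconda-recipes/config.yml", "bioconda-recipes/recipes")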
# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    'conda.io': ('https://conda.io/en/latest', None),
    'conda-build': ('https://conda.io/projects/conda-build/en/latest/', None),
    'conda': ('https://conda.io/projects/conda/en/latest/', None),
}

# We are using the `extlinks` extension to render links for identifiers:
extlinks = {
    'biotools': ('https://bio.tools/%s', ''),
    'doi': ('https://doi.org/%s', ''),
}

# autogenerate autodoc stubs via autosummary
autosummary_generate = True

# placate assertion in utils.RepoData()
from bioconda_utils import utils
utils.load_config('../bioconda-recipes/config.yml')
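# With the `extlinks` mapping above, reStructuredText sources can write roles
# such as :biotools:`samtools` or :doi:`10.1000/demo`; the %s placeholder
# expands them to https://bio.tools/samtools and https://doi.org/10.1000/demo,
# with the bare identifier as link text. (The identifiers here are
# placeholders for illustration, not real entries.)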
def generate_recipes(app): """Generates recipe RST files - Checks out repository - Prepares `RepoData` - Selects recipes (if `BIOCONDA_FILTER_RECIPES` in environment) - Dispatches calls to `generate_readme` for each recipe - Removes old RST files """ source_dir = app.env.srcdir doctree_dir = app.env.doctreedir # .../build/doctrees repo_dir = op.join(op.dirname(app.env.srcdir), "_bioconda_recipes") recipe_basedir = op.join(repo_dir, app.config.bioconda_recipes_path) repodata_cache_file = op.join(doctree_dir, 'RepoDataCache.pkl') repo_config_file = os.path.join(repo_dir, app.config.bioconda_config_file) output_dir = op.join(source_dir, 'recipes') # Initialize Repo and point globals at the right place repo = BiocondaRepo(folder=repo_dir, home=app.config.bioconda_repo_url) repo.checkout_master() load_config(repo_config_file) logger.info("Preloading RepoData") repodata = RepoData() repodata.set_cache(repodata_cache_file) repodata.df # pylint: disable=pointless-statement logger.info("Preloading RepoData (done)") # Collect recipe names recipe_dirs = os.listdir(recipe_basedir) if 'BIOCONDA_FILTER_RECIPES' in os.environ: limiter = os.environ['BIOCONDA_FILTER_RECIPES'] try: recipe_dirs = recipe_dirs[:int(limiter)] except ValueError: match = re.compile(limiter) recipe_dirs = [ recipe for recipe in recipe_dirs if match.search(recipe) ] # Set up renderer preparing recipe readme.rst files recipe_base_url = "{base}/tree/master/{recipes}/".format( base=app.config.bioconda_repo_url.rstrip(".git"), recipes=app.config.bioconda_recipes_path) renderer = Renderer(app, {'gh_recipes': recipe_base_url}) recipes: List[str] = [] if parallel_available and len(recipe_dirs) > 5: nproc = app.parallel else: nproc = 1 if nproc == 1: for folder in status_iterator(recipe_dirs, 'Generating package READMEs...', "purple", len(recipe_dirs), app.verbosity): if not op.isdir(op.join(recipe_basedir, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue recipes.extend( generate_readme(recipe_basedir, output_dir, folder, repodata, renderer)) else: tasks = ParallelTasks(nproc) chunks = make_chunks(recipe_dirs, nproc) def process_chunk(chunk): _recipes: List[Dict[str, Any]] = [] for folder in chunk: if not op.isdir(op.join(recipe_basedir, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue _recipes.extend( generate_readme(recipe_basedir, output_dir, folder, repodata, renderer)) return _recipes def merge_chunk(_chunk, res): recipes.extend(res) for chunk in status_iterator( chunks, 'Generating package READMEs with {} threads...'.format(nproc), "purple", len(chunks), app.verbosity): tasks.add_task(process_chunk, chunk, merge_chunk) logger.info("waiting for workers...") tasks.join() files_wanted = set(recipes) for root, dirs, files in os.walk(output_dir, topdown=False): for fname in files: path = op.join(root, fname) if path not in files_wanted: os.unlink(path) for dname in dirs: try: os.rmdir(op.join(root, dname)) except OSError: pass
def config_fixture():
    config = utils.load_config(
        os.path.join(os.path.dirname(__file__), "test-config.yaml"))
    yield config
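# A usage sketch, assuming pytest: the generator above becomes a fixture via
# the callable form of the decorator, and a test receives its yielded value by
# parameter name. The test body is an illustrative assumption.
import pytest

config_fixture = pytest.fixture(config_fixture)

def test_config_is_loaded(config_fixture):
    assert config_fixture is not None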
def generate_recipes(app): """Generates recipe RST files - Checks out repository - Prepares `RepoData` - Selects recipes (if BIOCONDA_FILTER_RECIPES in environment) - Dispatches calls to `generate_readme` for each recipe - Removes old RST files """ source_dir = app.env.srcdir doctree_dir = app.env.doctreedir # .../build/doctrees repo_dir = op.join(op.dirname(app.env.srcdir), "_bioconda_recipes") recipe_basedir = op.join(repo_dir, app.config.bioconda_recipes_path) repodata_cache_file = op.join(doctree_dir, 'RepoDataCache.pkl') repo_config_file = os.path.join(repo_dir, app.config.bioconda_config_file) output_dir = op.join(source_dir, 'recipes') # Initialize Repo and point globals at the right place repo = BiocondaRepo(folder=repo_dir, home=app.config.bioconda_repo_url) repo.checkout_master() load_config(repo_config_file) logger.info("Preloading RepoData") repodata = RepoData() repodata.set_cache(repodata_cache_file) repodata.df # pylint: disable=pointless-statement logger.info("Preloading RepoData (done)") # Collect recipe names recipe_dirs = os.listdir(recipe_basedir) if 'BIOCONDA_FILTER_RECIPES' in os.environ: limiter = os.environ['BIOCONDA_FILTER_RECIPES'] try: recipe_dirs = recipe_dirs[:int(limiter)] except ValueError: match = re.compile(limiter) recipe_dirs = [recipe for recipe in recipe_dirs if match.search(recipe)] # Set up renderer preparing recipe readme.rst files recipe_base_url = "{base}/tree/master/{recipes}/".format( base=app.config.bioconda_repo_url.rstrip(".git"), recipes=app.config.bioconda_recipes_path ) renderer = Renderer(app, {'gh_recipes': recipe_base_url}) recipes: List[str] = [] if parallel_available and len(recipe_dirs) > 5: nproc = app.parallel else: nproc = 1 if nproc == 1: for folder in status_iterator( recipe_dirs, 'Generating package READMEs...', "purple", len(recipe_dirs), app.verbosity): if not op.isdir(op.join(recipe_basedir, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue recipes.extend(generate_readme(recipe_basedir, output_dir, folder, repodata, renderer)) else: tasks = ParallelTasks(nproc) chunks = make_chunks(recipe_dirs, nproc) def process_chunk(chunk): _recipes: List[Dict[str, Any]] = [] for folder in chunk: if not op.isdir(op.join(recipe_basedir, folder)): logger.error("Item '%s' in recipes folder is not a folder", folder) continue _recipes.extend(generate_readme(recipe_basedir, output_dir, folder, repodata, renderer)) return _recipes def merge_chunk(_chunk, res): recipes.extend(res) for chunk in status_iterator( chunks, 'Generating package READMEs with {} threads...'.format(nproc), "purple", len(chunks), app.verbosity): tasks.add_task(process_chunk, chunk, merge_chunk) logger.info("waiting for workers...") tasks.join() files_wanted = set(recipes) for root, dirs, files in os.walk(output_dir, topdown=False): for fname in files: path = op.join(root, fname) if path not in files_wanted: os.unlink(path) for dname in dirs: try: os.rmdir(op.join(root, dname)) except OSError: pass
def linter(config_file, recipes_folder):
    """Prepares a linter given config_file and recipes_folder"""
    config = utils.load_config(config_file)
    yield lint.Linter(config, recipes_folder, nocatch=True)
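# A usage sketch, assuming the fixture above is registered with
# @pytest.fixture and that bioconda_utils' lint.Linter exposes lint() and
# get_messages() (assumptions about the API; the recipe path is hypothetical):
def test_recipe_lints_clean(linter):
    linter.lint(['recipes/my-package'])
    assert not linter.get_messages()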
def generate_recipes(app): """ Go through every folder in the `ggd-recipes/recipes` dir, have a README.rst file generated and generate a recipes.rst from the collected data. """ renderer = Renderer(app) load_config(os.path.join(os.path.dirname(RECIPE_DIR), "config.yaml")) repodata = RepoData() # Add ggd channels to repodata object #repodata.channels = ['ggd-genomics', 'conda-forge', 'bioconda', 'defaults'] recipes = [] ## Get each folder that contains a meat.yaml file recipe_dirs = [] for root, dirs, files in os.walk(RECIPE_DIR): if "meta.yaml" in files: recipe_dirs.append(root) if parallel_available and len(recipe_dirs) > 5: nproc = app.parallel else: nproc = 1 if nproc == 1: for folder in status_iterator( recipe_dirs, 'Generating package READMEs...', "purple", len(recipe_dirs), app.verbosity): recipes.extend(generate_readme(folder, repodata, renderer)) else: tasks = ParallelTasks(nproc) chunks = make_chunks(recipe_dirs, nproc) def process_chunk(chunk): _recipes = [] for folder in chunk: _recipes.extend(generate_readme(folder, repodata, renderer)) return _recipes def merge_chunk(chunk, res): recipes.extend(res) for chunk in status_iterator( chunks, 'Generating package READMEs with {} threads...'.format(nproc), "purple", len(chunks), app.verbosity): tasks.add_task(process_chunk, chunk, merge_chunk) logger.info("waiting for workers...") tasks.join() updated = renderer.render_to_file("source/recipes.rst", "recipes.rst_t", { 'recipes': recipes, # order of columns in the table; must be keys in template_options 'keys': ['Package', 'Version', 'Linux', 'OSX', 'NOARCH'], 'noarch_symbol': '<i class="fa fa-desktop"></i>', 'linux_symbol': '<i class="fa fa-linux"></i>', 'osx_symbol': '<i class="fa fa-apple"></i>', 'dot_symbol': '<i class="fa fa-dot-circle-o"></i>' }) if updated: logger.info("Updated source/recipes.rst")