def buildDAG(args, modified_files, formula_dir):
    oDag = dag.DAG()
    dMap = {}
    common_names = set()
    for directory, d_list, f_list in os.walk(formula_dir):
        if 'meta.yaml' in f_list:
            meta = MetaData(os.path.join(directory, 'meta.yaml'))
            reqs = meta.meta['requirements']
            combined_deps = set(reqs.get('build', [])).union(reqs.get('run', []))
            common_names.add(meta.name())
            dMap[meta.meta_path] = (meta.name(), combined_deps, meta)

    # Populate DAG
    for recipe_path in dMap:
        oDag.add_node(recipe_path)

    # Create edges
    for ind_node, name, dependencies, meta, dag_node in _walkMapAndDag(dMap, oDag):
        controlled_dependencies = set(dependencies).intersection(common_names)
        if dMap[dag_node][0] in controlled_dependencies:
            oDag.add_edge(dag_node, ind_node)

    # Remove nodes we will not build (skips, unmodified recipes, etc)
    for ind_node, name, dependencies, meta, dag_node in _walkMapAndDag(dMap, oDag):
        controlled_dependencies = set(dependencies).intersection(common_names)
        if ind_node not in modified_files and controlled_dependencies and args.dependencies:
            continue
        elif ind_node not in modified_files:
            oDag.delete_node_if_exists(ind_node)
    return oDag
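# Hypothetical usage sketch for buildDAG, assuming the `dag` module above is
# the py-dag library (whose DAG exposes topological_sort() alongside the
# delete_node_if_exists() used above) and that `args` comes from an argparse
# parser with a --dependencies flag:
#
#     oDag = buildDAG(args, modified_files={'recipes/foo/meta.yaml'},
#                     formula_dir='recipes')
#     for recipe_path in oDag.topological_sort():
#         print("would build:", recipe_path)  # dependencies come first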
def have_variant_but_for_python(meta: MetaData) -> bool:
    """Checks if we have an exact or ``py[23]_`` prefixed match to
    name/version/buildstring.

    Ignores osx.

    Args:
      meta: Variant MetaData object

    Returns:
      True if a build matching the variant's name/version/build string
      (ignoring the ``pyXY`` prefix) already exists in the repodata.
    """
    def strip_py(build):
        if build.startswith("py"):
            return build[4:]
        return build

    builds = RepoData().get_package_data('build', name=meta.name(),
                                         version=meta.version(),
                                         platform=['linux', 'noarch'])
    res = [
        build for build in builds
        if strip_py(build) == strip_py(meta.build_id())
    ]
    if res:
        logger.debug("Package %s=%s has %s (want %s)",
                     meta.name(), meta.version(), res, meta.build_id())
    return bool(res)
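# Worked example of the ``pyXY`` stripping performed above (hypothetical
# build strings; build[4:] drops the four-character "py27"/"py36" prefix):
#
#     strip_py("py27h1341992_0")  # -> "h1341992_0"
#     strip_py("py36h1341992_0")  # -> "h1341992_0"  (same result: match found)
#     strip_py("h1341992_0")      # -> "h1341992_0"  (no prefix, unchanged)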
def __call__(self, args):
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError(
            "The source recipe directory should be the directory of the "
            "conda-recipe you want to build a feedstock for. Got {}".format(
                args.recipe_directory))

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        meta = None

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    msg = 'Initial commit of the {} feedstock.'.format(meta.name())

    try:
        generate_feedstock_content(feedstock_directory,
                                   args.recipe_directory, meta)
        if not args.no_git_repo:
            create_git_repo(feedstock_directory, msg)

        print("\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
              "and afterwards call 'conda smithy register-github'")
    except RuntimeError as e:
        print(e)
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print('Generating package READMEs...')
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        try:
            metadata = MetaData(op.join(RECIPE_DIR, folder))
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        except SystemExit:
            if versions:
                metadata = MetaData(op.join(RECIPE_DIR, folder, versions[0]))
            else:
                # ignore non-recipe folders
                continue

        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                       op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }
        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')
        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))
def __call__(self, args):
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError(
            "The source recipe directory should be the directory of the "
            "conda-recipe you want to build a feedstock for. Got {}".format(
                args.recipe_directory))

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        meta = None

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    msg = "Initial feedstock commit with conda-smithy {}.".format(__version__)

    os.makedirs(feedstock_directory)
    subprocess.check_call(["git", "init"], cwd=feedstock_directory)
    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    subprocess.check_call(["git", "commit", "-m", msg],
                          cwd=feedstock_directory)

    print(
        "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
        "and afterwards call 'conda smithy register-github'")
def read_recipe_name_version_build(meta_yaml_path):
    """
    Read the given metadata file and return
    (package_name, version, build_number)

    meta_yaml_path: May be a path to a meta.yaml file or its parent
                    recipe directory.
    """
    # Provide these default values, otherwise conda-build will
    # choke on jinja templates that reference them.
    # This will be fixed when they finally merge conda-build PR#662 and PR#666
    if "CONDA_NPY" not in os.environ:
        os.environ["CONDA_NPY"] = '19'
    if "CONDA_PY" not in os.environ:
        os.environ["CONDA_PY"] = '27'
    os.environ["GIT_FULL_HASH"] = "9999999"

    if os.path.isdir(meta_yaml_path):
        recipe_dir = meta_yaml_path
    else:
        recipe_dir = os.path.split(meta_yaml_path)[0]

    try:
        metadata = MetaData(recipe_dir)
        return (metadata.name(), metadata.version(), metadata.build_number())
    except SystemExit as ex:
        raise Exception(*ex.args)
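# Hypothetical usage (recipe path assumed); the same call works whether you
# pass the meta.yaml file itself or its recipe directory:
#
#     name, version, build_number = read_recipe_name_version_build(
#         "recipes/samtools/meta.yaml")
#     print("{}={} (build {})".format(name, version, build_number))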
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True,
                  expand_output=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')

    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}
    if m.final:
        rendered_metadata = [(m, False, False), ]
        index = None
    else:
        variants = (dict_of_lists_to_list_of_dicts(variants, m.config.platform)
                    if variants else get_package_variants(m, m.config))
        index = get_build_index(m.config, m.config.build_subdir)
        rendered_metadata = distribute_variants(
            m, variants, index,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)

    if not rendered_metadata:
        raise ValueError("No variants were satisfiable - no valid recipes "
                         "could be rendered.")

    if expand_output:
        rendered_metadata = expand_outputs(rendered_metadata, index)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata, index
def __call__(self, args):
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError(
            "The source recipe directory should be the directory of the "
            "conda-recipe you want to build a feedstock for. Got {}".format(
                args.recipe_directory
            )
        )

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        meta = None

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name())
    )
    msg = "Initial feedstock commit with conda-smithy {}.".format(__version__)

    os.makedirs(feedstock_directory)
    subprocess.check_call(["git", "init"], cwd=feedstock_directory)
    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    subprocess.check_call(
        ["git", "commit", "-m", msg], cwd=feedstock_directory
    )

    print(
        "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
        "and afterwards call 'conda smithy register-github'"
    )
def collapse_subpackage_nodes(graph):
    """Collapse all subpackage nodes into their parent recipe node

    We get one node per output, but a given recipe can have multiple outputs.
    It's important for dependency ordering in the graph that the outputs exist
    independently, but once those dependencies are established, we need to
    collapse subpackages down to a single job for the top-level recipe."""
    # group nodes by their recipe path first, then within those groups by their variant
    node_groups = {}
    for node in graph.nodes():
        if 'meta' in graph.node[node]:
            meta = graph.node[node]['meta']
            meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path']
            master = False

            master_meta = MetaData(meta_path, config=meta.config)
            if master_meta.name() == meta.name():
                master = True
            group = node_groups.get(meta_path, {})
            subgroup = group.get(HashableDict(meta.config.variant), {})
            if master:
                if 'master' in subgroup:
                    raise ValueError("tried to set more than one node in a group as master")
                subgroup['master'] = node
            else:
                sps = subgroup.get('subpackages', [])
                sps.append(node)
                subgroup['subpackages'] = sps
            group[HashableDict(meta.config.variant)] = subgroup
            node_groups[meta_path] = group

    for recipe_path, group in node_groups.items():
        for variant, subgroup in group.items():
            # if no node is the top-level recipe (only outputs, no top-level output),
            # need to obtain package/name from recipe given by common recipe path.
            subpackages = subgroup.get('subpackages')
            if 'master' not in subgroup:
                sp0 = graph.node[subpackages[0]]
                master_meta = MetaData(recipe_path, config=sp0['meta'].config)
                worker = sp0['worker']
                master_key = package_key(master_meta, worker['label'])
                graph.add_node(master_key, meta=master_meta, worker=worker)
                master = graph.node[master_key]
            else:
                master = subgroup['master']
                master_key = package_key(graph.node[master]['meta'],
                                         graph.node[master]['worker']['label'])

            # fold in dependencies for all of the other subpackages within a group.
            # This is just the intersection of the edges between all nodes.  Store
            # this on the "master" node.
            if subpackages:
                remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages]
                for edge in remap_edges:
                    # make sure not to add references to yourself
                    if edge[0] != master_key:
                        graph.add_edge(edge[0], master_key)
                    graph.remove_edge(*edge)

                # remove nodes that have been folded into master nodes
                for subnode in subpackages:
                    graph.remove_node(subnode)
def __call__(self, args):
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError("The source recipe directory should be the directory of the "
                      "conda-recipe you want to build a feedstock for. Got {}".format(
                          args.recipe_directory))

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        meta = None

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    msg = 'Initial commit of the {} feedstock.'.format(meta.name())

    try:
        generate_feedstock_content(feedstock_directory, args.recipe_directory, meta)
        if not args.no_git_repo:
            create_git_repo(feedstock_directory, msg)

        print("\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
              "and afterwards call 'conda smithy register-github'")
    except RuntimeError as e:
        print(e)
def have_variant(meta: MetaData) -> bool:
    """Checks if we have an exact match to name/version/buildstring

    Args:
      meta: Variant MetaData object

    Returns:
      True if the variant's build string exists already in the repodata
    """
    res = RepoData().get_package_data(name=meta.name(), version=meta.version(),
                                      build=meta.build_id(),
                                      platform=['linux', 'noarch'])
    if res:
        logger.debug("Package %s=%s=%s exists",
                     meta.name(), meta.version(), meta.build_id())
    return bool(res)
def __call__(self, args):
    if not os.path.isdir(args.recipe_directory):
        raise IOError(
            "The recipe directory should be the directory of the conda-recipe. "
            "Got {}".format(args.recipe_directory))

    meta = MetaData(args.recipe_directory)
    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))

    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    if not args.no_git_repo:
        create_git_repo(feedstock_directory, meta)
def have_noarch_python_build_number(meta: MetaData) -> bool:
    """Checks if we have a noarch:python build with same version+build_number

    Args:
      meta: Variant MetaData object

    Returns:
      True if noarch:python and version+build_number exists already in repodata
    """
    if meta.get_value('build/noarch') != 'python':
        return False

    res = RepoData().get_package_data(
        name=meta.name(), version=meta.version(),
        build_number=meta.build_number(),
        platform=['noarch'],
    )
    if res:
        logger.debug("Package %s=%s[build_number=%s, subdir=noarch] exists",
                     meta.name(), meta.version(), meta.build_number())
    return bool(res)
def will_build_variant(meta: MetaData) -> bool:
    """Check if the recipe variant will be built as currently rendered

    Args:
      meta: Variant MetaData object

    Returns:
      True if all extant build numbers are smaller than the one indicated
      by the variant MetaData.
    """
    build_numbers = RepoData().get_package_data(
        'build_number',
        name=meta.name(), version=meta.version(),
        platform=['linux', 'noarch'],
    )
    current_num = int(meta.build_number())
    res = all(num < current_num for num in build_numbers)
    if res:
        logger.debug("Package %s=%s will be built (max existing build number %s < %s)",
                     meta.name(), meta.version(),
                     max(build_numbers) if build_numbers else "N/A",
                     meta.build_number())
    return res
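# Worked example of the build-number check above (hypothetical numbers):
# if the channel already has builds 0 and 1 for this name/version and the
# recipe is rendered with build/number: 2, then all(num < 2 for num in [0, 1])
# is True and the variant will be built.  Re-rendering with build/number: 1
# would give False (1 < 1 fails), so the variant would be skipped.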
def __add_metadata(self, recipe_file: str, metadata: MetaData):
    pkg_name = metadata.name()
    pkg_version = metadata.version()

    effective_pkg_name = pkg_name
    if metadata.is_output:
        # i.e. it's a sub-package
        toplevel = metadata.get_top_level_recipe_without_outputs()
        parent_pkg_name = toplevel[PACKAGE][NAME]
        # Map sub-package to parent
        self.__add_sub_package((parent_pkg_name, pkg_version), pkg_name)
        # We want to record subsequent details as if they apply to
        # the parent package because that's the package we'll actually
        # build. Sub-packages are built as a consequence of that. The
        # MetaData.ms_depends() method doesn't return a complete set of
        # requirements (that I can see).
        effective_pkg_name = parent_pkg_name

        # The MetaData objects for some recipes don't appear to include
        # toplevel requirements (needed to build the sub-package),
        # so we manually dig into the parsed Dict here.
        if REQUIREMENTS in toplevel:
            # We could also look at requirements[BUILD]
            if HOST in toplevel[REQUIREMENTS]:
                nv = (effective_pkg_name, pkg_version)
                for spec in toplevel[REQUIREMENTS][HOST]:
                    m = MatchSpec(spec)
                    self.__add_package_requirement(nv, (m.name, m.version))

    nv = (effective_pkg_name, pkg_version)
    self.__add_package_recipe(nv, recipe_file)
    self.__add_package_version(nv)
    for dep in metadata.ms_depends(HOST):
        self.__add_package_requirement(nv, (dep.name, dep.version))
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')

    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source
    # goes into a different build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build
    # folder.  There's no way around it AFAICT.  We must download the source to
    # be able to render the recipe (from anything like GIT_FULL_HASH), but we
    # can't know the final build folder until rendering is complete, because
    # package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [
                    os.path.join(m.path, 'conda_build_config.yaml')]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [(m, False, False), ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata,
        # only expand whatever variants we have (i.e. expand top-level variants,
        # not output-only variants)
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True, bypass_env_check=bypass_env_check)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
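# Hypothetical usage sketch for render_recipe (a conda-build internal; the
# exact signature and return shape vary across the conda-build versions
# collected in this section):
#
#     from conda_build.config import Config
#     metadata_tuples = render_recipe('recipes/foo', config=Config())
#     for meta, needs_download, needs_render_in_env in metadata_tuples:
#         print(meta.name(), meta.version(), meta.build_id())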
def main():
    print()
    print("Getting extra source packages.")

    # Force verbose mode
    config.verbose = True

    cwd = os.getcwd()

    # Get the metadata for the recipe
    recipe_dir = os.environ["RECIPE_DIR"]
    metadata = MetaData(recipe_dir)
    print(metadata.name())
    print("-" * 75)
    print('       cwd:', cwd)

    # Figure out the work_dir.
    # Look upwards for a directory with the name 'work'.
    # FIXME: Why does metadata.config.work_dir not return the correct
    # directory?
    bits = split_path(cwd)
    dirname = []
    while bits and bits[-1] != 'work':
        dirname.insert(0, bits.pop(-1))
    dirname = os.path.join(*dirname, '')
    work_dir = bits.pop(-1)
    assert work_dir == 'work'
    build_id = bits.pop(-1)
    croot = os.path.join(*bits)
    work_dir = os.path.join(croot, build_id, 'work')

    if has_only_one_dir(work_dir):
        real_work_dir = work_dir
    else:
        real_work_dir = os.path.join(croot, build_id)

    print('  work dir:', real_work_dir)
    print('conda root:', croot)
    print('  build id:', build_id)
    print('   src dir:', dirname)

    extra_sources_sections = metadata.get_section('extra')['sources']
    for name, source_section in extra_sources_sections.items():
        print()
        print("Extra source: %s" % name)
        print("-" * 75)

        # Create a fake metadata which contains the extra source_section.
        newmetadata = metadata.copy()
        newmetadata.meta['source'] = source_section

        if has_only_one_dir(work_dir):
            extra_work_dir = real_work_dir
        else:
            extra_work_dir = os.path.join(real_work_dir, name)
        newmetadata.config.__class__ = SpecialConfig
        newmetadata.config._work_dir = extra_work_dir
        print("Work Directory:", newmetadata.config.work_dir)

        # Download and extract the source.
        source.provide(newmetadata, newmetadata.config)

        print("-" * 75)

    print()
    print("Extra source packages downloaded and extracted!")
    print()
    print("Work Directory contents (%s)" % real_work_dir)
    print("-" * 75)
    print(os.listdir(real_work_dir))
    print()
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    # Subfolders correspond to different versions
    versions = []
    for sf in os.listdir(op.join(RECIPE_DIR, folder)):
        if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
            # Not a folder
            continue
        try:
            LooseVersion(sf)
        except ValueError:
            logger.error("'{}' does not look like a proper version!"
                         "".format(sf))
            continue
        versions.append(sf)

    # Read the meta.yaml file(s)
    try:
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                return []
    except UnableToParse as e:
        logger.error("Failed to parse recipe {}".format(recipe))
        raise e

    name = metadata.name()
    versions_in_channel = repodata.get_versions(name)

    # Format the README
    template_options = {
        'name': name,
        'about': (metadata.get_section('about') or {}),
        'extra': (metadata.get_section('extra') or {}),
        'versions': versions_in_channel,
        'gh_recipes': 'https://github.com/bioconda/bioconda-recipes/tree/master/recipes/',
        'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)),
        'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(name)
    }

    renderer.render_to_file(
        op.join(OUTPUT_DIR, folder, 'README.rst'),
        'readme.rst_t',
        template_options)

    recipes = []
    for version, version_info in sorted(versions_in_channel.items()):
        t = template_options.copy()
        t.update({
            'Linux': '<i class="fa fa-linux"></i>' if 'linux' in version_info else '',
            'OSX': '<i class="fa fa-apple"></i>' if 'osx' in version_info else '',
            'Version': version
        })
        recipes.append(t)

    return recipes
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print("Generating package READMEs...")
    # TODO obtain information from repodata.json.
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        # versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                continue

        # Format the README
        notes = metadata.get_section("extra").get("notes", "")
        if notes:
            notes = "Notes\n-----\n\n" + notes
        summary = metadata.get_section("about").get("summary", "")
        summaries.append(summary)
        template_options = {
            "title": metadata.name(),
            "title_underline": "=" * len(metadata.name()),
            "summary": summary,
            "home": metadata.get_section("about").get("home", ""),
            "versions": ", ".join(versions),
            "license": metadata.get_section("about").get("license", ""),
            "recipe": (
                "https://github.com/bioconda/bioconda-recipes/tree/master/recipes/"
                + op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))
            ),
            "notes": notes,
        }
        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, "README.rst")
        with open(output_file, "wb") as ofh:
            ofh.write(readme.encode("utf-8"))
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    output_file = op.join(OUTPUT_DIR, folder, 'README.rst')

    # Select meta yaml
    meta_fname = op.join(RECIPE_DIR, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(RECIPE_DIR, folder)):
            dname = op.join(RECIPE_DIR, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            logger.error("No 'meta.yaml' found in %s", folder)
            return []

    # Read the meta.yaml file(s)
    try:
        metadata = MetaData(meta_fname)
    except UnableToParse:
        logger.error("Failed to parse recipe %s", meta_fname)
        return []

    name = metadata.name()
    versions_in_channel = repodata.get_versions(name)
    sorted_versions = sorted(versions_in_channel.keys(),
                             key=VersionOrder, reverse=True)

    # Format the README
    template_options = {
        'name': name,
        'about': (metadata.get_section('about') or {}),
        'extra': (metadata.get_section('extra') or {}),
        'versions': sorted_versions,
        'gh_recipes': 'https://github.com/bioconda/bioconda-recipes/tree/master/recipes/',
        'recipe_path': meta_fname,
        'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(folder)
    }

    renderer.render_to_file(output_file, 'readme.rst_t', template_options)

    versions = []
    for version in sorted_versions:
        version_info = versions_in_channel[version]
        # one table row per version (a name distinct from the loop variable,
        # so 'Version' keeps the version string rather than the row dict)
        row = template_options.copy()
        row.update({
            'Linux': '<i class="fa fa-linux"></i>' if 'linux' in version_info else '',
            'OSX': '<i class="fa fa-apple"></i>' if 'osx' in version_info else '',
            'Version': version
        })
        versions.append(row)

    return versions
import sys
import os
import yaml
import jinja2
import glob

from conda_build.config import Config
from conda_build.metadata import MetaData
from distutils.version import LooseVersion

config = Config()

recipe_metadata = MetaData(os.path.join(sys.argv[1]))

binary_package_glob = os.path.join(
    config.bldpkgs_dir, '{0}*.tar.bz2'.format(recipe_metadata.name()))
binary_package = sorted(glob.glob(binary_package_glob),
                        key=LooseVersion, reverse=True)[0]

print(binary_package)
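# Hypothetical invocation of the script above (script name assumed); it
# prints the newest locally built .tar.bz2 for the recipe at the given path:
#
#     python find_latest_build.py recipes/mypackage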
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    # Subfolders correspond to different versions
    versions = []
    for sf in os.listdir(op.join(RECIPE_DIR, folder)):
        if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
            # Not a folder
            continue
        try:
            LooseVersion(sf)
        except ValueError:
            logger.error("'{}' does not look like a proper version!"
                         "".format(sf))
            continue
        versions.append(sf)

    # Read the meta.yaml file(s)
    try:
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                return []
    except UnableToParse as e:
        logger.error("Failed to parse recipe {}".format(recipe))
        raise e

    ## Get all versions and build numbers for data package
    # Select meta yaml
    meta_fname = op.join(RECIPE_DIR, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(RECIPE_DIR, folder)):
            dname = op.join(RECIPE_DIR, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            logger.error("No 'meta.yaml' found in %s", folder)
            return []
    meta_relpath = meta_fname[len(RECIPE_DIR) + 1:]

    # Read the meta.yaml file(s)
    try:
        recipe_object = Recipe.from_file(RECIPE_DIR, meta_fname)
    except RecipeError as e:
        logger.error("Unable to process %s: %s", meta_fname, e)
        return []

    # Format the README
    recipes = []
    for package in sorted(list(set(recipe_object.package_names))):
        versions_in_channel = set(repodata.get_package_data(
            ['version', 'build_number'],
            channels='ggd-genomics', name=package))
        sorted_versions = sorted(versions_in_channel,
                                 key=lambda x: (VersionOrder(x[0]), x[1]),
                                 reverse=False)
        if sorted_versions:
            depends = [
                depstring.split(' ', 1) if ' ' in depstring else (depstring, '')
                for depstring in repodata.get_package_data(
                    'depends', name=package,
                    version=sorted_versions[0][0],
                    build_number=sorted_versions[0][1],
                )[0]
            ]
        else:
            depends = []

        name = metadata.name()
        versions_in_channel = repodata.get_versions(name)

        template_options = {
            'name': name,
            'about': (metadata.get_section('about') or {}),
            'species': (metadata.get_section('about')["identifiers"]["species"]
                        if "species" in metadata.get_section('about')["identifiers"]
                        else {}),
            'genome_build': (metadata.get_section('about')["identifiers"]["genome-build"]
                             if "genome-build" in metadata.get_section('about')["identifiers"]
                             else {}),
            'ggd_channel': (metadata.get_section('about')["tags"]["ggd-channel"]
                            if "ggd-channel" in metadata.get_section('about')["tags"]
                            else "genomics"),
            'extra': (metadata.get_section('extra') or {}),
            'versions': ["-".join(str(w) for w in v) for v in sorted_versions],
            'gh_recipes': 'https://github.com/gogetdata/ggd-recipes/tree/master/recipes/',
            'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)),
            'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(name)
        }

        renderer.render_to_file(
            op.join(OUTPUT_DIR, name, 'README.rst'),
            'readme.rst_t',
            template_options)

        latest_version = "-".join(str(w) for w in sorted_versions[-1])
        for version, version_info in sorted(versions_in_channel.items()):
            t = template_options.copy()
            if 'noarch' in version_info:
                t.update({
                    'Linux': ('<i class="fa fa-linux"></i>'
                              if 'linux' in version_info
                              else '<i class="fa fa-dot-circle-o"></i>'),
                    'OSX': ('<i class="fa fa-apple"></i>'
                            if 'osx' in version_info
                            else '<i class="fa fa-dot-circle-o"></i>'),
                    'NOARCH': ('<i class="fa fa-desktop"></i>'
                               if 'noarch' in version_info else ''),
                    'Version': latest_version  ## The latest version
                    # 'Version': version
                })
            else:
                t.update({
                    'Linux': ('<i class="fa fa-linux"></i>'
                              if 'linux' in version_info else ''),
                    'OSX': ('<i class="fa fa-apple"></i>'
                            if 'osx' in version_info else ''),
                    'NOARCH': ('<i class="fa fa-desktop"></i>'
                               if 'noarch' in version_info else ''),
                    'Version': latest_version  ## The latest version
                    # 'Version': version
                })
            recipes.append(t)

    return recipes
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')

    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source
    # goes into a different build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build
    # folder.  There's no way around it AFAICT.  We must download the source to
    # be able to render the recipe (from anything like GIT_FULL_HASH), but we
    # can't know the final build folder until rendering is complete, because
    # package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [
                    os.path.join(m.path, 'conda_build_config.yaml')]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [(m, False, False), ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata,
        # only expand whatever variants we have (i.e. expand top-level variants,
        # not output-only variants)
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True, bypass_env_check=bypass_env_check)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
def __call__(self, args):
    if not os.path.isdir(args.recipe_directory):
        raise IOError("The recipe directory should be the directory of the "
                      "conda-recipe. Got {}".format(args.recipe_directory))

    meta = MetaData(args.recipe_directory)
    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    if not args.no_git_repo:
        create_git_repo(feedstock_directory, meta)
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')

    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}
    if m.final:
        rendered_metadata = [(m, False, False), ]
    else:
        index, index_ts = get_build_index(m.config, m.config.build_subdir)
        # when building, we don't want to fully expand all outputs into metadata,
        # only expand whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants) if variants
                    else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            stub_subpackages=True)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
def __call__(self, args):
    meta = MetaData(args.recipe_directory)
    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    if not args.no_git_repo:
        create_git_repo(feedstock_directory, meta)
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')

    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source
    # goes into a different build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build
    # folder.  There's no way around it AFAICT.  We must download the source to
    # be able to render the recipe (from anything like GIT_FULL_HASH), but we
    # can't know the final build folder until rendering is complete, because
    # package names can have variant jinja2 in them.
    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants'):
            m.config.variants = [m.config.variant]
        rendered_metadata = [(m, False, False), ]
    else:
        index, index_ts = get_build_index(
            m.config.build_subdir,
            bldpkgs_dir=m.config.bldpkgs_dir,
            output_folder=m.config.output_folder,
            channel_urls=m.config.channel_urls,
            omit_defaults=m.config.override_channels,
            debug=m.config.debug,
            verbose=m.config.verbose,
            locking=m.config.locking,
            timeout=m.config.timeout)
        # when building, we don't want to fully expand all outputs into metadata,
        # only expand whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants) if variants
                    else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True, bypass_env_check=bypass_env_check)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print('Generating package READMEs...')

    repodata = defaultdict(lambda: defaultdict(list))
    for platform in ['linux', 'osx']:
        for pkg in utils.get_channel_packages(channel='bioconda',
                                              platform=platform):
            d = parse_pkgname(pkg)
            repodata[d['name']][d['version']].append(platform)
    # e.g., repodata = {
    #     'package1': {
    #         '0.1': ['linux'],
    #         '0.2': ['linux', 'osx'],
    #     },
    # }

    summaries = []
    recipes = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        # versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                continue

        name = metadata.name()
        versions_in_channel = sorted(repodata[name].keys())

        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            if isinstance(notes, list):
                notes = "\n".join(notes)
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions_in_channel),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                       op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }

        # Add additional keys to template_options for use in the recipes
        # datatable.
        template_options['Package'] = (
            '<a href="recipes/{0}/README.html">{0}</a>'.format(name))
        for version in versions_in_channel:
            t = template_options.copy()
            if 'linux' in repodata[name][version]:
                t['Linux'] = '<i class="fa fa-linux"></i>'
            if 'osx' in repodata[name][version]:
                t['OSX'] = '<i class="fa fa-apple"></i>'
            t['Version'] = version
            recipes.append(t)

        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')

        # avoid re-writing the same contents, which invalidates the
        # sphinx-build cache
        if os.path.exists(output_file):
            if open(output_file, encoding='utf-8').read() == readme:
                continue
        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))

    # render the recipes datatable page
    t = Template(RECIPES_TEMPLATE)
    recipes_contents = t.render(
        recipes=recipes,
        # order of columns in the table; must be keys in template_options
        keys=['Package', 'Version', 'License', 'Linux', 'OSX'])
    recipes_rst = 'source/recipes.rst'
    if not (os.path.exists(recipes_rst) and
            (open(recipes_rst).read() == recipes_contents)):
        with open(recipes_rst, 'w') as fout:
            fout.write(recipes_contents)