def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print('Generating package READMEs...')
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        try:
            metadata = MetaData(op.join(RECIPE_DIR, folder))
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        except SystemExit:
            if versions:
                metadata = MetaData(op.join(RECIPE_DIR, folder, versions[0]))
            else:
                # ignore non-recipe folders
                continue

        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                       op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }
        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')
        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))
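# The README_TEMPLATE used by setup() above is defined elsewhere in this
# module; a minimal sketch of what it could look like, inferred only from the
# placeholder names in template_options (the RST layout is an assumption):
README_TEMPLATE_SKETCH = """\
{title}
{title_underline}

{summary}

Home: {home}
Versions: {versions}
License: {license}
Recipe: {recipe}

{notes}
"""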
def main():
    recipe_dir = os.environ["RECIPE_DIR"]
    conda_platform = 'win-32' if os.environ["ARCH"] == '32' else 'win-64'
    prefix = os.environ['PREFIX']
    metadata = MetaData(recipe_dir)
    msys2_tar_xz_url = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['url']
    msys2_md5 = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['md5']
    mv_srcs_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-srcs']
    mv_dsts_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-dsts']
    msys2_tar_xz = get_tar_xz(msys2_tar_xz_url, msys2_md5)
    tar = tarfile.open(msys2_tar_xz, 'r|xz')
    tar.extractall(path=prefix)
    try:
        patches = metadata.get_section(
            'extra')['msys2-binaries'][conda_platform]['patches']
    except KeyError:  # narrowed from a bare except: the only expected failure
        patches = []  # is a missing 'patches' key
    for patchname in patches:
        patchset = patch.fromfile(join(getenv('RECIPE_DIR'), patchname))
        patchset.apply(1, root=prefix)
    # shutil is a bit funny (like mv) with regards to how it treats the
    # destination depending on whether it is an existing directory or not
    # (i.e. moving into that versus moving as that).
    # Therefore, the rules employed are:
    # 1. If mv_dst ends with a '/' it is a directory that you want mv_src
    #    moved into.
    # 2. If mv_src has a wildcard, mv_dst is a directory that you want mv_src
    #    moved into.
    # In these cases we makedirs(mv_dst) and then call move(mv_src, mv_dst)
    # .. otherwise we makedirs(dirname(mv_dst)) and call move(mv_src, mv_dst)
    # .. however, if no mv_srcs exist we don't makedirs at all.
    for mv_src, mv_dst in zip(mv_srcs_list, mv_dsts_list):
        mv_srcs = glob(join(prefix, normpath(mv_src)))
        mv_dst_definitely_dir = ('*' in mv_src or mv_dst.endswith('/')
                                 or len(mv_srcs) > 1)
        if mv_srcs:
            mv_dst = join(prefix, normpath(mv_dst))
            mv_dst_mkdir = mv_dst if mv_dst_definitely_dir else dirname(mv_dst)
            try:
                makedirs(mv_dst_mkdir)
            except OSError:  # narrowed from a bare except: dir already exists
                pass
            for mv_src in mv_srcs:
                move(mv_src, mv_dst)
    tar.close()
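# For orientation, a hypothetical parsed 'extra' section matching what main()
# above reads from the recipe's meta.yaml (all values are made up):
EXAMPLE_EXTRA = {
    'msys2-binaries': {
        'win-64': {
            'url': 'https://example.invalid/msys2-base-x86_64.tar.xz',
            'md5': 'd41d8cd98f00b204e9800998ecf8427e',
            'mv-srcs': ['msys64/usr/bin/*.exe'],  # wildcard => dst is a dir
            'mv-dsts': ['Library/usr/bin/'],      # trailing '/' => move into
            'patches': [],                        # optional; defaults to []
        },
    },
}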
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def main():
    metadata = MetaData(os.environ["RECIPE_DIR"])
    build_id = os.getcwd().split(os.path.sep)[-3]
    print("build_id:", build_id)  # was a Python 2 print statement
    for name, section in metadata.get_section("extra").items():
        source.provide(
            Source(section),
            config.Config(build_id=build_id))
def main():
    recipe_dir = os.environ["RECIPE_DIR"]
    src_dir = os.environ["SRC_DIR"]
    main_work_dir = source.WORK_DIR

    metadata = MetaData(recipe_dir)
    extra_sources_sections = metadata.get_section('extra')['sources']

    for name, source_section in extra_sources_sections.items():
        # Override the location to clone into
        source.WORK_DIR = main_work_dir + '/' + name
        os.makedirs(source.WORK_DIR)

        # Download source
        source.provide(recipe_dir, source_section)
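# A hypothetical shape for extra['sources'] as consumed by main() above; each
# value is an ordinary conda-build source section, and each named source ends
# up in its own WORK_DIR/<name> subdirectory (names and URLs are made up):
EXAMPLE_EXTRA_SOURCES = {
    'plugin-a': {'git_url': 'https://example.invalid/a.git', 'git_rev': 'v1.0'},
    'data-b': {'url': 'https://example.invalid/b.tar.gz',
               'md5': 'd41d8cd98f00b204e9800998ecf8427e'},
}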
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18, 19]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
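# The dependency-retry logic above finds candidate recipes with a plain
# filesystem glob rather than conda's resolver; a standalone sketch of just
# that lookup (the helper name is ours, not conda-build's):
from glob import glob
from os.path import exists

def candidate_recipe_dirs(dep_pkg):
    # 'foo-[v0-9][0-9.]*' matches sibling dirs like 'foo-1.2' or 'foo-v2',
    # but not unrelated names such as 'foo-bar'.
    dirs = glob(dep_pkg + '-[v0-9][0-9.]*')
    if exists(dep_pkg):  # an unversioned recipe directory also counts
        dirs.append(dep_pkg)
    return dirs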
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                channel_urls = args.channel or ()
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to
                    # resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the
                    # source information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    # Subfolders correspond to different versions
    versions = []
    for sf in os.listdir(op.join(RECIPE_DIR, folder)):
        if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
            # Not a folder
            continue
        try:
            LooseVersion(sf)
        except ValueError:
            logger.error("'{}' does not look like a proper version!"
                         "".format(sf))
            continue
        versions.append(sf)

    # Read the meta.yaml file(s)
    try:
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                return []
    except UnableToParse as e:
        logger.error("Failed to parse recipe {}".format(recipe))
        raise e

    ## Get all versions and build numbers for data package

    # Select meta yaml
    meta_fname = op.join(RECIPE_DIR, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(RECIPE_DIR, folder)):
            dname = op.join(RECIPE_DIR, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            logger.error("No 'meta.yaml' found in %s", folder)
            return []
    meta_relpath = meta_fname[len(RECIPE_DIR)+1:]

    # Read the meta.yaml file(s)
    try:
        recipe_object = Recipe.from_file(RECIPE_DIR, meta_fname)
    except RecipeError as e:
        logger.error("Unable to process %s: %s", meta_fname, e)
        return []

    # Format the README
    for package in sorted(list(set(recipe_object.package_names))):
        versions_in_channel = set(repodata.get_package_data(
            ['version', 'build_number'],
            channels='ggd-genomics', name=package))
        sorted_versions = sorted(versions_in_channel,
                                 key=lambda x: (VersionOrder(x[0]), x[1]),
                                 reverse=False)
        if sorted_versions:
            depends = [
                depstring.split(' ', 1) if ' ' in depstring else (depstring, '')
                for depstring in repodata.get_package_data(
                    'depends', name=package,
                    version=sorted_versions[0][0],
                    build_number=sorted_versions[0][1],
                )[0]
            ]
        else:
            depends = []

        # Format the README
        name = metadata.name()
        versions_in_channel = repodata.get_versions(name)
        template_options = {
            'name': name,
            'about': (metadata.get_section('about') or {}),
            'species': (metadata.get_section('about')["identifiers"]["species"]
                        if "species" in metadata.get_section('about')["identifiers"]
                        else {}),
            'genome_build': (metadata.get_section('about')["identifiers"]["genome-build"]
                             if "genome-build" in metadata.get_section('about')["identifiers"]
                             else {}),
            'ggd_channel': (metadata.get_section('about')["tags"]["ggd-channel"]
                            if "ggd-channel" in metadata.get_section('about')["tags"]
                            else "genomics"),
            'extra': (metadata.get_section('extra') or {}),
            'versions': ["-".join(str(w) for w in v) for v in sorted_versions],
            'gh_recipes': 'https://github.com/gogetdata/ggd-recipes/tree/master/recipes/',
            'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)),
            'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(name)
        }

        renderer.render_to_file(
            op.join(OUTPUT_DIR, name, 'README.rst'),
            'readme.rst_t',
            template_options)

        recipes = []
        latest_version = "-".join(str(w) for w in sorted_versions[-1])
        for version, version_info in sorted(versions_in_channel.items()):
            t = template_options.copy()
            if 'noarch' in version_info:
                t.update({
                    'Linux': ('<i class="fa fa-linux"></i>'
                              if 'linux' in version_info
                              else '<i class="fa fa-dot-circle-o"></i>'),
                    'OSX': ('<i class="fa fa-apple"></i>'
                            if 'osx' in version_info
                            else '<i class="fa fa-dot-circle-o"></i>'),
                    'NOARCH': ('<i class="fa fa-desktop"></i>'
                               if 'noarch' in version_info else ''),
                    'Version': latest_version  ## The latest version
                    # 'Version': version
                })
            else:
                t.update({
                    'Linux': ('<i class="fa fa-linux"></i>'
                              if 'linux' in version_info else ''),
                    'OSX': ('<i class="fa fa-apple"></i>'
                            if 'osx' in version_info else ''),
                    'NOARCH': ('<i class="fa fa-desktop"></i>'
                               if 'noarch' in version_info else ''),
                    'Version': latest_version  ## The latest version
                    # 'Version': version
                })
            recipes.append(t)

    return recipes
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print("Generating package READMEs...")
    # TODO obtain information from repodata.json.
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        # versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                continue

        # Format the README
        notes = metadata.get_section("extra").get("notes", "")
        if notes:
            notes = "Notes\n-----\n\n" + notes
        summary = metadata.get_section("about").get("summary", "")
        summaries.append(summary)
        template_options = {
            "title": metadata.name(),
            "title_underline": "=" * len(metadata.name()),
            "summary": summary,
            "home": metadata.get_section("about").get("home", ""),
            "versions": ", ".join(versions),
            "license": metadata.get_section("about").get("license", ""),
            "recipe": (
                "https://github.com/bioconda/bioconda-recipes/tree/master/recipes/"
                + op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))
            ),
            "notes": notes,
        }
        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, "README.rst")
        with open(output_file, "wb") as ofh:
            ofh.write(readme.encode("utf-8"))
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    # Subfolders correspond to different versions
    versions = []
    for sf in os.listdir(op.join(RECIPE_DIR, folder)):
        if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
            # Not a folder
            continue
        try:
            LooseVersion(sf)
        except ValueError:
            logger.error("'{}' does not look like a proper version!"
                         "".format(sf))
            continue
        versions.append(sf)

    # Read the meta.yaml file(s)
    try:
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                return []
    except UnableToParse as e:
        logger.error("Failed to parse recipe {}".format(recipe))
        raise e

    name = metadata.name()
    versions_in_channel = repodata.get_versions(name)

    # Format the README
    template_options = {
        'name': name,
        'about': (metadata.get_section('about') or {}),
        'extra': (metadata.get_section('extra') or {}),
        'versions': versions_in_channel,
        'gh_recipes': 'https://github.com/bioconda/bioconda-recipes/tree/master/recipes/',
        'recipe_path': op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR)),
        'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(name)
    }

    renderer.render_to_file(op.join(OUTPUT_DIR, folder, 'README.rst'),
                            'readme.rst_t', template_options)

    recipes = []
    for version, version_info in sorted(versions_in_channel.items()):
        t = template_options.copy()
        t.update({
            'Linux': '<i class="fa fa-linux"></i>' if 'linux' in version_info else '',
            'OSX': '<i class="fa fa-apple"></i>' if 'osx' in version_info else '',
            'Version': version
        })
        recipes.append(t)
    return recipes
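# For orientation: a hypothetical return value of repodata.get_versions(name)
# as consumed above -- version strings mapped to the platforms that have a
# build for that version (package versions are made up):
example_versions_in_channel = {
    '1.9': ['linux', 'osx'],
    '1.10': ['linux'],
}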
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def generate_readme(folder, repodata, renderer):
    """Generates README.rst for the recipe in folder

    Args:
      folder: Toplevel folder name in recipes directory
      repodata: RepoData object
      renderer: Renderer object

    Returns:
      List of template_options for each concurrent version for which
      meta.yaml files exist in the recipe folder and its subfolders
    """
    output_file = op.join(OUTPUT_DIR, folder, 'README.rst')

    # Select meta yaml
    meta_fname = op.join(RECIPE_DIR, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        for item in os.listdir(op.join(RECIPE_DIR, folder)):
            dname = op.join(RECIPE_DIR, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            logger.error("No 'meta.yaml' found in %s", folder)
            return []

    # Read the meta.yaml file(s)
    try:
        metadata = MetaData(meta_fname)
    except UnableToParse:
        logger.error("Failed to parse recipe %s", meta_fname)
        return []

    name = metadata.name()
    versions_in_channel = repodata.get_versions(name)
    sorted_versions = sorted(versions_in_channel.keys(),
                             key=VersionOrder, reverse=True)

    # Format the README
    template_options = {
        'name': name,
        'about': (metadata.get_section('about') or {}),
        'extra': (metadata.get_section('extra') or {}),
        'versions': sorted_versions,
        'gh_recipes': 'https://github.com/bioconda/bioconda-recipes/tree/master/recipes/',
        'recipe_path': meta_fname,
        'Package': '<a href="recipes/{0}/README.html">{0}</a>'.format(folder)
    }
    renderer.render_to_file(output_file, 'readme.rst_t', template_options)

    versions = []
    for version in sorted_versions:
        version_info = versions_in_channel[version]
        # The original rebound the loop variable `version` to the copied dict,
        # so 'Version' ended up pointing at the dict itself; use a separate
        # name to keep the version string.
        entry = template_options.copy()
        entry.update({
            'Linux': '<i class="fa fa-linux"></i>' if 'linux' in version_info else '',
            'OSX': '<i class="fa fa-apple"></i>' if 'osx' in version_info else '',
            'Version': version
        })
        versions.append(entry)
    return versions
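# VersionOrder (conda's version comparator) sorts version strings
# semantically rather than lexically, which is why it is used above instead
# of a plain sorted(); a quick illustration:
from conda.models.version import VersionOrder  # import path varies by conda release

vs = ['1.10', '1.9', '1.2']
print(sorted(vs))                    # ['1.10', '1.2', '1.9']  (lexical)
print(sorted(vs, key=VersionOrder))  # ['1.2', '1.9', '1.10']  (semantic)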
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY",
                     "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
                if all_versions[lang]:
                    raise RuntimeError(
                        "%s must be major.minor, like %s, not %s"
                        % (conda_version[lang], all_versions[lang][-1] / 10,
                           version)
                    )
                else:
                    raise RuntimeError("%s must be major.minor, not %s"
                                       % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls,
            override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls,
                    override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this "
                              "configuration." % m.dist())
                        continue
                    build.build(
                        m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or \
                            error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found"
                                       + " recipe directory, so building "
                                       + "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found"
                                           + " recipe directory, so building "
                                           + "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls,
                        override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir and generate
    a README.rst file.
    """
    print('Generating package READMEs...')

    repodata = defaultdict(lambda: defaultdict(list))
    for platform in ['linux', 'osx']:
        for pkg in utils.get_channel_packages(channel='bioconda',
                                              platform=platform):
            d = parse_pkgname(pkg)
            repodata[d['name']][d['version']].append(platform)
    # e.g., repodata = {
    #   'package1': {
    #       '0.1': ['linux'],
    #       '0.2': ['linux', 'osx'],
    #   },
    # }

    summaries = []
    recipes = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        # versions.sort(key=LooseVersion, reverse=True)

        # Read the meta.yaml file
        recipe = op.join(RECIPE_DIR, folder, "meta.yaml")
        if op.exists(recipe):
            metadata = MetaData(recipe)
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        else:
            if versions:
                recipe = op.join(RECIPE_DIR, folder, versions[0], "meta.yaml")
                metadata = MetaData(recipe)
            else:
                # ignore non-recipe folders
                continue

        name = metadata.name()
        versions_in_channel = sorted(repodata[name].keys())

        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            if isinstance(notes, list):
                notes = "\n".join(notes)
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions_in_channel),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                       op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }

        # Add additional keys to template_options for use in the recipes
        # datatable.
        template_options['Package'] = (
            '<a href="recipes/{0}/README.html">{0}</a>'.format(name))
        for version in versions_in_channel:
            t = template_options.copy()
            if 'linux' in repodata[name][version]:
                t['Linux'] = '<i class="fa fa-linux"></i>'
            if 'osx' in repodata[name][version]:
                t['OSX'] = '<i class="fa fa-apple"></i>'
            t['Version'] = version
            recipes.append(t)

        readme = README_TEMPLATE.format(**template_options)

        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')

        # avoid re-writing the same contents, which invalidates the
        # sphinx-build cache
        if os.path.exists(output_file):
            if open(output_file, encoding='utf-8').read() == readme:
                continue

        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))

    # render the recipes datatable page
    t = Template(RECIPES_TEMPLATE)
    recipes_contents = t.render(
        recipes=recipes,
        # order of columns in the table; must be keys in template_options
        keys=['Package', 'Version', 'License', 'Linux', 'OSX'])
    recipes_rst = 'source/recipes.rst'
    if not (os.path.exists(recipes_rst) and
            (open(recipes_rst).read() == recipes_contents)):
        with open(recipes_rst, 'w') as fout:
            fout.write(recipes_contents)
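# RECIPES_TEMPLATE is defined elsewhere in this module; a minimal,
# hypothetical jinja2 template that would work with the t.render() call
# above (the real table markup may differ):
RECIPES_TEMPLATE_SKETCH = """\
Recipes
=======

.. raw:: html

   <table>
   <tr>{% for key in keys %}<th>{{ key }}</th>{% endfor %}</tr>
   {% for r in recipes %}<tr>{% for key in keys %}<td>{{ r.get(key, '') }}</td>{% endfor %}</tr>
   {% endfor %}</table>
"""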
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2 and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels,
                                include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to
                    # resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the
                    # source information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def main():
    print()
    print("Getting extra source packages.")

    # Force verbose mode
    config.verbose = True

    cwd = os.getcwd()

    # Get the metadata for the recipe
    recipe_dir = os.environ["RECIPE_DIR"]
    metadata = MetaData(recipe_dir)
    print(metadata.name())
    print("-" * 75)
    print('       cwd:', cwd)

    # Figure out the work_dir
    # Look upwards for a directory with the name 'work'.
    # FIXME: Why does metadata.config.work_dir not return the correct
    # directory?
    bits = split_path(cwd)
    dirname = []
    while bits and bits[-1] != 'work':
        dirname.insert(0, bits.pop(-1))
    dirname = os.path.join(*dirname, '')
    work_dir = bits.pop(-1)
    assert work_dir == 'work'
    build_id = bits.pop(-1)
    croot = os.path.join(*bits)
    work_dir = os.path.join(croot, build_id, 'work')
    if has_only_one_dir(work_dir):
        real_work_dir = work_dir
    else:
        real_work_dir = os.path.join(croot, build_id)
    print('  work dir:', real_work_dir)
    print('conda root:', croot)
    print('  build id:', build_id)
    print('   src dir:', dirname)

    extra_sources_sections = metadata.get_section('extra')['sources']
    for name, source_section in extra_sources_sections.items():
        print()
        print("Extra source: %s" % name)
        print("-" * 75)

        # Create a fake metadata which contains the extra source_section.
        newmetadata = metadata.copy()
        newmetadata.meta['source'] = source_section
        if has_only_one_dir(work_dir):
            extra_work_dir = real_work_dir
        else:
            extra_work_dir = os.path.join(real_work_dir, name)
        newmetadata.config.__class__ = SpecialConfig
        newmetadata.config._work_dir = extra_work_dir
        print("Work Directory:", newmetadata.config.work_dir)

        # Download+extract source.
        source.provide(newmetadata, newmetadata.config)
        print("-" * 75)

    print()
    print("Extra source packages downloaded and extracted!")
    print()
    print("Work Directory contents (%s)" % real_work_dir)
    print("-" * 75)
    print(os.listdir(real_work_dir))
    print()
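# Worked example of the path walk in main() above, assuming a typical
# conda-bld layout (all names hypothetical):
#
#   cwd  = /opt/conda/conda-bld/pkg_1470000000/work/src
#   bits = ['/', 'opt', 'conda', 'conda-bld', 'pkg_1470000000', 'work', 'src']
#
# The while loop pops 'src' into dirname and stops at 'work', so:
#
#   dirname  = 'src/'
#   build_id = 'pkg_1470000000'
#   croot    = '/opt/conda/conda-bld'
#   work_dir = '/opt/conda/conda-bld/pkg_1470000000/work'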