def test_recipe_path_meta(tmpdir, recwarn):
    """A meta.yaml *file* path is accepted with a warning; a folder path resolves to the file."""
    recipe_dir = tmpdir.mkdir("recipe-path")
    meta_file = recipe_dir.join("meta.yaml")
    meta_file.write("")

    # Passing the file itself works, but emits a UserWarning and returns the folder.
    assert find_recipe(str(meta_file)) == str(recipe_dir)
    expected_warning = (
        "RECIPE_PATH received is a file. File: {}\n"
        "It should be a path to a folder. \n"
        "Forcing conda-build to use the recipe file.".format(str(meta_file))
    )
    assert expected_warning == str(recwarn.pop(UserWarning).message)

    # Passing the folder resolves to the meta file inside it, with no warning left over.
    assert find_recipe(str(recipe_dir)) == str(meta_file)
    assert not recwarn.list
def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] # decouple this config from whatever was fed in. People must change config by # accessing and changing this attribute. self.config = copy.copy(get_or_merge_config(config, variant=variant)) if isfile(path): self.meta_path = path self.path = os.path.dirname(path) else: self.meta_path = find_recipe(path) self.path = os.path.dirname(self.meta_path) self.requirements_path = join(self.path, 'requirements.txt') # Start with bare-minimum contents so we can call environ.get_dict() with impunity # We'll immediately replace these contents in parse_again() self.meta = dict() # This is the 'first pass' parse of meta.yaml, so not all variables are defined yet # (e.g. GIT_FULL_HASH, etc. are undefined) # Therefore, undefined jinja variables are permitted here # In the second pass, we'll be more strict. See build.build() # Primarily for debugging. Ensure that metadata is not altered after "finalizing" self.parse_again(permit_undefined_jinja=True) if 'host' in self.get_section('requirements'): self.config.has_separate_host_prefix = True self.config.disable_pip = self.disable_pip
def main():
    """CLI entry point for ``conda render``: parse args, render the recipe,
    and print either the output package path or the rendered YAML."""
    p = get_render_parser()
    p.add_argument(
        '-f', '--file',
        action="store",
        help="write YAML to file, given as argument here.\
 Overwrites existing files."
    )
    # we do this one separately because we only allow one entry to conda render
    p.add_argument(
        'recipe',
        action="store",
        metavar='RECIPE_PATH',
        choices=RecipeCompleter(),
        help="Path to recipe directory.",
    )
    # this is here because we have a different default than build
    p.add_argument(
        '--verbose',
        action='store_true',
        help='Enable verbose output from download tools and progress updates',
    )
    args = p.parse_args()
    set_language_env_vars(args, p)

    # render_recipe returns (metadata, need_download, need_reparse); only the
    # metadata is needed here.
    metadata, _, _ = render_recipe(find_recipe(args.recipe),
                                   no_download_source=args.no_source,
                                   verbose=args.verbose)
    if args.output:
        # --output: print where the built package would land, nothing else.
        print(bldpkg_path(metadata))
    else:
        # Otherwise emit the fully rendered meta.yaml (to args.file if given).
        print(output_yaml(metadata, args.file))
def __init__(self, path, config=None):
    """Load and first-pass-parse a recipe.

    :param path: path to either a recipe folder or the meta file itself
    :param config: optional Config; a default Config is created when omitted
    """
    # Jinja variables referenced but undefined during the permissive first parse.
    self.undefined_jinja_vars = []
    if not config:
        config = Config()

    self.config = config

    # ``path`` may be the meta file itself or a folder to search.
    if isfile(path):
        self.meta_path = path
        self.path = os.path.dirname(path)
    else:
        self.meta_path = find_recipe(path)
        self.path = os.path.dirname(self.meta_path)
    self.requirements_path = join(self.path, 'requirements.txt')

    # Start with bare-minimum contents so we can call environ.get_dict() with impunity
    # We'll immediately replace these contents in parse_again()
    self.meta = parse("package:\n"
                      " name: uninitialized", path=self.meta_path,
                      config=self.config)

    # This is the 'first pass' parse of meta.yaml, so not all variables are defined yet
    # (e.g. GIT_FULL_HASH, etc. are undefined)
    # Therefore, undefined jinja variables are permitted here
    # In the second pass, we'll be more strict. See build.build()
    self.parse_again(config=config, permit_undefined_jinja=True)
    # NOTE(review): self.disable_pip is presumably derived from the parsed
    # meta — confirm against the enclosing class.
    self.config.disable_pip = self.disable_pip
def main():
    """CLI entry point for ``conda render`` (older variant): render the recipe
    and print/write either the output package path or the dumped YAML."""
    p = get_render_parser()
    p.add_argument(
        '-f', '--file',
        action="store",
        help="write YAML to file, given as argument here.\
 Overwrites existing files."
    )
    # we do this one separately because we only allow one entry to conda render
    p.add_argument(
        'recipe',
        action="store",
        metavar='RECIPE_PATH',
        choices=RecipeCompleter(),
        help="Path to recipe directory.",
    )
    args = p.parse_args()
    set_language_env_vars(args, p)

    metadata = render_recipe(find_recipe(args.recipe), no_download_source=args.no_source)
    if args.output:
        # --output: print where the built package would land, nothing else.
        print(bldpkg_path(metadata))
    else:
        # Dump the rendered meta via the custom dumper so nesting is indented
        # consistently.
        output = yaml.dump(MetaYaml(metadata.meta), Dumper=IndentDumper,
                           default_flow_style=False, indent=4)
        if args.file:
            with open(args.file, "w") as f:
                f.write(output)
        else:
            print(output)
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, **kwargs):
    """Render and build the given recipes (paths and/or metadata objects)."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    config = get_or_merge_config(config, **kwargs)
    inputs = _ensure_list(recipe_paths_or_metadata)

    # Glob-expand the string entries, then locate the recipe each match holds.
    path_entries = [entry for entry in inputs if isinstance(entry, string_types)]
    located = []
    for candidate in list(_expand_globs(path_entries, os.getcwd())):
        try:
            located.append(find_recipe(candidate))
        except IOError:
            pass

    # Already-rendered metadata objects pass straight through.
    located.extend(entry for entry in inputs if hasattr(entry, 'config'))

    # Normalize any relative recipe path against the current directory.
    cwd = os.getcwd()
    normalized = [entry if hasattr(entry, "config") or os.path.isabs(entry)
                  else os.path.normpath(os.path.join(cwd, entry))
                  for entry in located]

    return build_tree(normalized, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def test_find_recipe_multipe_base():
    """When meta files exist at several levels, the base-level one wins."""
    with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)):
        base_meta = os.path.join(tmp, "meta.yaml")
        nested = (os.path.join(dB, "meta.yaml"), os.path.join(dC, "conda.yaml"))
        for path in (base_meta,) + nested:
            makefile(path)
        # multiple meta files, use the one in base level
        assert utils.find_recipe(tmp) == base_meta
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, variants=None, stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")

    config = get_or_merge_config(config, **kwargs)

    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}

    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # Reject anything that is neither a path string nor a metadata object.
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError("Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [p for p in recipe_paths_or_metadata if isinstance(p, string_types)]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # Only recipe folders or recognized meta filenames are candidates;
        # unresolvable ones are silently skipped.
        if (os.path.isdir(recipe) or
                (os.path.isfile(recipe) and
                 os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
            try:
                recipes.append(find_recipe(recipe))
            except IOError:
                continue
    # Metadata objects are already rendered; pass them through unchanged.
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            absolute_recipes.append(recipe)
        else:
            # Normalize relative paths and fail early on missing ones.
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError("Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)

    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    # NOTE(review): sorted() assumes the entries are mutually comparable
    # (all strings, or metadata objects defining ordering) — confirm.
    return build_tree(sorted(absolute_recipes), config, stats,
                      build_only=build_only, post=post, notest=notest, variants=variants)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, variants=None, stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")

    config = get_or_merge_config(config, **kwargs)

    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}

    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # Reject anything that is neither a path string nor a metadata object.
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError("Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [p for p in recipe_paths_or_metadata if isinstance(p, string_types)]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # Only recipe folders or recognized meta filenames are candidates;
        # unresolvable ones are silently skipped.
        if (os.path.isdir(recipe) or
                (os.path.isfile(recipe) and
                 os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
            try:
                recipes.append(find_recipe(recipe))
            except IOError:
                continue
    # Metadata objects are already rendered; pass them through unchanged.
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            absolute_recipes.append(recipe)
        else:
            # Normalize relative paths and fail early on missing ones.
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError("Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)

    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    return build_tree(absolute_recipes, config, stats,
                      build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, variants=variants)
def test_find_recipe_multipe_bad():
    """find_recipe raises when the base level has no meta file, or too many."""
    with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)):
        for nested in (os.path.join(dB, "meta.yaml"), os.path.join(dC, "conda.yaml")):
            makefile(nested)
        # nothing in base
        with pytest.raises(IOError):
            utils.find_recipe(tmp)

        for base in (os.path.join(tmp, "meta.yaml"), os.path.join(tmp, "conda.yaml")):
            makefile(base)
        # too many in base
        with pytest.raises(IOError):
            utils.find_recipe(tmp)
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, **kwargs):
    """Render and build the given recipes (paths and/or metadata objects)."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), (
        "post must be boolean or None. Remember, you must pass "
        "other arguments (config) by keyword.")
    config = get_or_merge_config(config, **kwargs)
    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # Reject anything that is neither a path string nor a metadata object.
    for recipe in recipe_paths_or_metadata:
        if not any(
                (hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError(
                "Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [
        p for p in recipe_paths_or_metadata if isinstance(p, string_types)
    ]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # Paths whose recipe cannot be located are silently skipped.
        try:
            recipes.append(find_recipe(recipe))
        except IOError:
            pass
    # Metadata objects are already rendered; pass them through unchanged.
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            absolute_recipes.append(recipe)
        else:
            # Normalize relative paths and fail early on missing ones.
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError(
                    "Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)
    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def test_find_recipe():
    """Each recognized meta filename, at any level, is found on its own."""
    with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)):
        candidates = (
            os.path.join(tmp, "meta.yaml"),
            os.path.join(dA, "meta.yml"),
            os.path.join(dB, "conda.yaml"),
            os.path.join(dC, "conda.yml"),
        )
        # check that each of these are valid recipes
        for candidate in candidates:
            makefile(candidate)
            assert utils.find_recipe(tmp) == candidate
            os.remove(candidate)
def test_find_recipe_relative():
    """A relative recipe path still resolves to the absolute meta path."""
    with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)):
        expected = os.path.join(dA, "meta.yaml")
        makefile(expected)
        # check that even when given a relative recipe path we still return
        # the absolute path
        original_cwd = os.getcwd()
        os.chdir(tmp)
        try:
            assert utils.find_recipe("dirA") == expected
        finally:
            os.chdir(original_cwd)
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, variants=None,
          stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), (
        "post must be boolean or None. Remember, you must pass "
        "other arguments (config) by keyword.")

    # Distinct names per nesting level (the original reused `recipe` three deep).
    collected = []
    for item in _ensure_list(recipe_paths_or_metadata):
        if hasattr(item, "config"):
            # Already-rendered metadata object; take it as-is.
            collected.append(item)
        elif isinstance(item, string_types):
            # Glob-expand the path, then locate the recipe each match holds;
            # matches with no locatable recipe are skipped.
            for match in _expand_globs(item, os.getcwd()):
                try:
                    collected.append(find_recipe(match))
                except IOError:
                    continue
        else:
            raise ValueError(
                "Recipe passed was unrecognized object: {}".format(item))

    if not collected:
        raise ValueError('No valid recipes found for input: {}'.format(
            recipe_paths_or_metadata))

    return build_tree(
        sorted(collected),
        config=get_or_merge_config(config, **kwargs),
        # If people don't pass in an object to capture stats in, they won't get them returned.
        # We'll still track them, though.
        stats=stats or {},
        build_only=build_only,
        post=post,
        notest=notest,
        variants=variants)
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, **kwargs):
    """Render and build the given recipes (paths and/or metadata objects)."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    config = get_or_merge_config(config, **kwargs)
    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    string_paths = [
        p for p in recipe_paths_or_metadata if isinstance(p, string_types)
    ]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    # Iterate a copy; paths whose recipe cannot be located are silently skipped.
    for recipe in paths[:]:
        try:
            recipes.append(find_recipe(recipe))
        except IOError:
            pass
    # Metadata objects are already rendered; pass them through unchanged.
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    # Normalize any relative recipe path against the current directory.
    for recipe in recipes:
        if hasattr(recipe, "config") or os.path.isabs(recipe):
            absolute_recipes.append(recipe)
        else:
            absolute_recipes.append(
                os.path.normpath(os.path.join(os.getcwd(), recipe)))
    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def execute(args, parser):
    """CLI entry point for ``conda build``: render and build each requested
    recipe, recursively building missing-dependency recipes found nearby.

    :param args: parsed argparse namespace for the build subcommand
    :param parser: the argparse parser (passed through to env-var setup)
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        # Ensure output dirs exist and index them so already-built packages
        # can be detected below.
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    # Work queue of recipe arguments; dependency recipes get pushed to the front.
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # Tarball recipe: unpack to a temp dir we clean up afterwards.
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir, no_download_source=False,
                                                verbose=False)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            # --check: validate only, build nothing.
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                # Parse the solver error text for missing package names and
                # look for sibling recipe directories that could provide them.
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            # Seeing the same package twice means recursion
                            # isn't converging — bail out.
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                # Re-queue this recipe after its dependency recipes.
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
def execute(args, parser):
    """CLI entry point for ``conda build`` (variant passing ``dirty`` through
    to rendering): render and build each requested recipe, recursively
    building missing-dependency recipes found nearby.

    :param args: parsed argparse namespace for the build subcommand
    :param parser: the argparse parser (passed through to env-var setup)
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn(
                "Cannot delete trash; some c extension has been "
                "imported that is hard-linked by files in the trash. "
                "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        # Ensure output dirs exist and index them so already-built packages
        # can be detected below.
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    # Work queue of recipe arguments; dependency recipes get pushed to the front.
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # Tarball recipe: unpack to a temp dir we clean up afterwards.
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir, no_download_source=False,
                                                verbose=False, dirty=args.dirty)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            # --check: validate only, build nothing.
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                # Parse the solver error text for missing package names and
                # look for sibling recipe directories that could provide them.
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            # Seeing the same package twice means recursion
                            # isn't converging — bail out.
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                # Re-queue this recipe after its dependency recipes.
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
def test_find_recipe_file_bad():
    """A file whose name is not a recognized meta filename raises IOError."""
    with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)):
        # file provided is not valid meta
        with pytest.raises(IOError):
            utils.find_recipe(f1)
def test_find_recipe_file():
    """Passing the path of a valid meta file returns that same path."""
    with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)):
        meta_file = os.path.join(tmp, "meta.yaml")
        makefile(meta_file)
        # file provided is valid meta
        assert utils.find_recipe(meta_file) == meta_file
def test_find_recipe_no_meta():
    """A tree containing no recognized meta files raises IOError."""
    with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)):
        # no meta files in tmp
        with pytest.raises(IOError):
            utils.find_recipe(tmp)