def execute(args):
    """CLI entry point: parse arguments, build a Config, and dispatch.

    Depending on the flags, either runs a per-recipe action
    (--output / --test / --source / --check) or hands the recipe list to
    api.build for a full build.  'purge' / 'purge-all' pseudo-recipes
    short-circuit into cleanup and return early.
    """
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    # evaluates as (not quiet) or debug: --debug forces verbosity on
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        # --output should emit only the computed path(s); suppress all chatter
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config)
            action(m, config)

            if need_cleanup:
                # recipe_dir was a temporary extraction of a tarball input
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                  notest=args.notest, keep_old_work=args.keep_old_work,
                  already_built=None, config=config)

    # warn about leftover intermediate build folders (skip in --output mode)
    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    """Run the test phase for a recipe dir, built package file, or MetaData.

    Accepts three input shapes (detected via hasattr(..., 'config')):
    a MetaData object, a recipe directory, or a package tarball path.
    For package input, the package's parent channel is indexed and prepended
    to channel_urls so conda can resolve the local package.  On success with
    package input, the package file is moved into config.output_folder
    (when one is set).  Returns the result of conda_build.build.test.
    """
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input. If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        # MetaData input: it already carries its own config
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        # path input: may be a recipe dir, or a tarball that get_recipe_abspath
        # extracts to a temp dir (need_cleanup=True in that case)
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            # local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know if it's a tuple or list. Don't know
            # how to add elements.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            # re-fetch sources only if the test section actually needs source files
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    # recipe_config acts as a context manager (presumably for setup/teardown of
    # build folders — confirm against Config.__enter__/__exit__)
    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            # NOTE(review): os.rename fails across filesystems — assumes
            # output_folder is on the same device as the input package
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
def execute(args):
    """CLI entry point: parse arguments, build a Config, and dispatch.

    Depending on the flags, either runs a per-recipe action
    (--output / --test / --source / --check) or hands the recipe list to
    api.build for a full build.  'purge' / 'purge-all' pseudo-recipes
    short-circuit into cleanup and return early.  On Windows, pending
    trash from previous runs is deleted first.
    """
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    # evaluates as (not quiet) or debug: --debug forces verbosity on
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    if on_win:
        # files that were in use could not be removed earlier; retry now
        delete_trash(None)

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        # --output should emit only the computed path(s); suppress all chatter
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config)
            action(m, config)

            if need_cleanup:
                # recipe_dir was a temporary extraction of a tarball input
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                  notest=args.notest, keep_old_work=args.keep_old_work,
                  already_built=None, config=config)

    # warn about leftover intermediate build folders (skip in --output mode);
    # idiomatic truthiness test instead of len(...) > 0
    if not args.output and build.get_build_folders(config.croot):
        build.print_build_intermediate_warning(config)
def _construct_metadata_for_test_from_package(package, config):
    """Build a MetaData object suitable for testing an already-built package.

    Extracts the package, reads info/index.json and info/hash_input.json,
    ensures the package is reachable through an indexed local channel
    (copying it into config.croot if it does not already live in a channel
    layout), and renders the embedded recipe.  If the package carries no
    recipe, a minimal MetaData is fudged from index.json so the build
    string lines up with the existing package.

    Returns a (metadata, hash_input) tuple, where hash_input is the variant
    dict used for filename hashing (empty for unhashed legacy packages).
    """
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]

    # We may be testing an (old) package built without filename hashing.
    # (distinct name for the path — the original reused `hash_input` for both
    # the path string and the loaded dict, which obscured its type)
    hash_input_file = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input_file):
        with open(hash_input_file) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    # This is still necessary for computing the hash correctly though
    config.variant = hash_input

    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = False
    for platform in ("win-", "linux-", "osx-", "noarch"):
        if last_element.startswith(platform):
            is_channel = True

    if not is_channel:
        # log.warn is a deprecated alias; use log.warning
        log.warning(
            "Copying package to conda-build croot. No packages otherwise alongside yours will"
            " be available unless you specify -c local. To avoid this warning, your package "
            "must reside in a channel structure with platform-subfolders. See more info on "
            "what a valid channel is at "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html"
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
    # no recipe in package. Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        # yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )

    # HACK: because the recipe is fully baked, detecting "used" variables no longer works. The set
    # of variables in the hash_input suffices, though.
    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"
    metadata.config.used_vars = list(hash_input.keys())

    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel. Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls

    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input