def execute(args):
    """CLI entry point: update a package index directory via the api."""
    _, parsed = parse_args(args)
    config = Config(**parsed.__dict__)
    # --quiet suppresses verbose output
    config.verbose = not parsed.quiet
    api.update_index(
        parsed.dir,
        config=config,
        force=parsed.force,
        check_md5=parsed.check_md5,
        remove=parsed.remove,
    )
def execute(args):
    """CLI entry point for conda-build: parse args, configure, then
    dispatch to purge, output/test/source/check actions, or a full build."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.channel or ()
    config.channel_urls = []
    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        # These channels still must follow conda rules - they must have the
        # appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    # --debug wins over --quiet
    config.verbose = not args.quiet or args.debug

    # purge modes clean up and exit without building anything
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        # keep --output machine-readable: errors only
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            action(recipe, config)
        outputs = []
    else:
        outputs = api.build(args.recipe, post=args.post, build_only=args.build_only,
                            notest=args.notest, already_built=None, config=config,
                            noverify=args.no_verify)

    # warn about leftover intermediate build folders unless emitting paths
    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None):
    """Merge all variant specs that apply to a recipe into a single spec.

    Returns a tuple ``(combined_spec, specs)`` where ``combined_spec`` is a
    dict of lists and ``specs`` maps each source (internal defaults, config
    files, config.variant, CLI arguments) to its individual spec.  The
    output is order preserving, unlike get_package_variants.
    """
    # a MetaData object carries its own config; prefer that over the argument
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()

    specs = OrderedDict(internal_defaults=get_default_variant(config))
    for cfg_file in find_config_files(recipedir_or_metadata, config):
        specs[cfg_file] = parse_config_file(cfg_file, config)

    # CLI flags / env vars override values coming from files
    if getattr(config, 'variant', None):
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for source, spec in specs.items():
        validate_spec(source, spec)

    # later specs win; combine_specs logs a debug message whenever one
    # setting overrides another
    return combine_specs(specs, log_output=config.verbose), specs
def __init__(self, path, config=None):
    """Load recipe metadata from *path*, which may be a meta.yaml file or
    a recipe directory containing one."""
    self.undefined_jinja_vars = []
    if not config:
        config = Config()
    self.config = config
    if isfile(path):
        self.meta_path = path
        self.path = os.path.dirname(path)
    else:
        # path is a directory; locate the recipe file inside it
        self.meta_path = find_recipe(path)
        self.path = os.path.dirname(self.meta_path)
    self.requirements_path = join(self.path, 'requirements.txt')

    # Start with bare-minimum contents so we can call environ.get_dict() with impunity
    # We'll immediately replace these contents in parse_again()
    self.meta = parse("package:\n" " name: uninitialized", path=self.meta_path,
                      config=self.config)

    # This is the 'first pass' parse of meta.yaml, so not all variables are defined yet
    # (e.g. GIT_FULL_HASH, etc. are undefined)
    # Therefore, undefined jinja variables are permitted here
    # In the second pass, we'll be more strict. See build.build()
    self.parse_again(config=config, permit_undefined_jinja=True)
    self.config.disable_pip = self.disable_pip
def testing_config(testing_workdir):
    """Build a Config for tests, pinned to known defaults rather than
    whatever happens to be in ~/.condarc."""
    def boolify(v):
        # BUG FIX: the original compared the literal 'v' == 'true', which is
        # always False regardless of the argument.  Compare the value itself.
        return v == 'true'
    result = Config(
        croot=testing_workdir,
        anaconda_upload=False,
        verbose=True,
        activate=False,
        debug=False,
        variant=None,
        test_run_post=False,
        # These bits ensure that default values are used instead of any
        # present in ~/.condarc
        filename_hashing=filename_hashing_default,
        _src_cache_root=_src_cache_root_default,
        error_overlinking=boolify(error_overlinking_default),
        error_overdepending=boolify(error_overdepending_default),
        noarch_python_build_age=noarch_python_build_age_default,
        enable_static=boolify(enable_static_default),
        no_rewrite_stdout_env=boolify(no_rewrite_stdout_env_default),
        ignore_verify_codes=ignore_verify_codes_default,
        exit_on_verify_error=exit_on_verify_error_default,
        conda_pkg_format=conda_pkg_format_default,
    )
    # sanity-check that the defaults actually took effect
    assert result.no_rewrite_stdout_env is False
    assert result._src_cache_root is None
    assert result.src_cache_root == testing_workdir
    assert result.noarch_python_build_age == 0
    return result
def skeletonize(
    packages,
    output_dir,
    version,
    recursive,
    architecture,
    no_override_arch,
    distro,
    conda_forge_style,
    single_sysroot,
    build_number,
):
    """Generate CDT recipes for *packages* into *output_dir*."""
    cdt_info = _gen_cdts(single_sysroot)
    # these architectures use the alternate centos7 repo definition
    if architecture in ("aarch64", "ppc64le"):
        cdt_info["centos7"] = cdt_info["centos7-alt"]
    # use a throwaway cache dir so repeated runs start clean
    with tempfile.TemporaryDirectory() as scratch:
        write_conda_recipe(
            packages,
            distro,
            output_dir,
            architecture,
            recursive,
            not no_override_arch,
            None,
            Config(cache_dir=str(scratch)),
            build_number,
            conda_forge_style,
            single_sysroot,
            cdt_info,
        )
def testing_config(testing_workdir, request):
    """Fixture: a quiet, non-uploading Config rooted at the test workdir."""
    settings = dict(
        croot=testing_workdir,
        anaconda_upload=False,
        verbose=True,
        activate=False,
        debug=False,
        variant=None,
    )
    return Config(**settings)
def main():
    """Entry point: build (or test) a recipe using the mamba solver."""
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__
    config = Config(**args)
    channel_urls = get_rc_urls() + get_channel_urls({})
    init_api_context()
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    # setting the repodata timeout to very high for conda
    context.local_repodata_ttl = 100000

    recipe = args["recipe"][0]

    # the solver is read as a module-level global by the patched
    # conda-build internals, so it must be installed here
    global solver
    solver = MambaSolver(channel_urls, context.subdir)
    solver.replace_channels()
    cbc, _ = conda_build.variants.get_package_combined_spec(recipe, config=config)

    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
def execute(args):
    """CLI entry point for conda-build: parse args, configure, then
    dispatch to purge, output/test/source/check actions, or a full build."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    # --debug wins over --quiet
    config.verbose = not args.quiet or args.debug

    # purge modes clean up and exit without building anything
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    if on_win:
        delete_trash(None)

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        # keep --output machine-readable: errors only
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)
            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)
            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config)
            action(m, config)
            if need_cleanup:
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                  notest=args.notest, keep_old_work=args.keep_old_work,
                  already_built=None, config=config)

    # warn about leftover intermediate build folders unless emitting paths
    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
def _get_source_code(recipe_dir):
    """Download/extract a recipe's source via conda-build; return its dir."""
    # Use conda build to do all the downloading/extracting bits
    rendered = render(recipe_dir, config=Config(**CB_CONFIG))
    if not rendered:
        return rendered
    # render() yields a list of (metadata, ...) tuples; take the first
    metadata = rendered[0][0]
    # provide source dir
    return provide(metadata)
def fromstring(cls, metadata, config=None):
    """Alternate constructor: build a MetaData from a meta.yaml string."""
    inst = super(MetaData, cls).__new__(cls)
    config = config or Config()
    inst.meta = parse(metadata, path='', config=config)
    inst.config = config
    # first-pass parse: undefined jinja variables are tolerated here
    inst.parse_again(config=config, permit_undefined_jinja=True)
    return inst
def _get_source_code(recipe_dir):
    """Download/extract a recipe's source via conda-build; return its dir."""
    # skip finalization and env checks; only the source bits are needed
    rendered = render(
        recipe_dir,
        config=Config(**CB_CONFIG),
        finalize=False,
        bypass_env_check=True,
    )
    if not rendered:
        return None
    # rendered is a list of (metadata, ...) tuples; take the first
    metadata = rendered[0][0]
    # provide source dir
    return provide(metadata)
def execute(args):
    """CLI entry point: skeletonize each requested package from a repo."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    # a repo is mandatory; without one, show usage and bail out
    if not args.repo:
        parser.print_help()
        sys.exit()
    for pkg in args.packages:
        api.skeletonize(pkg, repo=args.repo, config=config)
def execute(args):
    """CLI entry point: skeletonize the requested packages from a repo."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    # a repo is mandatory; without one, show usage and bail out
    if not args.repo:
        parser.print_help()
        sys.exit()
    api.skeletonize(
        args.packages,
        args.repo,
        output_dir=args.output_dir,
        recursive=args.recursive,
        version=args.version,
        config=config,
    )
def prepare(**kwargs):
    """Configure the stage for mambabuild to run.

    Every keyword argument is forwarded to conda-build's ``Config``; the
    resulting Config is returned after its channels and output folder are
    set up and the build index has been refreshed.
    """
    config = Config(**kwargs)
    config.channel_urls = get_channel_urls(kwargs)
    init_api_context()

    # normalize the output folder and make sure it exists before indexing
    folder = os.path.abspath(config.output_folder)
    config.output_folder = folder
    if not os.path.exists(folder):
        mkdir_p(folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)
    return config
def get_package_variants(recipedir_or_metadata, config=None, variants=None):
    """Compute the list of variant dicts that apply to a recipe, merging
    internal defaults, variant config files, config.variant, and any
    explicitly passed *variants* (later sources override earlier ones)."""
    # a MetaData object carries its own config; prefer it over the argument
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(
        recipedir_or_metadata, ensure_list(config.variant_config_files),
        ignore_system_config=config.ignore_system_variants,
        exclusive_config_file=config.exclusive_config_file)

    specs = OrderedDict(internal_defaults=get_default_variant(config))

    for f in files:
        specs[f] = parse_config_file(f, config)

    # this is the override of the variants from files and args with values from CLI or env vars
    if hasattr(config, 'variant') and config.variant:
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for f, spec in specs.items():
        try:
            validate_spec(spec)
        except ValueError as e:
            raise ValueError("Error in config {}: {}".format(f, str(e)))

    # this merges each of the specs, providing a debug message when a given setting is overridden
    # by a later spec
    combined_spec, extend_keys = combine_specs(specs, log_output=config.verbose)
    extend_keys.update({'zip_keys', 'extend_keys'})

    # delete the default specs, so that they don't unnecessarily limit the matrix
    specs = specs.copy()
    del specs['internal_defaults']

    combined_spec = dict_of_lists_to_list_of_dicts(combined_spec, extend_keys=extend_keys)
    # iterate sources newest-first so later configs take precedence
    for source, source_specs in reversed(specs.items()):
        for k, vs in source_specs.items():
            if k not in extend_keys:
                # when filtering ends up killing off all variants, we just ignore that. Generally,
                # this arises when a later variant config overrides, rather than selects a
                # subspace of earlier configs
                combined_spec = (filter_by_key_value(
                    combined_spec, k, vs, source_name=source) or combined_spec)
    return combined_spec
def execute(args):
    """Render a recipe and print either its package path or its final YAML."""
    p, args = parse_args(args)
    config = Config()
    set_language_env_vars(args, p, config)

    # suppress all log output while rendering
    with LoggingContext(logging.CRITICAL + 1):
        metadata, _, _ = render_recipe(args.recipe,
                                       no_download_source=args.no_source,
                                       config=config)

    if args.output:
        print(bldpkg_path(metadata))
    else:
        print(output_yaml(metadata, args.file))
def inspect_prefix_length(packages, min_prefix_length=_prefix_length):
    """Report packages whose embedded binary prefix is shorter than the
    minimum; return True when every package is long enough."""
    from conda_build.tarcheck import check_prefix_lengths

    config = Config(prefix_length=min_prefix_length)
    prefix_lengths = check_prefix_lengths(_ensure_list(packages), config)
    if not prefix_lengths:
        print("No packages found with binary prefixes shorter than %d characters." %
              min_prefix_length)
    else:
        print("Packages with binary prefixes shorter than %d characters:" %
              min_prefix_length)
        for fn, length in prefix_lengths.items():
            print("{0} ({1} chars)".format(fn, length))
    return len(prefix_lengths) == 0
def parsed_meta_yaml(text):
    """
    :param str text: The raw text in conda-forge feedstock meta.yaml file
    :return: `dict|None` -- parsed YAML dict if successful, None if not
    """
    try:
        yaml_dict = parse(Template(text).render(), Config())
    except UndefinedError:
        # assume we hit a RECIPE_DIR reference in the vars and can't parse it.
        # just erase for now
        try:
            # FIX: use a raw string for the regex -- '\[' is an invalid
            # escape sequence in a plain string literal (SyntaxWarning on
            # modern Python); the pattern itself is unchanged
            yaml_dict = parse(
                Template(
                    re.sub(r'{{ (environ\[")?RECIPE_DIR("])? }}/', '', text)
                ).render(),
                Config())
        except Exception as e:
            print(e)
            return None
    except Exception as e:
        print(e)
        return None

    return yaml_dict
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False):
    """Update the conda package index for each directory in *dir_paths*."""
    from locale import getpreferredencoding
    import os

    from .conda_interface import PY3
    from conda_build.index import update_index

    paths = [os.path.abspath(p) for p in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        encoding = getpreferredencoding()
        paths = [p.decode(encoding) for p in paths]

    config = config or Config()
    for path in paths:
        update_index(path, config, force=force, check_md5=check_md5, remove=remove)
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None,
                       config=None):
    """Build a metapackage (dependencies only, no files) and return the result."""
    # local import to avoid a circular import; create_metapackage is provided in api
    from conda_build.api import build

    if not config:
        config = Config()

    meta = dict(
        package={'name': name, 'version': version},
        build={
            'number': build_number,
            'entry_points': entry_points,
            # MetaData does the auto stuff if the build string is None
            'string': build_string,
        },
        requirements={'run': dependencies},
        about={'home': home, 'license': license_name, 'summary': summary},
    )
    m = MetaData.fromdict(meta, config=config)
    config.compute_build_id(m.name())
    return build(m, config=config, need_source_download=False)
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None,
                       config=None):
    """Build a metapackage (dependencies only, no files) and return the result."""
    # local import to avoid a circular import; create_metapackage is provided in api
    from conda_build.build import build

    config = config or Config()

    meta = dict(
        package={'name': name, 'version': version},
        build={
            'number': build_number,
            'entry_points': entry_points,
            # MetaData does the auto stuff if the build string is None
            'string': build_string,
        },
        requirements={'run': dependencies},
        about={'home': home, 'license': license_name, 'summary': summary},
    )
    m = MetaData.fromdict(meta, config=config)
    config.compute_build_id(m.name())
    return build(m, config=config, need_source_download=False)
def publish(c, recipe):
    """Upload the built conda package(s) for *recipe* to anaconda.org."""
    config = CondaConfig()
    config.verbose = False
    config.debug = False

    spinner = Halo(
        text=f"Deriving conda build package paths for `{recipe}` recipe...",
        spinner="dots",
    )
    spinner.start()
    package_paths = conda_build_api.get_output_file_paths(recipe, config=config)
    spinner.succeed(f"Derived the following paths: {package_paths}")

    # every derived path must exist on disk before we attempt an upload
    for p in package_paths:
        if not Path(p).exists():
            raise exceptions.Exit(
                f"{Fore.RED}ERROR:{Style.RESET_ALL} Conda package path `{p}` does not exist! Please run "
                f"`$ inv build {recipe}` to build this package.",
                code=1,
            )

    # upload all packages in a single anaconda invocation
    packages = " ".join(package_paths)
    c.run(f"anaconda upload {packages}")
    print(f"{Fore.CYAN}Publish done!")
def build_conda_pack(base_path, tmp, hexrd_package_channel, hexrdgui_output_folder):
    """Build the hexrdgui conda package, install it into a fresh env, and
    pack that env into a relocatable archive; return the archive path."""
    # First build the hexrdgui package
    recipe_path = str(base_path / '..' / 'conda.recipe')
    config = Config()
    config.channel = ['cjh1', 'anaconda', 'conda-forge']
    config.channel_urls = ['cjh1', 'anaconda', 'conda-forge']
    if hexrdgui_output_folder is not None:
        config.output_folder = hexrdgui_output_folder
    if hexrd_package_channel is not None:
        # the caller-provided channel takes highest priority
        config.channel.insert(0, 'hexrd-channel')
        config.channel_urls.insert(0, hexrd_package_channel)
    config.CONDA_PY = '38'
    logger.info('Building hexrdgui conda package.')
    CondaBuild.build(recipe_path, config=config)

    logger.info('Creating new conda environment.')
    # Now create a new environment to install the package into
    env_prefix = str(tmp / package_env_name)
    channels = ['--channel', 'anaconda', '--channel', 'conda-forge']

    # For the mac we need to use our own version of Python built with the
    # latest SDK. See https://github.com/HEXRD/hexrdgui/issues/505 for
    # more details. So we add the HEXRD channel that has our Python package.
    if platform.system() == 'Darwin':
        channels = ['--channel', 'HEXRD'] + channels

    Conda.run_command(Conda.Commands.CREATE, '--prefix', env_prefix,
                      *channels, 'python=3.8.4')

    hexrdgui_output_folder_uri = Path(
        hexrdgui_output_folder).absolute().as_uri()

    logger.info('Installing hexrdgui into new environment.')
    # Install hexrdgui into new environment
    params = [
        Conda.Commands.INSTALL, '--prefix', env_prefix,
        '--channel', hexrdgui_output_folder_uri,
        '--channel', hexrd_package_channel,
        '--channel', 'cjh1',
        '--channel', 'anaconda',
        '--channel', 'conda-forge',
        'hexrdgui'
    ]
    Conda.run_command(*params)

    logger.info('Generating tar from environment using conda-pack.')
    # Now use conda-pack to create relocatable archive
    archive_path = str(tmp / ('hexrdgui.%s' % archive_format))
    CondaPack.pack(prefix=env_prefix, output=archive_path,
                   format=archive_format)

    return archive_path
def execute(args):
    """CLI entry point for conda-build: parse args, configure, then
    dispatch to purge, output/test/source/check actions, or a full build."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    # --debug wins over --quiet
    config.verbose = not args.quiet or args.debug

    # purge modes clean up and exit without building anything
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        # keep --output machine-readable: errors only
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            action(recipe, config)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                  notest=args.notest, keep_old_work=args.keep_old_work,
                  already_built=None, config=config, noverify=args.no_verify)

    # warn about leftover intermediate build folders unless emitting paths
    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
def fromdict(cls, metadata, config=None):
    """Create a MetaData object from a metadata dict directly."""
    inst = super(MetaData, cls).__new__(cls)
    # no backing file: path attributes stay empty
    inst.path = ''
    inst.meta_path = ''
    inst.meta = sanitize(metadata)
    inst.config = config if config else Config()
    inst.undefined_jinja_vars = []
    return inst
def execute(args):
    """Render a recipe; print its built-package path or its rendered YAML."""
    p, args = parse_args(args)
    config = Config()
    set_language_env_vars(args, p, config)

    metadata, _, _ = render_recipe(args.recipe,
                                   no_download_source=args.no_source,
                                   config=config)
    if not args.output:
        logging.basicConfig(level=logging.INFO)
        print(output_yaml(metadata, args.file))
        return
    # keep the path output machine-readable: errors only, no warnings
    logging.basicConfig(level=logging.ERROR)
    silence_loggers(show_warnings_and_errors=False)
    print(bldpkg_path(metadata, config=config))
def parse_meta_yaml(text):
    """Parse a conda-forge feedstock meta.yaml.

    Parameters
    ----------
    text : str
        The raw meta.yaml text.

    Returns
    -------
    dict :
        The parsed YAML dict.
    """
    rendered = render_meta_yaml(text)
    return parse(rendered, Config())
def parsed_meta_yaml(text):
    """
    :param str text: The raw text in conda-forge feedstock meta.yaml file
    :return: `dict` -- parsed YAML dict if successful, empty dict if not
    """
    try:
        env = jinja2.Environment(undefined=NullUndefined)
        # stub out conda-build's jinja2 helpers so rendering never fails
        content = env.from_string(text).render(
            os=os,
            environ=defaultdict(lambda: ''),
            compiler=lambda x: x + '_compiler_stub',
            pin_subpackage=lambda *args, **kwargs: 'subpackage_stub',
            pin_compatible=lambda *args, **kwargs: 'compatible_pin_stub',
            cdt=lambda *args, **kwargs: 'cdt_stub',)
        return parse(content, Config())
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; catch only real errors and fall back to {}
        return {}
def fromdict(cls, metadata, config=None, variant=None):
    """Create a MetaData object from a metadata dict directly."""
    inst = super(MetaData, cls).__new__(cls)
    # no backing file: path attributes stay empty
    inst.path = ''
    inst.meta_path = ''
    inst.requirements_path = ''
    inst.meta = sanitize(metadata)
    inst.config = config if config else Config(variant=variant)
    inst.undefined_jinja_vars = []
    # not yet finalized; a later pass resolves the full variant
    inst.final = False
    return inst
def parse_meta_yaml(text, **kwargs):
    """Parse a conda-forge feedstock meta.yaml.

    Parameters
    ----------
    text : str
        The raw meta.yaml text.
    **kwargs
        Forwarded to conda-build's ``Config``.

    Returns
    -------
    dict :
        The parsed YAML dict.
    """
    from conda_build.config import Config
    from conda_build.metadata import parse

    rendered = render_meta_yaml(text)
    return parse(rendered, Config(**kwargs))
def _get_source_code(recipe_dir):
    """Fetch and extract a recipe's source; return the source directory."""
    from conda_build.api import render
    from conda_build.config import Config
    from conda_build.source import provide

    # Use conda build to do all the downloading/extracting bits; skip
    # finalization and environment checks since only the source is needed
    rendered = render(
        recipe_dir,
        config=Config(**CB_CONFIG),
        finalize=False,
        bypass_env_check=True,
    )
    if not rendered:
        return None
    metadata = rendered[0][0]
    # conda-build exits via SystemExit on download failure; surface it
    # as a regular error instead
    try:
        return provide(metadata)
    except SystemExit:
        raise RuntimeError(f"Could not download source for {recipe_dir}!")
def _get_source_code(recipe_dir):
    """Fetch and extract a recipe's source; return the source directory.

    Raises RuntimeError (chained to the original error) on any failure,
    including conda-build's SystemExit.
    """
    try:
        from conda_build.api import render
        from conda_build.config import Config
        from conda_build.source import provide

        # Use conda build to do all the downloading/extracting bits
        md = render(
            recipe_dir,
            config=Config(**CB_CONFIG),
            finalize=False,
            bypass_env_check=True,
        )
        if not md:
            return None
        md = md[0][0]
        # provide source dir
        return provide(md)
    except (SystemExit, Exception) as e:
        # SystemExit is not an Exception subclass, hence the explicit tuple.
        # FIX: chain with `from e` so the original traceback is preserved
        raise RuntimeError("conda build src exception:" + str(e)) from e
def get_package_variants(recipedir_or_metadata, config=None):
    """Compute the list of variant dicts applicable to a recipe, merging
    platform defaults, variant config files, and config.variant (which,
    coming from CLI flags or env vars, takes final precedence)."""
    # a MetaData object carries its own config; prefer it over the argument
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(
        recipedir_or_metadata, ensure_list(config.variant_config_files),
        ignore_system_config=config.ignore_system_variants)
    specs = get_default_variants(
        config.platform) + [parse_config_file(f, config) for f in files]

    target_platform_default = [{'target_platform': config.subdir}]

    # this is the override of the variants from files and args with values from CLI or env vars
    if config.variant:
        combined_spec, extend_keys = combine_specs(target_platform_default + specs +
                                                   [config.variant])
    else:
        # this tweaks behavior from clobbering to appending/extending
        combined_spec, extend_keys = combine_specs(target_platform_default + specs)

    # clobber the variant with anything in the config (stuff set via CLI flags or env vars)
    for k, v in config.variant.items():
        if k in extend_keys:
            # extend-keys may be dict-like or list-like; merge accordingly
            if hasattr(combined_spec[k], 'keys'):
                combined_spec[k].update(v)
            else:
                combined_spec[k].extend(v)
        elif k == 'zip_keys':
            # append, then de-duplicate the zip groups
            combined_spec[k].extend(v)
            combined_spec[k] = list(
                list(set_group) for set_group in set(
                    tuple(group) for group in combined_spec[k]))
        else:
            combined_spec[k] = [v]

    validate_variant(combined_spec)
    return dict_of_lists_to_list_of_dicts(combined_spec, config.platform)
def build_conda_pack(base_path, tmp, hexrd_package_channel, hexrdgui_output_folder):
    """Build the hexrdgui conda package, install it into a fresh env, and
    pack that env into a relocatable archive; return the archive path."""
    # First build the hexrdgui package
    recipe_path = str(base_path / '..' / 'conda.recipe')
    config = Config()
    config.channel = ['cjh1', 'anaconda', 'conda-forge']
    config.channel_urls = ['cjh1', 'anaconda', 'conda-forge']
    if hexrdgui_output_folder is not None:
        config.output_folder = hexrdgui_output_folder
    if hexrd_package_channel is not None:
        # the caller-provided channel takes highest priority
        config.channel.insert(0, 'hexrd-channel')
        config.channel_urls.insert(0, hexrd_package_channel)
    config.CONDA_PY = '38'
    logger.info('Building hexrdgui conda package.')
    CondaBuild.build(recipe_path, config=config)

    logger.info('Creating new conda environment.')
    # Now create a new environment to install the package into
    env_prefix = str(tmp / package_env_name)
    Conda.run_command(Conda.Commands.CREATE, '--prefix', env_prefix,
                      'python=3.8')

    hexrdgui_output_folder_uri = Path(
        hexrdgui_output_folder).absolute().as_uri()

    logger.info('Installing hexrdgui into new environment.')
    # Install hexrdgui into new environment
    params = [
        Conda.Commands.INSTALL, '--prefix', env_prefix,
        '--channel', hexrdgui_output_folder_uri,
        '--channel', hexrd_package_channel,
        '--channel', 'cjh1',
        '--channel', 'anaconda',
        '--channel', 'conda-forge',
        'hexrdgui'
    ]
    Conda.run_command(*params)

    logger.info('Generating tar from environment using conda-pack.')
    # Now use conda-pack to create relocatable archive
    archive_path = str(tmp / ('hexrdgui.%s' % archive_format))
    CondaPack.pack(prefix=env_prefix, output=archive_path,
                   format=archive_format)

    return archive_path
def execute(args):
    """CLI entry point for conda-build: parse args, configure, then
    dispatch to purge, output/test/source/check actions, or a full build.

    Returns the list of build outputs (empty for action modes)."""
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []
    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        # These channels still must follow conda rules - they must have the
        # appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    # --debug wins over --quiet
    config.verbose = not args.quiet or args.debug

    # purge modes clean up and exit without building anything
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    action = None
    outputs = None
    if args.output:
        action = output_action
        # keep --output machine-readable
        config.verbose = False
        config.quiet = True
        config.debug = False
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action == test_action:
        failed_recipes = []
        # expand any glob patterns in the recipe arguments
        recipes = [item for sublist in
                   [glob(os.path.abspath(recipe)) if '*' in recipe else [recipe]
                    for recipe in args.recipe]
                   for item in sublist]
        for recipe in recipes:
            try:
                action(recipe, config)
            except Exception:
                # FIX: was a bare `except:`; with --keep-going that would
                # also swallow KeyboardInterrupt/SystemExit and record them
                # as failed recipes instead of aborting the run
                if not args.keep_going:
                    raise
                else:
                    failed_recipes.append(recipe)
                    continue
        if failed_recipes:
            print("Failed recipes:")
            for recipe in failed_recipes:
                print(" - %s" % recipe)
            # exit status carries the number of failures
            sys.exit(len(failed_recipes))
        else:
            print("All tests passed")
        outputs = []
    elif action:
        outputs = [action(recipe, config) for recipe in args.recipe]
    else:
        outputs = api.build(args.recipe, post=args.post, build_only=args.build_only,
                            notest=args.notest, already_built=None, config=config,
                            verify=args.verify, variants=args.variants)

    # warn about leftover intermediate build folders unless emitting paths
    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs