def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, config=None,
                          variants=None, **kwargs):
    """Get output file paths for any packages that would be created by a recipe

    Both split packages (recipes with more than one output) and build matrices,
    created with variants, contribute to the list of file paths here.

    :param recipe_path_or_metadata: a recipe path, a MetaData object, or an
        iterable of (metadata, download, render_in_env) 3-tuples
    :param no_download_source: passed through to render() when a path is given
    :param config: Config object; merged with **kwargs via get_or_merge_config
    :param variants: variant dict(s) used when rendering a recipe path
    :return: list of output package paths (or "Skipped: ..." strings)
    """
    from conda_build.render import bldpkg_path
    from conda_build.conda_interface import string_types
    config = get_or_merge_config(config, **kwargs)
    if hasattr(recipe_path_or_metadata, '__iter__') and not isinstance(recipe_path_or_metadata,
                                                                       string_types):
        # expect an iterable of (metadata, download, render_in_env) 3-tuples
        list_of_metas = [hasattr(item[0], 'config')
                         for item in recipe_path_or_metadata if len(item) == 3]
        if list_of_metas and all(list_of_metas):
            metadata = recipe_path_or_metadata
        else:
            # BUG FIX: previously this branch fell through with ``metadata``
            # unassigned, raising an opaque UnboundLocalError below.
            raise ValueError("Expecting iterable of (metadata, download, render_in_env) "
                             "tuples - got {}".format(recipe_path_or_metadata))
    elif isinstance(recipe_path_or_metadata, string_types):
        # first, render the parent recipe (potentially multiple outputs, depending on variants).
        metadata = render(recipe_path_or_metadata, no_download_source=no_download_source,
                          variants=variants, config=config)
    else:
        assert hasattr(recipe_path_or_metadata, 'config'), ("Expecting metadata object - got {}"
                                                            .format(recipe_path_or_metadata))
        metadata = [(recipe_path_or_metadata, None, None)]
    # Next, loop over outputs that each metadata defines
    outs = []
    for (m, _, _) in metadata:
        if m.skip():
            outs.append("Skipped: {} defines build/skip for this configuration."
                        .format(m.path))
        else:
            outs.append(bldpkg_path(m))
    return outs
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, stats=None, **kwargs):
    """Run tests on either packages (.tar.bz2 or extracted) or recipe folders.

    For a recipe folder, the recipe is rendered just enough to know what
    package to download, which is then obtained from the currently configured
    channels.
    """
    from conda_build.build import test as run_test

    # A metadata object carries its own config; otherwise merge kwargs into one.
    if hasattr(recipedir_or_package_or_metadata, 'config'):
        config = recipedir_or_package_or_metadata.config
    else:
        config = get_or_merge_config(config, **kwargs)

    # Stats are always tracked; callers only get them back if they pass a dict in.
    stats = stats or {}

    # Entering the config creates a new local build folder only when one
    # doesn't exist yet, so a test run right after a build reuses the folder
    # the build already provided.
    with config:
        return run_test(recipedir_or_package_or_metadata, config=config,
                        move_broken=move_broken, stats=stats)
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    """Run tests on either a package or a recipe folder.

    For a recipe folder, the recipe is rendered just enough to know what
    package to download, which is then obtained from the currently configured
    channels.
    """
    from conda_build.build import test as run_test

    # Prefer the config attached to a metadata object; otherwise build one.
    config = (recipedir_or_package_or_metadata.config
              if hasattr(recipedir_or_package_or_metadata, 'config')
              else get_or_merge_config(config, **kwargs))

    # Entering the config creates a local build folder only if one doesn't
    # already exist, so a test immediately after a build reuses the build's folder.
    with config:
        return run_test(recipedir_or_package_or_metadata, config=config,
                        move_broken=move_broken)
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs):
    """Generate a conda recipe from an external repo.

    Translates metadata from external sources into expected conda recipe format.
    """
    config = get_or_merge_config(config, **kwargs)
    config.compute_build_id('skeleton')
    pkg_list = _ensure_list(packages)

    # Resolve the per-repo skeleton module (e.g. conda_build.skeletons.pypi).
    skeletons_pkg = __import__("conda_build.skeletons", globals=globals(),
                               locals=locals(), fromlist=[repo])
    module = getattr(skeletons_pkg, repo)

    # Only forward keyword arguments the target skeletonize() actually accepts.
    accepted = module.skeletonize.__code__.co_varnames
    passthrough = {name: value for name, value in kwargs.items() if name in accepted}

    with config:
        result = module.skeletonize(pkg_list, output_dir=output_dir, version=version,
                                    recursive=recursive, config=config, **passthrough)
    return result
def test_get_or_create_config_does_not_change_passed_in_config(config):
    """Merging extra arguments must yield a new config and leave the caller's
    config object untouched."""
    assert config.dirty is False
    merged = get_or_merge_config(config, dirty=True)
    # The merged copy picks up the new setting...
    assert merged.dirty is True
    # ...while the original remains unchanged.
    assert config.dirty is False
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True,
           finalize=True, bypass_env_check=False, **kwargs):
    """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2
       templates evaluated.

    Returns a list of (metadata, needs_download, needs_reparse in env) tuples"""
    from conda_build.render import render_recipe, finalize_metadata
    from conda_build.exceptions import DependencyNeedsBuildingError
    from conda_build.conda_interface import NoPackagesFoundError
    from collections import OrderedDict
    config = get_or_merge_config(config, **kwargs)

    # Render the top-level recipe; one tuple per variant combination.
    metadata_tuples = render_recipe(recipe_path, bypass_env_check=bypass_env_check,
                                    no_download_source=config.no_download_source,
                                    config=config, variants=variants,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    # Keyed OrderedDict deduplicates equivalent outputs while preserving order.
    output_metas = OrderedDict()
    for meta, download, render_in_env in metadata_tuples:
        # trim_skip drops skipped metadata unless the config asks to keep it
        if not meta.skip() or not config.trim_skip:
            for od, om in meta.get_output_metadata_set(
                    permit_unsatisfiable_variants=permit_unsatisfiable_variants,
                    permit_undefined_jinja=not finalize,
                    bypass_env_check=bypass_env_check):
                if not om.skip() or not config.trim_skip:
                    # conda packages (no explicit 'type' means conda) get finalized;
                    # other output types are recorded under a "type: name" key below.
                    if 'type' not in od or od['type'] == 'conda':
                        if finalize and not om.final:
                            try:
                                om = finalize_metadata(
                                    om,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
                            except (DependencyNeedsBuildingError, NoPackagesFoundError):
                                # unsatisfiable deps are tolerated unless the caller forbids it
                                if not permit_unsatisfiable_variants:
                                    raise

                        # remove outputs section from output objects for simplicity
                        if not om.path and om.meta.get('outputs'):
                            om.parent_outputs = om.meta['outputs']
                            del om.meta['outputs']

                        # dedup key: dist name, target platform, and the used variant values
                        output_metas[om.dist(), om.config.variant.get('target_platform'),
                                     tuple((var, om.config.variant[var])
                                           for var in om.get_used_vars())] = \
                            ((om, download, render_in_env))
                    else:
                        output_metas["{}: {}".format(om.type, om.name()),
                                     om.config.variant.get('target_platform'),
                                     tuple((var, om.config.variant[var])
                                           for var in om.get_used_vars())] = \
                            ((om, download, render_in_env))

    return list(output_metas.values())
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False, notest=False, config=None, **kwargs):
    """Build the given recipe path(s) or metadata object(s) via build_tree.

    Relative recipe paths are normalized against the current working directory;
    metadata objects and absolute paths pass through unchanged.
    """
    import os
    from conda_build.build import build_tree

    config = get_or_merge_config(config, **kwargs)
    cwd = os.getcwd()

    absolute_recipes = []
    for recipe in _ensure_list(recipe_paths_or_metadata):
        # metadata objects and absolute paths need no normalization
        if hasattr(recipe, "config") or os.path.isabs(recipe):
            absolute_recipes.append(recipe)
        else:
            absolute_recipes.append(os.path.normpath(os.path.join(cwd, recipe)))

    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs):
    """Generate a conda recipe from an external repo.

    Translates metadata from external sources into expected conda recipe format.

    :param packages: package name(s) to generate recipes for
    :param repo: name of the skeleton backend module (e.g. 'pypi', 'cran')
    :return: whatever the backend's skeletonize() returns
    """
    # here we're dumping all extra kwargs as attributes on the config object.  We'll extract
    # only relevant ones below
    config = get_or_merge_config(config, **kwargs)
    config.compute_build_id('skeleton')
    packages = _ensure_list(packages)

    # This is a little bit of black magic.  The idea is that for any keyword argument that
    # we inspect from the given module's skeletonize function, we should hoist the argument
    # off of the config object, and pass it as a keyword argument.  This is sort of the
    # inverse of what we do in the CLI code - there we take CLI arguments and dangle them
    # all on the config object as attributes.
    module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(),
                                fromlist=[repo]),
                     repo)

    func_args = module.skeletonize.__code__.co_varnames
    kwargs = {name: getattr(config, name) for name in dir(config) if name in func_args}
    # NOTE: a former ``kwargs.update({name: value for name, value in kwargs.items()
    # if name in func_args})`` line was a no-op (it updated kwargs from itself)
    # and has been removed.

    # strip out local arguments that we pass directly
    for arg in skeletonize.__code__.co_varnames:
        if arg in kwargs:
            del kwargs[arg]

    with config:
        skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version,
                                             recursive=recursive, config=config, **kwargs)
    return skeleton_return
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    """Run tests for a metadata object, a recipe directory, or a package path.

    Renders the recipe first when given a path; reuses the attached config when
    given a metadata object.
    """
    import os
    from conda_build.build import test
    from conda_build.render import render_recipe

    config = get_or_merge_config(config, **kwargs)

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        # already-rendered metadata: reuse its config verbatim
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    elif os.path.isdir(recipedir_or_package_or_metadata):
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        config.compute_build_id(recipedir_or_package_or_metadata)
        metadata, _, _ = render_recipe(recipedir_or_package_or_metadata, config=config)
        recipe_config = config
    else:
        # fall back to old way (use recipe, rather than package)
        metadata, _, _ = render_recipe(recipedir_or_package_or_metadata,
                                       no_download_source=False,
                                       config=config, **kwargs)
        recipe_config = config

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)
    return test_result
def render_yaml(path, variants=None, variant_config_files=None, schema=None, permit_undefined_jinja=False):
    """
    Call conda-build's render tool to get a list of dictionaries of the
    rendered YAML file for each variant that will be built.
    """
    config = get_or_merge_config(None, variant=variants)
    config.variant_config_files = variant_config_files
    config.verbose = False

    if os.path.isfile(path):
        # api.render will only work if path is pointing to a meta.yaml file.
        # For other files, use the MetaData class directly.
        # The absolute path is needed because MetaData seems to do some caching
        # based on file name.
        meta = conda_build.metadata.MetaData(os.path.abspath(path), config=config)
        metas = meta.get_rendered_recipe_text(permit_undefined_jinja=permit_undefined_jinja)
    else:
        metas = conda_build.api.render(path, config=config,
                                       bypass_env_check=True, finalize=False)

    if schema:
        utils.validate_dict_schema(metas, schema)
    return metas
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, **kwargs):
    """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2
       templates evaluated.

    Returns a list of (metadata, needs_download, needs_reparse in env) tuples"""
    from conda_build.render import render_recipe, finalize_metadata
    from conda_build.exceptions import DependencyNeedsBuildingError
    config = get_or_merge_config(config, **kwargs)

    metadata_tuples, index = render_recipe(recipe_path,
                                           no_download_source=config.no_download_source,
                                           config=config, variants=variants,
                                           permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    metadata = []
    for (_m, download, reparse) in metadata_tuples:
        # expand each parent recipe into its individual outputs
        for (output_dict, m) in _m.get_output_metadata_set():
            # wheel outputs are skipped; everything else is finalized best-effort
            if output_dict.get('type') != 'wheel':
                try:
                    m = finalize_metadata(m, index)
                except DependencyNeedsBuildingError:
                    # non-fatal: deps may be built later in dependency order
                    log = _get_logger(__name__)
                    log.warn("Could not finalize metadata due to missing dependencies. "
                             "If building, these should get built in order and it's OK to "
                             "ignore this message..")
                metadata.append((m, download, reparse))
    return metadata
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True,
           finalize=True, **kwargs):
    """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2
       templates evaluated.

    Returns a list of (metadata, needs_download, needs_reparse in env) tuples"""
    from conda_build.render import render_recipe, finalize_metadata
    from conda_build.exceptions import DependencyNeedsBuildingError
    from conda_build.conda_interface import NoPackagesFoundError
    from collections import OrderedDict
    config = get_or_merge_config(config, **kwargs)

    metadata_tuples = render_recipe(recipe_path, no_download_source=config.no_download_source,
                                    config=config, variants=variants,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    # keyed by dist name so duplicate outputs collapse, preserving insertion order
    output_metas = OrderedDict()
    for meta, download, render_in_env in metadata_tuples:
        for od, om in meta.get_output_metadata_set(
                permit_unsatisfiable_variants=permit_unsatisfiable_variants):
            # only show conda packages right now
            if 'type' not in od or od['type'] == 'conda':
                assert hasattr(om.config, 'variants')
                if finalize and not om.final:
                    try:
                        om = finalize_metadata(
                            om, permit_unsatisfiable_variants=permit_unsatisfiable_variants)
                    except (DependencyNeedsBuildingError, NoPackagesFoundError):
                        # tolerated unless the caller forbids unsatisfiable variants
                        if not permit_unsatisfiable_variants:
                            raise
                output_metas[om.dist()] = ((om, download, render_in_env))
    return list(output_metas.values())
def _get_package_dependencies(path, variant_config_files, variants):
    """
    Return a list of output packages and a list of dependency packages
    for the recipe at a given path.  Uses conda-render to determine this
    information.
    """
    # Render the recipe once per variant that would be built.
    config = get_or_merge_config(None)
    config.variant_config_files = variant_config_files
    config.verbose = False
    metas = conda_build.api.render(path, config=config, variants=variants,
                                   bypass_env_check=True, finalize=False)

    # Collect package names and per-section dependencies across all variants.
    packages = set()
    section_deps = {'run': set(), 'host': set(), 'build': set()}
    test_deps = set()
    for meta, _, _ in metas:
        recipe = meta.meta
        packages.add(recipe['package']['name'])
        for section, acc in section_deps.items():
            acc.update(recipe['requirements'].get(section, []))
        if 'test' in recipe:
            test_deps.update(recipe['test'].get('requires', []))

    return packages, section_deps['run'], section_deps['host'], section_deps['build'], test_deps
def get_config(folder, variant=None, additional_files=None):
    """Locate, parse, and merge all conda_build_config files for *folder*.

    Returns the combined variant spec plus the Config object used to parse it.
    """
    additional_files = additional_files or []
    variant = variant or {}
    config = get_or_merge_config(None, variant)

    # Newer conda-build takes the config when searching for config files.
    if cb_split_version >= (3, 20, 5):
        config_files = find_config_files(folder, config)
    else:
        config_files = find_config_files(folder)
    all_files = [os.path.abspath(p) for p in config_files + additional_files]

    # Deduplicate keeping the LAST occurrence of each path, so that
    # command-line files can overwrite earlier entries in the hierarchy.
    seen = set()
    kept_reversed = []
    for candidate in all_files[::-1]:
        if candidate not in seen:
            seen.add(candidate)
            kept_reversed.append(candidate)
    all_files = kept_reversed[::-1]

    console.print(f"\nLoading config files: [green]{', '.join(all_files)}\n")

    parsed_cfg = collections.OrderedDict()
    for f in all_files:
        parsed_cfg[f] = parse_config_file(f, config)

    # this merges each of the specs, providing a debug message when a given setting is overridden
    # by a later spec
    combined_spec = combine_specs(parsed_cfg, log_output=config.verbose)

    return combined_spec, config
def execute(args):
    """CLI entry point: render the recipe named in *args* and either print the
    would-be output paths (--output) or the rendered YAML."""
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    # accept either 'channel' or 'channels' from the parsed namespace
    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []
    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        # These channels still must follow conda rules - they must have the
        # appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    metadata_tuples = api.render(args.recipe, config=config, no_download_source=args.no_source)

    if args.output:
        # suppress all logging so only the paths appear on stdout
        with LoggingContext(logging.CRITICAL + 1):
            config.verbose = False
            config.debug = False
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] # decouple this config from whatever was fed in. People must change config by # accessing and changing this attribute. self.config = copy.copy(get_or_merge_config(config, variant=variant)) if isfile(path): self.meta_path = path self.path = os.path.dirname(path) else: self.meta_path = find_recipe(path) self.path = os.path.dirname(self.meta_path) self.requirements_path = join(self.path, 'requirements.txt') # Start with bare-minimum contents so we can call environ.get_dict() with impunity # We'll immediately replace these contents in parse_again() self.meta = dict() # This is the 'first pass' parse of meta.yaml, so not all variables are defined yet # (e.g. GIT_FULL_HASH, etc. are undefined) # Therefore, undefined jinja variables are permitted here # In the second pass, we'll be more strict. See build.build() # Primarily for debugging. Ensure that metadata is not altered after "finalizing" self.parse_again(permit_undefined_jinja=True) if 'host' in self.get_section('requirements'): self.config.has_separate_host_prefix = True self.config.disable_pip = self.disable_pip
def check(recipe_path, no_download_source=False, config=None, **kwargs):
    """Render the recipe at *recipe_path* and validate its metadata fields."""
    from conda_build.render import render_recipe

    config = get_or_merge_config(config, **kwargs)
    # only the metadata itself is needed; download/reparse flags are discarded
    meta, _, _ = render_recipe(recipe_path,
                               no_download_source=no_download_source,
                               config=config)
    return meta.check_fields()
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False, notest=False, config=None, **kwargs):
    """Build recipes (by path, with glob expansion) and/or metadata objects.

    Paths are expanded, resolved to recipe locations via find_recipe, and
    normalized to absolute paths before handing everything to build_tree.
    """
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    config = get_or_merge_config(config, **kwargs)

    inputs_list = _ensure_list(recipe_paths_or_metadata)
    path_inputs = [item for item in inputs_list if isinstance(item, string_types)]

    recipes = []
    for candidate in _expand_globs(path_inputs, os.getcwd()):
        # silently skip paths that contain no recipe
        try:
            recipes.append(find_recipe(candidate))
        except IOError:
            pass
    # metadata objects pass straight through
    recipes.extend(item for item in inputs_list if hasattr(item, 'config'))

    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config") or os.path.isabs(recipe):
            absolute_recipes.append(recipe)
        else:
            absolute_recipes.append(os.path.normpath(os.path.join(os.getcwd(), recipe)))

    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, **kwargs):
    """Given path to a recipe, return the MetaData object(s) representing that
    recipe, with jinja2 templates evaluated.

    Returns a list of (metadata, needs_download, needs_reparse in env) tuples.
    """
    from collections import OrderedDict
    from conda_build.render import render_recipe

    config = get_or_merge_config(config, **kwargs)
    metadata_tuples, index = render_recipe(
        recipe_path, no_download_source=config.no_download_source,
        config=config, variants=variants,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)

    # keyed by dist name so duplicate outputs collapse, preserving order
    deduped = OrderedDict()
    for meta, download, render_in_env in metadata_tuples:
        for out_dict, out_meta in meta.get_output_metadata_set(
                permit_unsatisfiable_variants=permit_unsatisfiable_variants):
            # only show conda packages right now
            if out_dict.get('type', 'conda') == 'conda':
                deduped[out_meta.dist()] = (out_meta, download, render_in_env)
    return list(deduped.values())
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    """Run tests for a metadata object, a recipe folder, or a package tarball.

    Package tarballs are extracted, indexed locally, tested, and (on success,
    when an output_folder is configured) moved into that folder.
    """
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input. If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            # local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know if it's a tuple or list. Don't know
            # how to add elements.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            # provide source files if the package's tests reference them
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        # on success, relocate tested package tarballs into the output folder
        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
def build_feedstock_from_command(command, # pylint: disable=too-many-arguments, too-many-locals
                                 recipe_config_file=None,
                                 output_folder=utils.DEFAULT_OUTPUT_FOLDER,
                                 conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
                                 local_src_dir=None):
    '''
    Build a feedstock from a build_command object.
    '''
    utils.check_if_package_exists('conda-build')

    # pylint: disable=import-outside-toplevel
    import conda_build.api
    from conda_build.config import get_or_merge_config

    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(recipe_config_file,
                                                                    variant,
                                                                    command.recipe_path)
        # Build each recipe
        if build_config_data['recipes'] is None:
            build_config_data['recipes'] = []
            print("INFO: No recipe to build for given configuration.")
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue
            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config] \
                if os.path.exists(conda_build_config) else []

            recipe_conda_build_config = get_conda_build_config()
            if recipe_conda_build_config:
                config.variant_config_files.append(recipe_conda_build_config)

            # local output folder first so freshly-built deps are preferred
            config.channel_urls = [os.path.abspath(output_folder)]
            config.channel_urls += command.channels
            config.channel_urls += build_config_data.get('channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                      config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # BUG FIX: os.getcwd was previously passed uncalled, embedding the
                # function object (not the directory) in the error message.
                raise OpenCEError(Error.BUILD_RECIPE,
                                  recipe['name'] if 'name' in recipe else os.getcwd(),
                                  str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def build_feedstock_from_command(
        command, # pylint: disable=too-many-arguments
        recipe_config_file=None,
        output_folder=utils.DEFAULT_OUTPUT_FOLDER,
        extra_channels=None,
        conda_build_config=utils.DEFAULT_CONDA_BUILD_CONFIG,
        local_src_dir=None):
    '''
    Build a feedstock from a build_command object.
    '''
    if not extra_channels:
        extra_channels = []
    saved_working_directory = None
    if command.repository:
        saved_working_directory = os.getcwd()
        os.chdir(os.path.abspath(command.repository))

    recipes_to_build = inputs.parse_arg_list(command.recipe)

    for variant in utils.make_variants(command.python, command.build_type,
                                       command.mpi_type, command.cudatoolkit):
        build_config_data, recipe_config_file = load_package_config(
            recipe_config_file, variant)

        # Build each recipe
        for recipe in build_config_data['recipes']:
            if recipes_to_build and recipe['name'] not in recipes_to_build:
                continue
            config = get_or_merge_config(None, variant=variant)
            config.skip_existing = True
            config.prefix_length = 225
            config.output_folder = output_folder
            config.variant_config_files = [conda_build_config]

            # a feedstock-local conda_build_config.yaml, if present, augments the global one
            recipe_conda_build_config = os.path.join(
                os.getcwd(), "config", "conda_build_config.yaml")
            if os.path.exists(recipe_conda_build_config):
                config.variant_config_files.append(recipe_conda_build_config)

            config.channel_urls = extra_channels + command.channels + build_config_data.get(
                'channels', [])

            _set_local_src_dir(local_src_dir, recipe, recipe_config_file)
            try:
                conda_build.api.build(os.path.join(os.getcwd(), recipe['path']),
                                      config=config)
            except Exception as exc: # pylint: disable=broad-except
                traceback.print_exc()
                # BUG FIX: os.getcwd was previously passed uncalled, embedding the
                # function object (not the directory) in the error message.
                raise OpenCEError(
                    Error.BUILD_RECIPE,
                    recipe['name'] if 'name' in recipe else os.getcwd(),
                    str(exc)) from exc

    if saved_working_directory:
        os.chdir(saved_working_directory)
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None,
                       config=None, **kwargs):
    """Create a metapackage (an empty package that exists only for its
    dependencies and metadata), forwarding to the metapackage module."""
    from .metapackage import create_metapackage as _create_metapackage

    config = get_or_merge_config(config, **kwargs)
    return _create_metapackage(name=name,
                               version=version,
                               entry_points=entry_points,
                               build_string=build_string,
                               build_number=build_number,
                               dependencies=dependencies,
                               home=home,
                               license_name=license_name,
                               summary=summary,
                               config=config)
def _get_configs():
    """Load the feedstock's package config and a conda-build Config seeded with
    the default (plus any feedstock-local) conda_build_config files."""
    build_config_data, _ = build_feedstock.load_package_config()

    config = get_or_merge_config(None)
    config.verbose = False
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]

    recipe_cbc = build_feedstock.get_conda_build_config()
    if recipe_cbc:
        config.variant_config_files.append(recipe_cbc)

    return build_config_data, config
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False,
                config=None, **kwargs):
    """Generate a conda recipe from an external repo.

    Translates metadata from external sources into expected conda recipe format.

    :param packages: package name(s) to generate recipes for
    :param repo: name of the skeleton backend module (e.g. 'pypi', 'cran')
    :return: whatever the backend's skeletonize() returns
    """
    # config attributes win over the explicit argument for version
    version = getattr(config, "version", version)
    if version:
        kwargs.update({'version': version})
    if recursive:
        kwargs.update({'recursive': recursive})
    if output_dir != ".":
        output_dir = expanduser(output_dir)
        kwargs.update({'output_dir': output_dir})

    # here we're dumping all extra kwargs as attributes on the config object. We'll extract
    # only relevant ones below
    config = get_or_merge_config(config, **kwargs)
    config.compute_build_id('skeleton')
    packages = _ensure_list(packages)

    # This is a little bit of black magic. The idea is that for any keyword argument that
    # we inspect from the given module's skeletonize function, we should hoist the argument
    # off of the config object, and pass it as a keyword argument. This is sort of the
    # inverse of what we do in the CLI code - there we take CLI arguments and dangle them
    # all on the config object as attributes.
    module = getattr(
        __import__("conda_build.skeletons",
                   globals=globals(),
                   locals=locals(),
                   fromlist=[repo]), repo)

    func_args = module.skeletonize.__code__.co_varnames
    kwargs = {
        name: getattr(config, name)
        for name in dir(config) if name in func_args
    }
    # NOTE: a former ``kwargs.update({name: value for name, value in kwargs.items()
    # if name in func_args})`` line was a no-op (it updated kwargs from itself)
    # and has been removed.

    # strip out local arguments that we pass directly
    for arg in skeletonize.__code__.co_varnames:
        if arg in kwargs:
            del kwargs[arg]

    with config:
        skeleton_return = module.skeletonize(packages,
                                             output_dir=output_dir,
                                             version=version,
                                             recursive=recursive,
                                             config=config,
                                             **kwargs)
    return skeleton_return
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, variants=None, stats=None,
          **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default.  notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")

    config = get_or_merge_config(config, **kwargs)

    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}

    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # validate inputs early: anything that is neither a path nor metadata is an error
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError("Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [p for p in recipe_paths_or_metadata if isinstance(p, string_types)]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # only directories and meta.yaml/conda.yaml files can be recipe locations
        if (os.path.isdir(recipe) or
                (os.path.isfile(recipe) and
                 os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
            try:
                recipes.append(find_recipe(recipe))
            except IOError:
                continue
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            absolute_recipes.append(recipe)
        else:
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError("Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)

    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    # recipes are sorted for a deterministic build order
    return build_tree(sorted(absolute_recipes), config, stats, build_only=build_only,
                      post=post, notest=notest, variants=variants)
def build(recipe_paths_or_metadata, post=None, need_source_download=True,
          build_only=False, notest=False, config=None, variants=None, stats=None,
          **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test.

    Accepts a single item or a list mixing recipe paths/globs (strings) and
    rendered metadata objects (anything exposing a ``config`` attribute).
    Returns whatever ``build_tree`` returns.
    """
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    # post is tri-state: None = full build, True = post-process only,
    # False = stop before post-processing.
    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")

    config = get_or_merge_config(config, **kwargs)

    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}

    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # Reject anything that is neither a path string nor a metadata object up front.
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError("Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [p for p in recipe_paths_or_metadata if isinstance(p, string_types)]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # Only directories and meta.yaml/conda.yaml files are recipe candidates;
        # candidates where find_recipe fails are silently skipped.
        if (os.path.isdir(recipe) or
                (os.path.isfile(recipe) and
                 os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
            try:
                recipes.append(find_recipe(recipe))
            except IOError:
                continue
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            absolute_recipes.append(recipe)
        else:
            # Normalize relative recipe paths against the current working directory.
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError("Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)

    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    return build_tree(absolute_recipes, config, stats,
                      build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, variants=variants)
def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs): """Check validity of input recipe path Verifies that recipe can be completely rendered, and that fields of the rendered recipe are valid fields, with some value checking. """ config = get_or_merge_config(config, **kwargs) metadata = render(recipe_path, no_download_source=no_download_source, config=config, variants=variants) return all(m[0].check_fields() for m in metadata)
def get_configs(variant, conda_build_config=None): build_config_data, _ = build_feedstock.load_package_config( variants=variant) config = get_or_merge_config(None, variant=variant) config.variant_config_files = conda_build_config if conda_build_config else [] config.verbose = False recipe_conda_build_config = build_feedstock.get_conda_build_config() if recipe_conda_build_config: config.variant_config_files.append(recipe_conda_build_config) return build_config_data, config
def get_output_file_path(recipe_path_or_metadata, no_download_source=False, config=None, **kwargs): from conda_build.render import render_recipe, bldpkg_path config = get_or_merge_config(config, **kwargs) if hasattr(recipe_path_or_metadata, 'config'): metadata = recipe_path_or_metadata else: metadata, _, _ = render_recipe(recipe_path_or_metadata, no_download_source=no_download_source, config=config) return bldpkg_path(metadata)
def execute(args, print_results=True): p, args = parse_args(args) config = get_or_merge_config(None, **args.__dict__) variants = get_package_variants(args.recipe, config, variants=args.variants) set_language_env_vars(variants) channel_urls = args.__dict__.get('channel') or args.__dict__.get( 'channels') or () config.channel_urls = [] for url in channel_urls: # allow people to specify relative or absolute paths to local channels # These channels still must follow conda rules - they must have the # appropriate platform-specific subdir (e.g. win-64) if os.path.isdir(url): if not os.path.isabs(url): url = os.path.normpath( os.path.abspath(os.path.join(os.getcwd(), url))) url = url_path(url) config.channel_urls.append(url) config.override_channels = args.override_channels if args.output: config.verbose = False config.debug = False metadata_tuples = api.render(args.recipe, config=config, no_download_source=args.no_source, variants=args.variants) if print_results: if args.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples, config=config) print('\n'.join(sorted(paths))) else: logging.basicConfig(level=logging.INFO) for (m, _, _) in metadata_tuples: print("--------------") print("Hash contents:") print("--------------") pprint(m.get_hash_contents()) print("----------") print("meta.yaml:") print("----------") print(api.output_yaml(m, args.file, suppress_outputs=True)) else: return metadata_tuples
def apply_variant(self, variant, differentiating_keys=()): copied = copy.deepcopy(self) copied.variant = variant for idx, r in enumerate(self.requirements["build"]): vname = r.name.replace("-", "_") if vname in variant: copied.requirements["build"][idx] = CondaBuildSpec( r.name + " " + variant[vname] ) copied.requirements["build"][idx].from_pinnings = True for idx, r in enumerate(self.requirements["host"]): vname = r.name.replace("-", "_") if vname in variant: copied.requirements["host"][idx] = CondaBuildSpec( r.name + " " + variant[vname] ) copied.requirements["host"][idx].from_pinnings = True # todo figure out if we should pin like that in the run reqs as well? # for idx, r in enumerate(self.requirements["run"]): # vname = r.name.replace("-", "_") # if vname in variant: # copied.requirements["run"][idx] = CondaBuildSpec( # r.name + " " + variant[vname] # ) # copied.requirements["run"][idx].from_pinnings = True # insert compiler_cxx, compiler_c and compiler_fortran for idx, r in enumerate(self.requirements["build"]): if r.name.startswith("COMPILER_"): lang = r.splitted[1].lower() if variant.get(lang + "_compiler"): compiler = ( f"{variant[lang + '_compiler']}_{variant['target_platform']}" ) if variant.get(lang + "_compiler_version"): version = variant[lang + "_compiler_version"] copied.requirements["build"][idx].final = f"{compiler} {version}*" else: copied.requirements["build"][idx].final = f"{compiler}" copied.requirements["build"][idx].from_pinnings = True for r in self.requirements["host"]: if r.name.startswith("COMPILER_"): raise RuntimeError("Compiler should be in build section") copied.config = get_or_merge_config(self.config, variant=variant) copied.differentiating_variant = [] for k in differentiating_keys: copied.differentiating_variant.append(variant[k]) return copied
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False, notest=False, config=None, **kwargs): import os from conda_build.build import build_tree from conda_build.conda_interface import string_types from conda_build.utils import find_recipe assert post in (None, True, False), ( "post must be boolean or None. Remember, you must pass " "other arguments (config) by keyword.") config = get_or_merge_config(config, **kwargs) recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata) for recipe in recipe_paths_or_metadata: if not any( (hasattr(recipe, "config"), isinstance(recipe, string_types))): raise ValueError( "Recipe passed was unrecognized object: {}".format(recipe)) string_paths = [ p for p in recipe_paths_or_metadata if isinstance(p, string_types) ] paths = _expand_globs(string_paths, os.getcwd()) recipes = [] for recipe in paths: try: recipes.append(find_recipe(recipe)) except IOError: pass metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')] recipes.extend(metadata) absolute_recipes = [] for recipe in recipes: if hasattr(recipe, "config"): absolute_recipes.append(recipe) else: if not os.path.isabs(recipe): recipe = os.path.normpath(os.path.join(os.getcwd(), recipe)) if not os.path.exists(recipe): raise ValueError( "Path to recipe did not exist: {}".format(recipe)) absolute_recipes.append(recipe) return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest, need_source_download=need_source_download, config=config)
def build_feedstock(args_string=None): ''' Entry function. ''' parser = make_parser() args = parser.parse_args(args_string) saved_working_directory = None if args.working_directory: saved_working_directory = os.getcwd() os.chdir(os.path.abspath(args.working_directory)) build_config_data, recipe_config_file = load_package_config( args.recipe_config_file) args.recipes = utils.parse_arg_list(args.recipe_list) # Build each recipe for recipe in build_config_data['recipes']: if args.recipes and recipe['name'] not in args.recipes: continue config = get_or_merge_config(None) config.skip_existing = True config.output_folder = args.output_folder config.variant_config_files = [args.conda_build_config] recipe_conda_build_config = os.path.join(os.getcwd(), "config", "conda_build_config.yaml") if os.path.exists(recipe_conda_build_config): config.variant_config_files.append(recipe_conda_build_config) config.channel_urls = args.channels_list + build_config_data.get( 'channels', []) _set_local_src_dir(args.local_src_dir, recipe, recipe_config_file) try: for variant in utils.make_variants(args.python_versions, args.build_types, args.mpi_types): conda_build.api.build(os.path.join(os.getcwd(), recipe['path']), config=config, variants=variant) except Exception as exc: # pylint: disable=broad-except traceback.print_exc() raise OpenCEError( Error.BUILD_RECIPE, recipe['name'] if 'name' in recipe else os.getcwd, str(exc)) from exc if saved_working_directory: os.chdir(saved_working_directory)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False, notest=False, config=None, variants=None, stats=None, **kwargs): """Run the build step. If recipe paths are provided, renders recipe before building. Tests built packages by default. notest=True to skip test.""" import os from conda_build.build import build_tree from conda_build.conda_interface import string_types from conda_build.utils import find_recipe assert post in (None, True, False), ( "post must be boolean or None. Remember, you must pass " "other arguments (config) by keyword.") recipes = [] for recipe in _ensure_list(recipe_paths_or_metadata): if isinstance(recipe, string_types): for recipe in _expand_globs(recipe, os.getcwd()): try: recipe = find_recipe(recipe) except IOError: continue recipes.append(recipe) elif hasattr(recipe, "config"): recipes.append(recipe) else: raise ValueError( "Recipe passed was unrecognized object: {}".format(recipe)) if not recipes: raise ValueError('No valid recipes found for input: {}'.format( recipe_paths_or_metadata)) return build_tree( sorted(recipes), config=get_or_merge_config(config, **kwargs), # If people don't pass in an object to capture stats in, they won't get them returned. # We'll still track them, though. stats=stats or {}, build_only=build_only, post=post, notest=notest, variants=variants)
def execute(args, print_results=True): p, args = parse_args(args) config = get_or_merge_config(None, **args.__dict__) variants = get_package_variants(args.recipe, config, variants=args.variants) set_language_env_vars(variants) config.channel_urls = get_channel_urls(args.__dict__) config.override_channels = args.override_channels if args.output: config.verbose = False config.debug = False metadata_tuples = api.render(args.recipe, config=config, no_download_source=args.no_source, variants=args.variants) if args.file and len(metadata_tuples) > 1: log.warning( "Multiple variants rendered. " "Only one will be written to the file you specified ({}).".format( args.file)) if print_results: if args.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples, config=config) print('\n'.join(sorted(paths))) if args.file: m = metadata_tuples[-1][0] api.output_yaml(m, args.file, suppress_outputs=True) else: logging.basicConfig(level=logging.INFO) for (m, _, _) in metadata_tuples: print("--------------") print("Hash contents:") print("--------------") pprint(m.get_hash_contents()) print("----------") print("meta.yaml:") print("----------") print(api.output_yaml(m, args.file, suppress_outputs=True)) else: return metadata_tuples
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True,
           finalize=True, bypass_env_check=False, **kwargs):
    """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2
       templates evaluated.

    Returns a list of (metadata, needs_download, needs_reparse in env) tuples"""
    from conda_build.render import render_recipe, finalize_metadata
    from conda_build.exceptions import DependencyNeedsBuildingError
    from conda_build.conda_interface import NoPackagesFoundError
    from collections import OrderedDict
    config = get_or_merge_config(config, **kwargs)

    metadata_tuples = render_recipe(recipe_path, bypass_env_check=bypass_env_check,
                                    no_download_source=config.no_download_source,
                                    config=config, variants=variants,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    # Deduplicate outputs: keyed on (dist, target_platform, used variant values)
    # so each distinct variant/output combination is kept exactly once.
    output_metas = OrderedDict()
    for meta, download, render_in_env in metadata_tuples:
        # Skipped recipes are dropped only when config.trim_skip is set.
        if not meta.skip() or not config.trim_skip:
            for od, om in meta.get_output_metadata_set(
                    permit_unsatisfiable_variants=permit_unsatisfiable_variants,
                    permit_undefined_jinja=not finalize,
                    bypass_env_check=bypass_env_check):
                if not om.skip() or not config.trim_skip:
                    if 'type' not in od or od['type'] == 'conda':
                        if finalize and not om.final:
                            try:
                                om = finalize_metadata(om,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
                            except (DependencyNeedsBuildingError, NoPackagesFoundError):
                                # Unsatisfiable dependencies are tolerated when
                                # permitted: the un-finalized metadata is kept.
                                if not permit_unsatisfiable_variants:
                                    raise

                        # remove outputs section from output objects for simplicity
                        if not om.path and om.meta.get('outputs'):
                            om.parent_outputs = om.meta['outputs']
                            del om.meta['outputs']

                        output_metas[om.dist(), om.config.variant.get('target_platform'),
                                     tuple((var, om.config.variant[var])
                                           for var in om.get_used_vars())] = \
                            ((om, download, render_in_env))
                    else:
                        # Non-conda output types are keyed by "type: name" instead of dist.
                        output_metas["{}: {}".format(om.type, om.name()),
                                     om.config.variant.get('target_platform'),
                                     tuple((var, om.config.variant[var])
                                           for var in om.get_used_vars())] = \
                            ((om, download, render_in_env))

    return list(output_metas.values())
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs): from conda_build.build import test if hasattr(recipedir_or_package_or_metadata, 'config'): config = recipedir_or_package_or_metadata.config else: config = get_or_merge_config(config, **kwargs) with config: # This will create a new local build folder if and only if config doesn't already have one. # What this means is that if we're running a test immediately after build, we use the one # that the build already provided test_result = test(recipedir_or_package_or_metadata, config=config, move_broken=move_broken) return test_result
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs): """Generate a conda recipe from an external repo. Translates metadata from external sources into expected conda recipe format.""" config = get_or_merge_config(config, **kwargs) config.compute_build_id('skeleton') packages = _ensure_list(packages) module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(), fromlist=[repo]), repo) func_args = module.skeletonize.__code__.co_varnames kwargs = {name: value for name, value in kwargs.items() if name in func_args} with config: skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version, recursive=recursive, config=config, **kwargs) return skeleton_return
def execute(args): p, args = parse_args(args) config = get_or_merge_config(None, **args.__dict__) variants = get_package_variants(args.recipe, config) set_language_env_vars(variants) metadata_tuples = api.render(args.recipe, config=config, no_download_source=args.no_source) if args.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples) print('\n'.join(sorted(paths))) else: logging.basicConfig(level=logging.INFO) for (m, _, _) in metadata_tuples: print(api.output_yaml(m, args.file))
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False, notest=False, config=None, **kwargs): import os from conda_build.build import build_tree config = get_or_merge_config(config, **kwargs) recipes = _ensure_list(recipe_paths_or_metadata) absolute_recipes = [] for recipe in recipes: if hasattr(recipe, "config"): absolute_recipes.append(recipe) elif os.path.isabs(recipe): absolute_recipes.append(recipe) else: absolute_recipes.append(os.path.normpath(os.path.join(os.getcwd(), recipe))) return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest, need_source_download=need_source_download, config=config)
def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, **kwargs): """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2 templates evaluated. Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" from conda_build.render import render_recipe from collections import OrderedDict config = get_or_merge_config(config, **kwargs) metadata_tuples, index = render_recipe(recipe_path, no_download_source=config.no_download_source, config=config, variants=variants, permit_unsatisfiable_variants=permit_unsatisfiable_variants) output_metas = OrderedDict() for meta, download, render_in_env in metadata_tuples: for od, om in meta.get_output_metadata_set( permit_unsatisfiable_variants=permit_unsatisfiable_variants): # only show conda packages right now if 'type' not in od or od['type'] == 'conda': output_metas[om.dist()] = ((om, download, render_in_env)) return list(output_metas.values())
def execute(args, print_results=True): p, args = parse_args(args) config = get_or_merge_config(None, **args.__dict__) variants = get_package_variants(args.recipe, config, variants=args.variants) set_language_env_vars(variants) config.channel_urls = get_channel_urls(args.__dict__) config.override_channels = args.override_channels if args.output: config.verbose = False config.debug = False metadata_tuples = api.render(args.recipe, config=config, no_download_source=args.no_source, variants=args.variants) if print_results: if args.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples, config=config) print('\n'.join(sorted(paths))) else: logging.basicConfig(level=logging.INFO) for (m, _, _) in metadata_tuples: print("--------------") print("Hash contents:") print("--------------") pprint(m.get_hash_contents()) print("----------") print("meta.yaml:") print("----------") print(api.output_yaml(m, args.file, suppress_outputs=True)) else: return metadata_tuples
def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, config=None, variants=None, **kwargs): """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ from conda_build.render import bldpkg_path from conda_build.conda_interface import string_types from conda_build.utils import get_skip_message config = get_or_merge_config(config, **kwargs) if hasattr(recipe_path_or_metadata, '__iter__') and not isinstance(recipe_path_or_metadata, string_types): list_of_metas = [hasattr(item[0], 'config') for item in recipe_path_or_metadata if len(item) == 3] if list_of_metas and all(list_of_metas): metadata = recipe_path_or_metadata else: raise ValueError("received mixed list of metas: {}".format(recipe_path_or_metadata)) elif isinstance(recipe_path_or_metadata, string_types): # first, render the parent recipe (potentially multiple outputs, depending on variants). metadata = render(recipe_path_or_metadata, no_download_source=no_download_source, variants=variants, config=config, finalize=True, **kwargs) else: assert hasattr(recipe_path_or_metadata, 'config'), ("Expecting metadata object - got {}" .format(recipe_path_or_metadata)) metadata = [(recipe_path_or_metadata, None, None)] # Next, loop over outputs that each metadata defines outs = [] for (m, _, _) in metadata: if m.skip(): outs.append(get_skip_message(m)) else: outs.append(bldpkg_path(m)) return sorted(list(set(outs)))
def render(recipe_path, config=None, **kwargs): from conda_build.render import render_recipe config = get_or_merge_config(config, **kwargs) return render_recipe(recipe_path, no_download_source=config.no_download_source, config=config)
def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False,
          output_id=None, config=None, verbose=True, link_source_method='auto', **kwargs):
    """Set up either build/host or test environments, leaving you with a quick tool to debug
    your package's build or test phase.

    Returns a shell command string ("cd ... && source ...") that activates the
    provisioned environment.
    """
    from fnmatch import fnmatch
    import logging
    import os
    import time
    from conda_build.conda_interface import string_types
    from conda_build.build import test as run_test, build as run_build
    from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win, LoggingContext
    is_package = False
    default_config = get_or_merge_config(config, **kwargs)
    args = {"set_build_id": False}
    path_is_build_dir = False
    # A directory containing work*/metadata_conda_debug.yaml is an existing
    # build dir we can attach to directly, without re-rendering.
    workdirs = [os.path.join(recipe_or_package_path_or_metadata_tuples, d)
                for d in (os.listdir(recipe_or_package_path_or_metadata_tuples)
                          if os.path.isdir(recipe_or_package_path_or_metadata_tuples) else [])
                if (d.startswith('work') and
                    os.path.isdir(os.path.join(recipe_or_package_path_or_metadata_tuples, d)))]
    metadatas_conda_debug = [os.path.join(f, "metadata_conda_debug.yaml") for f in workdirs
                             if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml"))]
    metadatas_conda_debug = sorted(metadatas_conda_debug)
    if len(metadatas_conda_debug):
        path_is_build_dir = True
        path = recipe_or_package_path_or_metadata_tuples
    if not path:
        # Default debug location: a timestamped folder under croot.
        path = os.path.join(default_config.croot, "debug_{}".format(int(time.time() * 1000)))
    # _prefix_length=10 keeps _placehold padding out of environment paths.
    config = get_or_merge_config(config=default_config, croot=path,
                                 verbose=verbose, _prefix_length=10,
                                 **args)

    config.channel_urls = get_channel_urls(kwargs)

    metadata_tuples = []

    best_link_source_method = 'skip'
    if isinstance(recipe_or_package_path_or_metadata_tuples, string_types):
        if path_is_build_dir:
            # Rebuild metadata straight from the saved debug YAML files.
            for metadata_conda_debug in metadatas_conda_debug:
                best_link_source_method = 'symlink'
                from conda_build.metadata import MetaData
                metadata = MetaData(metadata_conda_debug, config, {})
                metadata_tuples.append((metadata, False, True))
        else:
            ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1]
            if not ext or not any(ext in _ for _ in CONDA_TARBALL_EXTENSIONS):
                # No tarball extension: treat the input as a recipe and render it.
                metadata_tuples = render(recipe_or_package_path_or_metadata_tuples,
                                         config=config, **kwargs)
            else:
                # this is a package, we only support testing
                test = True
                is_package = True
    else:
        metadata_tuples = recipe_or_package_path_or_metadata_tuples

    if metadata_tuples:
        outputs = get_output_file_paths(metadata_tuples)
        matched_outputs = outputs

        if output_id:
            # --output-id must narrow the outputs down to exactly one.
            matched_outputs = [_ for _ in outputs if fnmatch(os.path.basename(_), output_id)]
            if len(matched_outputs) > 1:
                raise ValueError("Specified --output-id matches more than one output ({}). Please refine your output id so that only "
                                 "a single output is found.".format(matched_outputs))
            elif not matched_outputs:
                raise ValueError("Specified --output-id did not match any outputs. Available outputs are: {} Please check it and try again".format(outputs))
        if len(matched_outputs) > 1 and not path_is_build_dir:
            raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
                             "to a single output.".format(outputs))
        else:
            matched_outputs = outputs

        target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][0]
        # make sure that none of the _placehold stuff gets added to env paths
        target_metadata.config.prefix_length = 10

    if best_link_source_method == 'symlink':
        # Symlink each recipe dir into /usr/local/src/conda/<name>-<version>
        # so debuggers can find the sources at a stable location.
        for metadata, _, _ in metadata_tuples:
            debug_source_loc = os.path.join(os.sep + 'usr', 'local', 'src', 'conda',
                                            '{}-{}'.format(metadata.get_value('package/name'),
                                                           metadata.get_value('package/version')))
            link_target = os.path.dirname(metadata.meta_path)
            try:
                dn = os.path.dirname(debug_source_loc)
                try:
                    os.makedirs(dn)
                except FileExistsError:
                    pass
                try:
                    # Best-effort removal of a stale link.
                    # NOTE(review): bare except also swallows KeyboardInterrupt;
                    # `except OSError` would be safer here.
                    os.unlink(debug_source_loc)
                except:
                    pass
                print("Making debug info source symlink: {} => {}".format(debug_source_loc,
                                                                          link_target))
                os.symlink(link_target, debug_source_loc)
            except PermissionError as e:
                raise Exception("You do not have the necessary permissions to create symlinks in {}\nerror: {}"
                                .format(dn, str(e)))
            except Exception as e:
                raise Exception("Unknown error creating symlinks in {}\nerror: {}"
                                .format(dn, str(e)))

    ext = ".bat" if on_win else ".sh"

    if verbose:
        log_context = LoggingContext()
    else:
        log_context = LoggingContext(logging.CRITICAL + 1)

    if path_is_build_dir:
        # Environment already provisioned by an earlier build; just point at it.
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir, activation_file))
    elif not test:
        # Provision the build/host environments without actually building.
        with log_context:
            run_build(target_metadata, stats={}, provision_only=True)
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir, activation_file))
    else:
        if not is_package:
            raise ValueError("Debugging for test mode is only supported for package files that already exist. "
                             "Please build your package first, then use it to create the debugging environment.")
        else:
            test_input = recipe_or_package_path_or_metadata_tuples
        # use the package to create an env and extract the test files. Stop short of running the tests.
        # tell people what steps to take next
        with log_context:
            run_test(test_input, config=config, stats={}, provision_only=True)
        activation_file = os.path.join(config.test_dir, "conda_test_env_vars" + ext)
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=config.test_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(config.test_dir, activation_file))
    return activation_string
import sys import os import yaml import glob import shutil from conda_build.config import get_or_merge_config with open(os.path.join(sys.argv[1], 'meta.yaml')) as f: name = yaml.load(f)['package']['name'] packages_dir = get_or_merge_config(None).bldpkgs_dir binary_package_glob = os.path.join(packages_dir, '{0}*.tar.bz2'.format(name)) binary_packages = glob.glob(binary_package_glob) for binary_package in binary_packages: shutil.move(binary_package, '.')
def skeletonize(packages, output_dir=".", version=None,
                meta_cpan_url="http://fastapi.metacpan.org/v1",
                recursive=False, force=False, config=None, write_core=False):
    '''
    Loops over packages, outputting conda recipes converted from CPAN metadata.

    Writes meta.yaml, build.sh and bld.bat for each package under
    output_dir/<packagename>/<version>. Core Perl modules are skipped unless
    write_core is True; with recursive=True, dependencies are queued too.
    '''
    config = get_or_merge_config(config)
    # TODO: load/use variants?
    perl_version = config.variant.get('perl', get_default_variant(config)['perl'])
    # wildcards are not valid for perl
    perl_version = perl_version.replace(".*", "")
    package_dicts = {}
    # indent/indent_core are the YAML list prefixes for active vs. commented-out
    # (core) dependency entries.
    indent = '\n - '
    indent_core = '\n #- '
    processed_packages = set()
    orig_version = version
    while packages:
        package = packages.pop()

        # If we're passed version in the same format as `PACKAGE=VERSION`
        # update version
        if '=' in package:
            package, _, version = package.partition('=')
        else:
            version = orig_version

        # Skip duplicates
        if package in processed_packages:
            continue
        processed_packages.add(package)

        # Convert modules into distributions
        orig_package = package
        package = dist_for_module(
            meta_cpan_url, package, perl_version, config=config)
        if package == 'perl':
            print(("WARNING: {0} is a Perl core module that is not developed " +
                   "outside of Perl, so we are skipping creating a recipe " +
                   "for it.").format(orig_package))
            continue
        elif package not in {orig_package, orig_package.replace('::', '-')}:
            print(
                ("WARNING: {0} was part of the {1} distribution, so we are " +
                 "making a recipe for {1} instead.").format(orig_package, package)
            )

        latest_release_data = get_release_info(meta_cpan_url, package, None,
                                               perl_version, config=config)
        packagename = perl_to_conda(package)

        # Skip duplicates
        if ((version is not None and ((packagename + '-' + version) in
                                      processed_packages)) or
                ((packagename + '-' + latest_release_data['version']) in
                 processed_packages)):
            continue

        d = package_dicts.setdefault(package, {'packagename': packagename,
                                               'run_depends': '',
                                               'build_depends': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usesha256': '',
                                               'useurl': '',
                                               'source_comment': '',
                                               'summary': "''",
                                               'import_tests': ''})

        # Fetch all metadata from CPAN
        if version is None:
            release_data = latest_release_data
        else:
            release_data = get_release_info(meta_cpan_url, package,
                                            parse_version(version),
                                            perl_version, config=config)

        # Check if recipe directory already exists
        dir_path = join(output_dir, packagename, release_data['version'])

        # Add Perl version to core module requirements, since these are empty
        # packages, unless we're newer than what's in core
        if metacpan_api_is_core_version(meta_cpan_url, package):
            if not write_core:
                print('We found core module %s. Skipping recipe creation.' %
                      packagename)
                continue

            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'
            empty_recipe = True
        # Add dependencies to d if not in core, or newer than what's in core
        else:
            build_deps, build_core_deps, run_deps, run_core_deps, packages_to_append = \
                deps_for_package(package, release_data=release_data,
                                 perl_version=perl_version,
                                 output_dir=output_dir,
                                 meta_cpan_url=meta_cpan_url,
                                 recursive=recursive, config=config)

            # Get which deps are in perl_core

            d['build_depends'] += indent.join([''] + list(build_deps |
                                                          run_deps))
            d['build_depends'] += indent_core.join([''] + list(build_core_deps |
                                                               run_core_deps))

            d['run_depends'] += indent.join([''] + list(run_deps))
            d['run_depends'] += indent_core.join([''] + list(run_core_deps))
            # Make sure we append any packages before continuing
            packages.extend(packages_to_append)
            empty_recipe = False

        # If we are recursively getting packages for a particular version
        # we need to make sure this is reset on the loop
        version = None
        if exists(dir_path) and not force:
            print(
                'Directory %s already exists and you have not specified --force ' % dir_path)
            continue
        elif exists(dir_path) and force:
            print('Directory %s already exists, but forcing recipe creation' % dir_path)

        # If this is something we're downloading, get MD5
        d['cpanurl'] = ''
        # Conda build will guess the filename
        d['filename'] = repr('')
        d['sha256'] = ''
        if release_data.get('archive'):
            d['filename'] = basename(release_data['archive'])
        if release_data.get('download_url'):
            d['cpanurl'] = release_data['download_url']
            d['sha256'], size = get_checksum_and_size(
                release_data['download_url'])
            d['filename'] = basename(release_data['download_url'])
            print("Using url %s (%s) for %s." % (d['cpanurl'], size, package))
        else:
            # No downloadable archive: comment out the source section entirely.
            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'

        try:
            d['homeurl'] = release_data['resources']['homepage']
        except KeyError:
            d['homeurl'] = 'http://metacpan.org/pod/' + package
        if 'abstract' in release_data:
            # TODO this does not escape quotes in a YAML friendly manner
            summary = repr(release_data['abstract']).lstrip('u')
            d['summary'] = summary
            # d['summary'] = repr(release_data['abstract']).lstrip('u')
        try:
            d['license'] = (release_data['license'][0] if
                            isinstance(release_data['license'], list) else
                            release_data['license'])
        except KeyError:
            d['license'] = 'perl_5'
        d['version'] = release_data['version']

        processed_packages.add(packagename + '-' + d['version'])

        # Create import tests
        module_prefix = package.replace('::', '-').split('-')[0]
        if 'provides' in release_data:
            for provided_mod in sorted(set(release_data['provides'])):
                # Filter out weird modules that don't belong
                if (provided_mod.startswith(module_prefix) and
                        '::_' not in provided_mod):
                    d['import_tests'] += indent + provided_mod
        if d['import_tests']:
            d['import_comment'] = ''
        else:
            d['import_comment'] = '# '

        if not exists(dir_path):
            makedirs(dir_path)

        # Write recipe files to a directory
        # TODO def write_recipe
        print("Writing recipe for %s-%s" % (packagename, d['version']))
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(CPAN_META.format(**d))
        with open(join(dir_path, 'build.sh'), 'w') as f:
            if empty_recipe:
                f.write('#!/bin/bash\necho "Nothing to do."\n')
            else:
                f.write(CPAN_BUILD_SH.format(**d))
        with open(join(dir_path, 'bld.bat'), 'w') as f:
            if empty_recipe:
                f.write('echo "Nothing to do."\n')
            else:
                f.write(CPAN_BLD_BAT.format(**d))