def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, **kwargs):
    """Build one or more recipes.

    Accepts recipe paths (globs allowed) and/or rendered metadata objects;
    relative paths are resolved against the current working directory.
    """
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    config = get_or_merge_config(config, **kwargs)

    inputs = _ensure_list(recipe_paths_or_metadata)
    globbed = _expand_globs([item for item in inputs if isinstance(item, string_types)],
                            os.getcwd())
    found = []
    for candidate in globbed:
        try:
            found.append(find_recipe(candidate))
        except IOError:
            # not a recognizable recipe; skip it silently
            pass
    # metadata objects (anything with a .config attribute) pass through untouched
    found.extend(item for item in inputs if hasattr(item, 'config'))

    resolved = [entry if hasattr(entry, "config") or os.path.isabs(entry)
                else os.path.normpath(os.path.join(os.getcwd(), entry))
                for entry in found]
    return build_tree(resolved, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False):
    """Update the repodata index for each channel directory in dir_paths."""
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index

    targets = [os.path.abspath(d) for d in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        encoding = getpreferredencoding()
        targets = [d.decode(encoding) for d in targets]
    config = config or Config()
    for channel_dir in targets:
        update_index(channel_dir, force=force, check_md5=check_md5, remove=remove,
                     verbose=config.verbose, locking=config.locking,
                     timeout=config.timeout)
def convert(package_file, output_dir=".", show_imports=False, platforms=None, force=False,
            dependencies=None, verbose=False, quiet=True, dry_run=False):
    """Convert changes a package from one platform to another.  It applies only to things
    that are portable, such as pure python, or header-only C/C++ libraries."""
    from .convert import conda_convert
    platforms = _ensure_list(platforms)
    # reject unsupported inputs up front (guard clauses)
    if package_file.endswith('.whl'):
        raise RuntimeError('Conversion from wheel packages is not '
                           'implemented yet, stay tuned.')
    if not package_file.endswith('tar.bz2'):
        raise RuntimeError("cannot convert: %s" % package_file)
    return conda_convert(package_file, output_dir=output_dir, show_imports=show_imports,
                         platforms=platforms, force=force, verbose=verbose, quiet=quiet,
                         dry_run=dry_run, dependencies=dependencies)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, **kwargs):
    """Build recipes; relative paths are made absolute against the cwd,
    and rendered metadata objects are passed straight through."""
    import os
    from conda_build.build import build_tree

    config = get_or_merge_config(config, **kwargs)

    def _absolutize(item):
        # metadata objects (have a .config attr) and absolute paths go through unchanged
        if hasattr(item, "config") or os.path.isabs(item):
            return item
        return os.path.normpath(os.path.join(os.getcwd(), item))

    resolved = [_absolutize(entry) for entry in _ensure_list(recipe_paths_or_metadata)]
    return build_tree(resolved, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False,
                 channel_name=None, subdir=None, threads=None, patch_generator=None,
                 verbose=False, progress=False, hotfix_source_repo=None, **kwargs):
    """Update the repodata index for each directory in dir_paths.

    NOTE(review): config, force and remove are accepted for backward
    compatibility but are not referenced by this implementation.
    """
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    from conda_build.utils import ensure_list

    targets = [os.path.abspath(p) for p in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        encoding = getpreferredencoding()
        targets = [p.decode(encoding) for p in targets]
    subdirs = ensure_list(subdir)
    for channel_dir in targets:
        update_index(channel_dir, check_md5=check_md5, channel_name=channel_name,
                     patch_generator=patch_generator, threads=threads, verbose=verbose,
                     progress=progress, hotfix_source_repo=hotfix_source_repo,
                     subdirs=subdirs)
def develop(recipe_dir, prefix=_sys.prefix, no_pth_file=False, build_ext=False,
            clean=False, uninstall=False):
    """Install a Python package in 'development mode'.

    This works by creating a conda.pth file in site-packages."""
    from .develop import execute
    recipe_dirs = _ensure_list(recipe_dir)
    return execute(recipe_dirs, prefix, no_pth_file, build_ext, clean, uninstall)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, variants=None, stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")
    config = get_or_merge_config(config, **kwargs)
    # Callers that don't supply a stats dict still get stats tracked internally;
    # they just can't read them back afterwards.
    stats = stats if stats else {}

    inputs = _ensure_list(recipe_paths_or_metadata)
    for item in inputs:
        if not (hasattr(item, "config") or isinstance(item, string_types)):
            raise ValueError("Recipe passed was unrecognized object: {}".format(item))

    candidates = _expand_globs([i for i in inputs if isinstance(i, string_types)],
                               os.getcwd())
    located = []
    for path in candidates:
        # only directories or files literally named meta.yaml / conda.yaml qualify
        is_recipe_like = (os.path.isdir(path) or
                          (os.path.isfile(path) and
                           os.path.basename(path) in ('meta.yaml', 'conda.yaml')))
        if not is_recipe_like:
            continue
        try:
            located.append(find_recipe(path))
        except IOError:
            continue
    # rendered metadata objects pass through untouched
    located.extend(i for i in inputs if hasattr(i, 'config'))

    absolute_recipes = []
    for entry in located:
        if hasattr(entry, "config"):
            absolute_recipes.append(entry)
            continue
        resolved = entry if os.path.isabs(entry) else os.path.normpath(
            os.path.join(os.getcwd(), entry))
        if not os.path.exists(resolved):
            raise ValueError("Path to recipe did not exist: {}".format(resolved))
        absolute_recipes.append(resolved)

    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    return build_tree(absolute_recipes, config, stats, build_only=build_only, post=post,
                      notest=notest, need_source_download=need_source_download,
                      variants=variants)
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False,
                config=None, **kwargs):
    """Generate a conda recipe from an external repo.

    Translates metadata from external sources into expected conda recipe format.

    :param packages: package name(s) to generate recipes for
    :param repo: name of the skeleton backend submodule under conda_build.skeletons
    :param output_dir: directory that recipes are written into
    :param version: specific upstream version to fetch (default: latest)
    :param recursive: also generate recipes for dependencies
    :param config: Config object; extra kwargs are merged onto it
    """
    version = getattr(config, "version", version)
    if version:
        kwargs.update({'version': version})
    if recursive:
        kwargs.update({'recursive': recursive})
    if output_dir != ".":
        output_dir = expanduser(output_dir)
        kwargs.update({'output_dir': output_dir})
    # here we're dumping all extra kwargs as attributes on the config object. We'll extract
    # only relevant ones below
    config = get_or_merge_config(config, **kwargs)
    config.compute_build_id('skeleton')
    packages = _ensure_list(packages)
    # This is a little bit of black magic. The idea is that for any keyword argument that
    # we inspect from the given module's skeletonize function, we should hoist the argument
    # off of the config object, and pass it as a keyword argument. This is sort of the
    # inverse of what we do in the CLI code - there we take CLI arguments and dangle them
    # all on the config object as attributes.
    module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(),
                                fromlist=[repo]),
                     repo)
    func_args = module.skeletonize.__code__.co_varnames
    kwargs = {name: getattr(config, name) for name in dir(config) if name in func_args}
    # (A previous revision re-updated kwargs here with a filtered copy of itself,
    # which was a no-op; that dead statement has been removed.)
    # strip out local arguments that we pass directly
    for arg in skeletonize.__code__.co_varnames:
        if arg in kwargs:
            del kwargs[arg]
    with config:
        skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version,
                                             recursive=recursive, config=config, **kwargs)
    return skeleton_return
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, variants=None, stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test.

    NOTE(review): need_source_download is accepted but not forwarded to
    build_tree in this revision -- confirm whether it is intentionally
    retained only for backward compatibility.
    """
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe
    assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass "
                                         "other arguments (config) by keyword.")
    config = get_or_merge_config(config, **kwargs)
    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}
    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # inputs must be either path strings or rendered metadata (has .config)
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError("Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [p for p in recipe_paths_or_metadata if isinstance(p, string_types)]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        # only directories, or files literally named meta.yaml / conda.yaml, qualify
        if (os.path.isdir(recipe) or
                (os.path.isfile(recipe) and
                 os.path.basename(recipe) in ('meta.yaml', 'conda.yaml'))):
            try:
                recipes.append(find_recipe(recipe))
            except IOError:
                # glob matched something that is not a recipe; skip it
                continue
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            # rendered metadata objects pass through untouched
            absolute_recipes.append(recipe)
        else:
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError("Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)
    if not absolute_recipes:
        raise ValueError('No valid recipes found for input: {}'.format(recipe_paths_or_metadata))
    # sorted() gives a deterministic build order for path inputs
    return build_tree(sorted(absolute_recipes), config, stats,
                      build_only=build_only, post=post, notest=notest, variants=variants)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, **kwargs):
    """Build one or more recipes.

    Accepts recipe path strings (globs allowed) and/or rendered metadata
    objects (anything with a .config attribute); extra kwargs are merged
    onto the config object.
    """
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe
    assert post in (None, True, False), (
        "post must be boolean or None. Remember, you must pass "
        "other arguments (config) by keyword.")
    config = get_or_merge_config(config, **kwargs)
    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # inputs must be either path strings or metadata objects
    for recipe in recipe_paths_or_metadata:
        if not any((hasattr(recipe, "config"), isinstance(recipe, string_types))):
            raise ValueError(
                "Recipe passed was unrecognized object: {}".format(recipe))
    string_paths = [
        p for p in recipe_paths_or_metadata if isinstance(p, string_types)
    ]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths:
        try:
            recipes.append(find_recipe(recipe))
        except IOError:
            # glob matched something that is not a recipe; skip it silently
            pass
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        if hasattr(recipe, "config"):
            # rendered metadata passes through untouched
            absolute_recipes.append(recipe)
        else:
            if not os.path.isabs(recipe):
                recipe = os.path.normpath(os.path.join(os.getcwd(), recipe))
            if not os.path.exists(recipe):
                raise ValueError(
                    "Path to recipe did not exist: {}".format(recipe))
            absolute_recipes.append(recipe)
    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def inspect_prefix_length(packages, min_prefix_length=_prefix_length):
    """Report packages whose embedded binary prefix is shorter than
    min_prefix_length.  Returns True when every package is long enough."""
    from conda_build.tarcheck import check_prefix_lengths
    short = check_prefix_lengths(_ensure_list(packages), min_prefix_length)
    if not short:
        print("No packages found with binary prefixes shorter than %d characters." %
              min_prefix_length)
    else:
        print("Packages with binary prefixes shorter than %d characters:" % min_prefix_length)
        for filename, prefix_len in short.items():
            print("{0} ({1} chars)".format(filename, prefix_len))
    return len(short) == 0
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, variants=None, stats=None, **kwargs):
    """Run the build step.

    If recipe paths are provided, renders recipe before building.
    Tests built packages by default. notest=True to skip test."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe

    assert post in (None, True, False), (
        "post must be boolean or None. Remember, you must pass "
        "other arguments (config) by keyword.")

    collected = []
    for item in _ensure_list(recipe_paths_or_metadata):
        if isinstance(item, string_types):
            # expand globs, then locate the actual recipe under each match;
            # matches that aren't recipes are silently skipped
            for expanded in _expand_globs(item, os.getcwd()):
                try:
                    collected.append(find_recipe(expanded))
                except IOError:
                    continue
        elif hasattr(item, "config"):
            # already-rendered metadata passes straight through
            collected.append(item)
        else:
            raise ValueError(
                "Recipe passed was unrecognized object: {}".format(item))
    if not collected:
        raise ValueError('No valid recipes found for input: {}'.format(
            recipe_paths_or_metadata))
    return build_tree(
        sorted(collected),
        config=get_or_merge_config(config, **kwargs),
        # If people don't pass in an object to capture stats in, they won't get them returned.
        # We'll still track them, though.
        stats=stats or {},
        build_only=build_only,
        post=post,
        notest=notest,
        variants=variants)
def inspect_prefix_length(packages, min_prefix_length=_prefix_length):
    """Print which packages embed a binary prefix shorter than
    min_prefix_length; return True iff none do."""
    from conda_build.tarcheck import check_prefix_lengths
    pkgs = _ensure_list(packages)
    too_short = check_prefix_lengths(pkgs, min_prefix_length)
    if too_short:
        print("Packages with binary prefixes shorter than %d characters:" % min_prefix_length)
        for fname, nchars in too_short.items():
            print("{0} ({1} chars)".format(fname, nchars))
    else:
        print(
            "No packages found with binary prefixes shorter than %d characters."
            % min_prefix_length)
    return len(too_short) == 0
def inspect_linkages(packages, prefix=_sys.prefix, untracked=False, all_packages=False,
                     show_files=False, groupby='package'):
    """Inspect dynamic-library linkages of packages installed in a prefix."""
    from .inspect import inspect_linkages as _inspect_linkages
    return _inspect_linkages(_ensure_list(packages), prefix=prefix, untracked=untracked,
                             all_packages=all_packages, show_files=show_files,
                             groupby=groupby)
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False):
    """Update the channel index (repodata) for each directory in dir_paths."""
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index

    targets = [os.path.abspath(d) for d in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        targets = [t.decode(getpreferredencoding()) for t in targets]
    if not config:
        config = Config()
    for target in targets:
        update_index(target, config, force=force, check_md5=check_md5, remove=remove)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, **kwargs):
    """Build recipes from paths or rendered metadata objects.

    Relative paths are normalized against the current working directory;
    metadata objects (anything with a .config attribute) pass through as-is.
    """
    import os
    from conda_build.build import build_tree

    config = get_or_merge_config(config, **kwargs)
    cwd = os.getcwd()
    resolved = []
    for item in _ensure_list(recipe_paths_or_metadata):
        if hasattr(item, "config") or os.path.isabs(item):
            resolved.append(item)
        else:
            resolved.append(os.path.normpath(os.path.join(cwd, item)))
    return build_tree(resolved, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False,
                 channel_name=None, subdir=None, threads=None, patch_generator=None,
                 verbose=False, progress=False, hotfix_source_repo=None,
                 shared_format_cache=True, current_index_versions=None, **kwargs):
    """Update the package index (repodata) for each channel directory in dir_paths.

    current_index_versions may be a mapping, or a path to a YAML file that is
    loaded into one.

    NOTE(review): config, force and remove are accepted but not referenced in
    this body -- presumably kept for backward compatibility; confirm.
    """
    import yaml
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3, string_types
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
    # a string value is treated as a path to a YAML file to load
    if isinstance(current_index_versions, string_types):
        with open(current_index_versions) as f:
            current_index_versions = yaml.safe_load(f)
    for path in dir_paths:
        update_index(path, check_md5=check_md5, channel_name=channel_name,
                     patch_generator=patch_generator, threads=threads, verbose=verbose,
                     progress=progress, hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir), shared_format_cache=shared_format_cache,
                     current_index_versions=current_index_versions)
def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=False,
          notest=False, config=None, **kwargs):
    """Build one or more recipes given as path strings (globs allowed) or
    rendered metadata objects; extra kwargs are merged onto the config."""
    import os
    from conda_build.build import build_tree
    from conda_build.conda_interface import string_types
    from conda_build.utils import find_recipe
    config = get_or_merge_config(config, **kwargs)
    recipe_paths_or_metadata = _ensure_list(recipe_paths_or_metadata)
    # string entries are treated as (glob-able) paths; metadata handled below
    string_paths = [
        p for p in recipe_paths_or_metadata if isinstance(p, string_types)
    ]
    paths = _expand_globs(string_paths, os.getcwd())
    recipes = []
    for recipe in paths[:]:
        try:
            recipes.append(find_recipe(recipe))
        except IOError:
            # glob matched something that is not a recipe; skip it silently
            pass
    metadata = [m for m in recipe_paths_or_metadata if hasattr(m, 'config')]
    recipes.extend(metadata)
    absolute_recipes = []
    for recipe in recipes:
        # metadata objects and already-absolute paths pass through untouched
        if hasattr(recipe, "config") or os.path.isabs(recipe):
            absolute_recipes.append(recipe)
        else:
            absolute_recipes.append(
                os.path.normpath(os.path.join(os.getcwd(), recipe)))
    return build_tree(absolute_recipes, build_only=build_only, post=post, notest=notest,
                      need_source_download=need_source_download, config=config)
def skeletonize(packages, repo, output_dir=".", version=None, recursive=False,
                config=None, **kwargs):
    """Generate a conda recipe from an external repo.

    Translates metadata from external sources into expected conda recipe format."""
    version = getattr(config, "version", version)
    if version:
        kwargs.update({'version': version})
    if recursive:
        kwargs.update({'recursive': recursive})
    if output_dir != ".":
        output_dir = expanduser(output_dir)
        kwargs.update({'output_dir': output_dir})
    # here we're dumping all extra kwargs as attributes on the config object. We'll extract
    # only relevant ones below
    config = get_or_merge_config(config, **kwargs)
    config.compute_build_id('skeleton')
    packages = _ensure_list(packages)
    # This is a little bit of black magic. The idea is that for any keyword argument that
    # we inspect from the given module's skeletonize function, we should hoist the argument
    # off of the config object, and pass it as a keyword argument. This is sort of the
    # inverse of what we do in the CLI code - there we take CLI arguments and dangle them
    # all on the config object as attributes.
    module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(),
                                fromlist=[repo]),
                     repo)
    func_args = module.skeletonize.__code__.co_varnames
    kwargs = {name: getattr(config, name) for name in dir(config)
              if name in func_args}
    # NOTE(review): the update below rewrites kwargs with a filtered copy of
    # itself, so it is a no-op as written -- confirm whether it was meant to
    # filter a different mapping.
    kwargs.update({name: value for name, value in kwargs.items()
                   if name in func_args})
    # strip out local arguments that we pass directly
    for arg in skeletonize.__code__.co_varnames:
        if arg in kwargs:
            del kwargs[arg]
    with config:
        skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version,
                                             recursive=recursive, config=config, **kwargs)
    return skeleton_return
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False,
                 channel_name=None, subdir=None, threads=None, patch_generator=None,
                 verbose=False, progress=False, hotfix_source_repo=None,
                 current_index_versions=None, **kwargs):
    """Update repodata for each channel directory in dir_paths.

    current_index_versions may be a mapping, or a path to a YAML file
    which is loaded into one.
    """
    import yaml
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3, string_types
    from conda_build.index import update_index
    from conda_build.utils import ensure_list

    channel_dirs = [os.path.abspath(d) for d in _ensure_list(dir_paths)]
    # Python 2: paths must be unicode, not byte strings
    if not PY3:
        enc = getpreferredencoding()
        channel_dirs = [d.decode(enc) for d in channel_dirs]
    if isinstance(current_index_versions, string_types):
        # a string is a path to a YAML file describing the desired versions
        with open(current_index_versions) as fh:
            current_index_versions = yaml.safe_load(fh)
    subdirs = ensure_list(subdir)
    for channel_dir in channel_dirs:
        update_index(channel_dir, check_md5=check_md5, channel_name=channel_name,
                     patch_generator=patch_generator, threads=threads, verbose=verbose,
                     progress=progress, hotfix_source_repo=hotfix_source_repo,
                     subdirs=subdirs, current_index_versions=current_index_versions)
def inspect_objects(packages, prefix=_sys.prefix, groupby='filename'):
    """Inspect object files of packages installed in a prefix."""
    from .inspect import inspect_objects as _inspect_objects
    return _inspect_objects(_ensure_list(packages), prefix=prefix, groupby=groupby)
def inspect_linkages(packages, prefix=_sys.prefix, untracked=False, all_packages=False,
                     show_files=False, groupby='package', sysroot=''):
    """Inspect dynamic-library linkages of packages installed in a prefix."""
    from .inspect_pkg import inspect_linkages as _inspect_linkages
    pkgs = _ensure_list(packages)
    return _inspect_linkages(pkgs, prefix=prefix, untracked=untracked,
                             all_packages=all_packages, show_files=show_files,
                             groupby=groupby, sysroot=sysroot)
def inspect_objects(packages, prefix=_sys.prefix, groupby='filename'):
    """Inspect object files of packages installed in a prefix."""
    from .inspect_pkg import inspect_objects as _inspect_objects
    pkgs = _ensure_list(packages)
    return _inspect_objects(pkgs, prefix=prefix, groupby=groupby)