def built_package_path(recipe, env=None):
    """
    Return the path to which a recipe would be built.

    The path does not necessarily exist; this is equivalent to
    ``conda build --output recipename`` but without the subprocess.

    Parameters
    ----------
    recipe : str
        Path to the recipe directory.
    env : dict, optional
        Environment variables to expose while rendering the recipe. A copy
        is made, so the caller's dict is never mutated.

    Returns
    -------
    str
        Path of the package that would be produced.
    """
    # Fix: the original copied `env` into a new dict twice; one defensive
    # copy (which also handles env=None) is sufficient.
    env = dict(env or {})

    # Ensure CONDA_PY is an integer (needed by conda-build 2.0.4)
    py = env.get('CONDA_PY', None)
    if py is not None:
        env['CONDA_PY'] = _string_or_float_to_integer_python(py)

    with temp_env(env):
        # Disabling set_build_id prevents the creation of uniquely-named work
        # directories just for checking the output file.
        # It needs to be done within the context manager so that it sees the
        # os.environ.
        config = api.Config(no_download_source=True, set_build_id=False)
        meta = MetaData(recipe, config=config)
        meta.parse_again()
        path = api.get_output_file_path(meta, config=config)
    return path
def load_conda_build_config(platform=None, trim_skip=True):
    """
    Load conda build config while considering global pinnings from conda-forge.
    """
    config = api.Config(no_download_source=True, set_build_id=False)

    # The environment root sits two levels above the bioconda-utils executable.
    env_root = PurePath(shutil.which("bioconda-utils")).parents[1]
    # conda-forge's global pinnings are installed at the environment root.
    config.exclusive_config_file = os.path.join(env_root,
                                                "conda_build_config.yaml")
    # bioconda's own pinnings ship alongside this module.
    config.variant_config_files = [
        os.path.join(os.path.dirname(__file__),
                     'bioconda_utils-conda_build_config.yaml'),
    ]

    # Fail fast if any pinning file is missing from the installation.
    for variant_cfg in config.variant_config_files:
        assert os.path.exists(variant_cfg), ('error: {0} does not exist'.format(variant_cfg))
    assert os.path.exists(config.exclusive_config_file), (
        "error: conda_build_config.yaml not found in environment root")

    if platform:
        config.platform = platform
    config.trim_skip = trim_skip
    return config
def load_conda_build_config(platform=None, trim_skip=True):
    """
    Load the conda build config, honoring global pinnings from conda-forge.

    Parameters
    ----------
    platform : str, optional
        Platform to use, e.g. ``noarch`` or ``linux-64``. Default ``None``.
    trim_skip : bool, optional
        Value assigned to the config's ``trim_skip`` attribute. Default ``True``.

    Returns
    -------
    The conda build config object.
    """
    config = api.Config(no_download_source=True, set_build_id=False)

    ## Hardset to the bioconda_utils-conda_build_config.yaml file in the .circleci dir
    ### Will need to change this later
    in_ggd_recipes = os.path.basename(os.getcwd()) == "ggd-recipes"
    config.exclusive_config_files = (
        [os.path.join(os.getcwd(), "bioconda_utils-conda_build_config.yaml")]
        if in_ggd_recipes
        else []
    )

    # Every referenced pinning file must actually exist.
    all_config_paths = chain(config.exclusive_config_files,
                             config.variant_config_files or [])
    for config_path in all_config_paths:
        assert os.path.exists(config_path), "error: {0} does not exist".format(config_path)

    if platform:
        config.platform = platform
    config.trim_skip = trim_skip
    return config
def test_filter_recipes_extra_in_build_string():
    """
    If CONDA_EXTRA is in os.environ, the pkg name should still be identifiable.

    This helps test env vars that don't have other defaults like CONDA_PY
    does (e.g., CONDA_BOOST in bioconda)
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              number: 0
              string: {{CONDA_EXTRA}}_{{PKG_BUILDNUM}}
        """, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']

    from conda_build.render import bldpkg_path
    metadata = MetaData(recipe, api.Config(**dict(CONDA_EXTRA='asdf')))
    print(bldpkg_path(metadata, metadata.config))

    # Fix: the original left CONDA_EXTRA set in os.environ, leaking state
    # into any test that runs afterwards in the same process. Restore (or
    # remove) it once built_package_path has seen it.
    previous = os.environ.get('CONDA_EXTRA')
    os.environ['CONDA_EXTRA'] = 'asdf'
    try:
        pkg = utils.built_package_path(recipe)
    finally:
        if previous is None:
            del os.environ['CONDA_EXTRA']
        else:
            os.environ['CONDA_EXTRA'] = previous

    assert os.path.basename(pkg) == 'one-0.1-asdf_0.tar.bz2'
def test_env_creation_with_short_prefix_does_not_deadlock(testing_workdir, caplog):
    """Building with an 80-char prefix, then re-creating the env at 255 chars,
    must complete and log the rebuild warning rather than deadlock."""
    # On macOS, use /tmp directly to keep the croot path short.
    if platform.system() == 'Darwin':
        base_tmp = '/tmp'
    else:
        base_tmp = tempfile.gettempdir()
    short_config = api.Config(
        croot=os.path.join(base_tmp, 'cb'),
        anaconda_upload=False,
        verbose=True,
        set_build_id=False,
        _prefix_length=80,
    )
    rendered = api.render(os.path.join(metadata_dir, "has_prefix_files"),
                          config=short_config)[0][0]
    built_pkg = api.build(rendered)[0]
    assert not api.inspect_prefix_length(built_pkg, 255)

    short_config.prefix_length = 255
    environ.create_env(short_config.build_prefix,
                       specs_or_actions=["python", rendered.name()],
                       env='build', config=short_config, subdir=subdir)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text
def load_meta(recipe, env):
    """Load metadata for a specific environment."""
    with temp_env(env):
        # set_build_id=False prevents uniquely-named work directories from
        # being created just to render the recipe; the Config must be built
        # inside temp_env so it sees the modified os.environ.
        config = api.Config(no_download_source=True, set_build_id=False)
        metadata = MetaData(recipe, config=config)
        metadata.parse_again()
        return metadata.meta
def test_purge_all(test_metadata):
    """
    purge-all clears out build folders as well as build packages in the
    osx-64 folders and such
    """
    # Override config to be default, so that the output path lines up with
    # the default config used by main_build.
    test_metadata.config = api.Config()
    api.build(test_metadata)
    built_path = api.get_output_file_path(test_metadata)

    main_build.execute(['purge-all'])

    assert not get_build_folders(test_metadata.config.croot)
    assert not os.path.isfile(built_path)
def test_source_cache_build(testing_workdir):
    """Building with ``src_cache_root`` populates a git cache under that root."""
    recipe_dir = os.path.join(metadata_dir, 'source_git_jinja2')
    cache_config = api.Config(src_cache_root=testing_workdir)
    api.build(recipe_dir, notest=True, config=cache_config)

    git_cache_directory = '{}/git_cache'.format(testing_workdir)
    assert os.path.isdir(git_cache_directory)

    # The cache directory must contain at least one file somewhere below it.
    cached_files = []
    for _, _, names in os.walk(git_cache_directory):
        cached_files.extend(names)
    assert len(cached_files) > 0
def test_build_expands_wildcards(mocker, testing_workdir):
    """A glob pattern handed to api.build expands to every matching recipe dir."""
    build_tree = mocker.patch("conda_build.build.build_tree")
    config = api.Config()
    recipe_dirs = ['abc', 'acb']
    for dirname in recipe_dirs:
        os.makedirs(dirname)
        with open(os.path.join(dirname, 'meta.yaml'), 'w') as meta_file:
            meta_file.write('\n')

    api.build(["a*"], config=config)

    expected = [os.path.join(os.getcwd(), dirname, 'meta.yaml')
                for dirname in recipe_dirs]
    build_tree.assert_called_once_with(expected,
                                       post=None,
                                       need_source_download=True,
                                       build_only=False,
                                       notest=False,
                                       config=config)
def test_catch_openssl_legacy_short_prefix_error(test_metadata, caplog):
    """The build should fall back to the legacy prefix and log a warning."""
    # Fix: the original created `config = api.Config(...)` and never used it;
    # merging python='2.6' into the metadata's own config is what takes effect.
    test_metadata.config = api.get_or_merge_config(test_metadata.config, python='2.6')
    # NOTE(review): this outer string is not raw, so \x00 is interpolated as
    # real NUL bytes inside `cmd` before formatting — confirm that is intended.
    cmd = """
import os
prefix = os.environ['PREFIX']
fn = os.path.join(prefix, 'binary-has-prefix')
with open(fn, 'wb') as f:
    f.write(prefix.encode('utf-8') + b'\x00\x00')
"""
    test_metadata.meta['build']['script'] = 'python -c "{0}"'.format(cmd)
    api.build(test_metadata)
    # Fix: caplog.text is a property, not a callable — `caplog.text()` raises
    # TypeError before the membership check can run.
    assert "Falling back to legacy prefix" in caplog.text
def test_env_creation_with_short_prefix_does_not_deadlock(
        testing_workdir, caplog):
    """Building with an 80-char prefix, then re-creating the env at 255 chars,
    must complete and log the rebuild warning rather than deadlock."""
    config = api.Config(croot=testing_workdir, anaconda_upload=False,
                        verbose=True, set_build_id=False, _prefix_length=80)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata = api.render(recipe_path, config=config)[0][0]
    # Fix: the original wrapped this in `try: ... except: raise`, which is a
    # no-op (a bare except that immediately re-raises changes nothing) and
    # masks intent; the plain statements behave identically.
    output = api.build(metadata)[0]
    assert not api.inspect_prefix_length(output, 255)
    config.prefix_length = 255
    environ.create_env(config.build_prefix,
                       specs=["python", metadata.name()],
                       config=config, subdir=subdir)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text
def test_env_creation_with_prefix_fallback_disabled():
    """With prefix_length_fallback off, a too-short prefix must raise
    rather than silently fall back."""
    test_base = os.path.expanduser("~/cbtmp")
    no_fallback_config = api.Config(croot=test_base,
                                    anaconda_upload=False,
                                    verbose=True,
                                    prefix_length_fallback=False,
                                    _prefix_length=80)
    rendered = api.render(os.path.join(metadata_dir, "has_prefix_files"),
                          config=no_fallback_config)[0][0]
    fn = api.get_output_file_path(rendered)[0]
    # Start from a clean slate if a previous run left the package behind.
    if os.path.isfile(fn):
        os.remove(fn)

    with pytest.raises((SystemExit, PaddingError, LinkError, CondaError)):
        built = api.build(rendered)[0]
        assert not api.inspect_prefix_length(built, 255)
        no_fallback_config.prefix_length = 255
        environ.create_env(no_fallback_config.build_prefix,
                           specs=["python", rendered.name()],
                           config=no_fallback_config, subdir=subdir)
def test_env_creation_with_prefix_fallback_disabled():
    """With prefix_length_fallback off, building at an 80-char prefix must
    raise instead of retrying with a shorter prefix."""
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True,
                        prefix_length_fallback=False)
    metadata, _, _ = api.render(os.path.join(metadata_dir, "has_prefix_files"),
                                config=config)
    # Rename so this test's package cannot collide with other tests' output.
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)

    config.prefix_length = 80
    with pytest.raises((SystemExit, PaddingError, LinkError)):
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix,
                         specs=["python", pkg_name],
                         config=config)
def test_inspect_prefix_length(testing_workdir, capfd):
    from conda_build import api

    # Build our own known-length package here.
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")

    config.prefix_length = 80
    outputs = api.build(recipe_path, config=config, notest=True)
    args = ['prefix-lengths'] + outputs
    # With the short prefix, inspect exits non-zero and reports offenders.
    with pytest.raises(SystemExit):
        main_inspect.execute(args)
    captured, _ = capfd.readouterr()
    assert 'Packages with binary prefixes shorter than' in captured
    for built in outputs:
        assert built in captured

    config.prefix_length = 255
    # Reset the build id so that a new one is computed.
    config._build_id = ""
    api.build(recipe_path, config=config, notest=True)
    main_inspect.execute(args)
    captured, _ = capfd.readouterr()
    assert 'No packages found with binary prefixes shorter' in captured
def test_env_creation_with_short_prefix_does_not_deadlock(caplog):
    """Building with an 80-char prefix, then re-creating the env at 255 chars,
    must complete and log the rebuild warning rather than deadlock."""
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata, _, _ = api.render(recipe_path, config=config)
    # Rename so this test's package cannot collide with other tests' output.
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80
    # Fix: the original used `try: ... except: raise ... finally:`; the bare
    # `except: raise` is a no-op, so a plain try/finally (always clean up the
    # scratch croot) expresses the same behavior without masking intent.
    try:
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix,
                         specs=["python", pkg_name],
                         config=config)
    finally:
        rm_rf(test_base)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text
def test_inspect_prefix_length(testing_workdir, capfd):
    from conda_build import api

    # Build our own known-length package here.
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    fn = api.get_output_file_path(recipe_path, config=config)
    # Start from a clean slate if a previous run left the package behind.
    if os.path.isfile(fn):
        os.remove(fn)

    config.prefix_length = 80
    api.build(recipe_path, config=config)
    args = ['prefix-lengths', fn]
    # With the short prefix, inspect exits non-zero and reports the package.
    with pytest.raises(SystemExit):
        main_inspect.execute(args)
    captured, _ = capfd.readouterr()
    assert 'Packages with binary prefixes shorter than' in captured
    assert fn in captured

    config.prefix_length = 255
    api.build(recipe_path, config=config)
    main_inspect.execute(args)
    captured, _ = capfd.readouterr()
    assert 'No packages found with binary prefixes shorter' in captured
import logging
import os
import pkg_resources
import re
import subprocess

import networkx as nx
from conda_build import api, conda_interface
from conda_build.metadata import find_recipe, MetaData
from conda_build.utils import HashableDict

log = logging.getLogger(__file__)
# Optional override for where conda-build caches sources/work dirs.
CONDA_BUILD_CACHE = os.environ.get("CONDA_BUILD_CACHE")

# Length of the hash fragment conda-build embeds in build strings.
hash_length = api.Config().hash_length


def package_key(metadata, worker_label, run='build'):
    """Start assembling a unique key for a rendered package/variant.

    NOTE(review): `worker_label` and `run` are unused in the visible body and
    the function ends without a return — it appears truncated here; confirm
    against the upstream source.
    """
    # get the build string from whatever conda-build makes of the configuration
    used_loop_vars = metadata.get_used_loop_vars()
    build_vars = '-'.join([k + '_' + str(metadata.config.variant[k])
                           for k in used_loop_vars if k != 'target_platform'])
    # kind of a special case. Target platform determines a lot of output behavior, but may not be
    # explicitly listed in the recipe.
    tp = metadata.config.variant.get('target_platform')
    if tp and tp != metadata.config.subdir and 'target_platform' not in build_vars:
        build_vars += '-target_' + tp
    key = [metadata.name(), metadata.version()]
def parse_args(args):
    """Build the ``conda inspect`` argument parser and parse *args*.

    Returns the parser together with the parsed namespace so callers can
    print help or dispatch on ``args.subcommand``.
    """
    p = ArgumentParser(
        description='Tools for inspecting conda packages.',
        epilog="""Run --help on the subcommands like 'conda inspect linkages --help'
to see the options available.""",
    )
    subcommand = p.add_subparsers(dest='subcommand', )

    linkages_help = """
Investigates linkages of binary libraries in a package (works in Linux and
OS X). This is an advanced command to aid building packages that link
against C libraries. Aggregates the output of ldd (on Linux) and otool -L
(on OS X) by dependent packages. Useful for finding broken links, or links
against system libraries that ought to be dependent conda packages."""
    linkages = subcommand.add_parser(
        "linkages",
        # help controls conda inspect -h and description controls conda
        # inspect linkages -h
        help=linkages_help,
        description=linkages_help,
    )
    linkages.add_argument(
        'packages',
        action='store',
        nargs='*',
        help='Conda packages to inspect.',
    )
    linkages.add_argument(
        '--untracked',
        action='store_true',
        help="""Inspect the untracked files in the environment. This is useful when used
in conjunction with conda build --build-only.""",
    )
    linkages.add_argument(
        '--show-files',
        action="store_true",
        help="Show the files in the package that link to each library",
    )
    linkages.add_argument(
        '--groupby',
        action='store',
        default='package',
        choices=('package', 'dependency'),
        help="""Attribute to group by (default: %(default)s). Useful when used
in conjunction with --all.""",
    )
    linkages.add_argument(
        '--sysroot',
        action='store',
        help='System root in which to look for system libraries.',
        default='',
    )
    linkages.add_argument(
        '--all',
        action='store_true',
        help="Generate a report for all packages in the environment.",
    )
    add_parser_prefix(linkages)

    objects_help = """
Investigate binary object files in a package (only works on OS X). This is
an advanced command to aid building packages that have compiled
libraries. Aggregates the output of otool on all the binary object files
in a package."""
    objects = subcommand.add_parser(
        "objects",
        help=objects_help,
        description=objects_help,
    )
    objects.add_argument(
        'packages',
        action='store',
        nargs='*',
        help='Conda packages to inspect.',
    )
    objects.add_argument(
        '--untracked',
        action='store_true',
        help="""Inspect the untracked files in the environment. This is useful when used
in conjunction with conda build --build-only.""",
    )
    # TODO: Allow groupby to include the package (like for --all)
    objects.add_argument(
        '--groupby',
        action='store',
        default='filename',
        choices=('filename', 'filetype', 'rpath'),
        help='Attribute to group by (default: %(default)s).',
    )
    objects.add_argument(
        '--all',
        action='store_true',
        help="Generate a report for all packages in the environment.",
    )
    add_parser_prefix(objects)

    channels_help = """
Tools for investigating conda channels."""
    channels = subcommand.add_parser(
        "channels",
        help=channels_help,
        description=channels_help,
    )
    channels.add_argument(
        '--verbose',
        action='store_true',
        help="""Show verbose output. Note that error output to stderr will
always be shown regardless of this flag. """,
    )
    channels.add_argument(
        '--test-installable', '-t',
        action='store_true',
        help="""Test every package in the channel to see if it is installable
by conda.""",
    )
    channels.add_argument(
        "channel",
        nargs='?',
        default="defaults",
        help="The channel to test. The default is %(default)s.")

    prefix_lengths = subcommand.add_parser(
        "prefix-lengths",
        help="""Inspect packages in given path, finding those with binary
prefixes shorter than specified""",
        # NOTE(review): description reuses linkages_help — looks like a
        # copy/paste; confirm this is intended.
        description=linkages_help,
    )
    prefix_lengths.add_argument(
        'packages',
        action='store',
        nargs='+',
        help='Conda packages to inspect.',
    )
    prefix_lengths.add_argument(
        '--min-prefix-length', '-m',
        help='Minimum length. Only packages with prefixes below this are shown.',
        # Default comes from conda-build's own configured prefix length.
        default=api.Config().prefix_length,
        type=int,
    )

    hash_inputs = subcommand.add_parser(
        "hash-inputs",
        help="Show data used to compute hash identifier (h????) for package",
        description="Show data used to compute hash identifier (h????) for package",
    )
    hash_inputs.add_argument(
        'packages',
        action='store',
        nargs='*',
        help='Conda packages to inspect.',
    )
    args = p.parse_args(args)
    return p, args