def test_no_satisfiable_variants_raises_error():
    recipe = os.path.join(recipe_dir, '01_basic_templating')
    with pytest.raises(exceptions.DependencyNeedsBuildingError):
        api.render(recipe, permit_unsatisfiable_variants=False)
    # the packages are not installable anyway, so this should show a warning that the recipe
    # can't be finalized
    api.render(recipe, permit_unsatisfiable_variants=True)
def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, caplog):
    recipe = os.path.join(recipe_dir, '14_variant_in_run_and_test')
    api.render(recipe, config=testing_config)

    text = caplog.text
    assert "Adding .* to spec 'pytest 3.2'" in text
    assert "Adding .* to spec 'click 6'" in text
    assert "Adding .* to spec 'pytest-cov 2.3'" not in text
    assert "Adding .* to spec 'pytest-mock 1.6'" not in text
def test_cross_recipe_with_only_build_section(testing_config):
    recipe = os.path.join(metadata_dir, '_cross_prefix_elision')
    metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0]
    assert metadata.config.host_subdir != subdir
    assert metadata.config.build_prefix == metadata.config.host_prefix
    assert metadata.config.build_is_host

    recipe = os.path.join(metadata_dir, '_cross_prefix_elision_compiler_used')
    metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0]
    assert metadata.config.host_subdir != subdir
    assert metadata.config.build_prefix != metadata.config.host_prefix
    assert not metadata.config.build_is_host
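# A minimal sketch (not part of the test suite) of the return shape these tests rely on:
# api.render returns a list of (MetaData, need_download, need_reparse_in_env) tuples, one
# per rendered variant, which is why `[0][0]` recurs - first variant, MetaData element only.
# The recipe path here is hypothetical.
from conda_build import api

metadata_tuples = api.render('path/to/some/recipe')
for m, need_download, need_reparse_in_env in metadata_tuples:
    print(m.name(), m.version())
first_metadata = metadata_tuples[0][0]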
def test_variant_with_ignore_numpy_version_reduces_matrix(numpy_version_ignored):
    # variants are defined in the yaml file in this folder.
    # there are two python versions and two numpy versions. However, because numpy is not
    # pinned, the numpy dimensions should get collapsed.
    recipe = os.path.join(recipe_dir, '03_numpy_matrix')
    metadata = api.render(recipe, variants=numpy_version_ignored, finalize=False)
    assert len(metadata) == 2, metadata
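# Hedged sketch of what a `numpy_version_ignored`-style variant dict could look like (the
# real fixture lives elsewhere in the suite): two python values and two numpy values, with
# numpy listed under ignore_version so its dimension collapses and only the two python
# variants survive.
numpy_version_ignored = {
    'python': ['2.7', '3.5'],
    'numpy': ['1.10', '1.11'],
    'ignore_version': ['numpy'],
}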
def test_token_upload(testing_workdir):
    folder_uuid = uuid.uuid4().hex
    # generated with conda_test_account user, command:
    #    anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
    args = AnacondaClientArgs(specs="conda_test_account/empty_sections_" + folder_uuid,
                              token="co-79de533f-926f-4e5e-a766-d393e33ae98f",
                              force=True)

    with pytest.raises(NotFound):
        show.main(args)

    metadata, _, _ = api.render(empty_sections)
    metadata.meta['package']['name'] = '_'.join([metadata.name(), folder_uuid])
    metadata.config.token = args.token

    # the folder with the test recipe to upload
    api.build(metadata)

    # make sure that the package is available (should raise if it doesn't)
    show.main(args)

    # clean up - we don't actually want this package to exist
    remove.main(args)

    # verify cleanup:
    with pytest.raises(NotFound):
        show.main(args)
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            config.verbose = False
            config.debug = False
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
def test_pin_subpackage_exact(testing_config):
    recipe = os.path.join(metadata_dir, '_pin_subpackage_exact')
    ms = api.render(recipe, config=testing_config)
    assert any(re.match(r'run_exports_subpkg 1.0 h[a-f0-9]{%s}_0' % testing_config.hash_length, req)
               for (m, _, _) in ms for req in m.meta['requirements']['run'])
    api.build(recipe, config=testing_config)
def test_exclusive_config_files(testing_workdir):
    with open('conda_build_config.yaml', 'w') as f:
        yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False)
    os.makedirs('config_dir')
    with open(os.path.join('config_dir', 'config-0.yaml'), 'w') as f:
        yaml.dump({'abc': ['super_0'], 'exclusive_0': ['0'], 'exclusive_both': ['0']},
                  f, default_flow_style=False)
    with open(os.path.join('config_dir', 'config-1.yaml'), 'w') as f:
        yaml.dump({'abc': ['super_1'], 'exclusive_1': ['1'], 'exclusive_both': ['1']},
                  f, default_flow_style=False)
    exclusive_config_files = (
        os.path.join('config_dir', 'config-0.yaml'),
        os.path.join('config_dir', 'config-1.yaml'),
    )
    output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'),
                        exclusive_config_files=exclusive_config_files)[0][0]
    variant = output.config.variant
    # is cwd ignored?
    assert 'cwd' not in variant
    # did we load the exclusive configs?
    assert variant['exclusive_0'] == '0'
    assert variant['exclusive_1'] == '1'
    # does the later exclusive config override the initial one?
    assert variant['exclusive_both'] == '1'
    # does recipe config override exclusive?
    assert 'unique_to_recipe' in variant
    assert variant['abc'] == '123'
def test_variant_with_numpy_pinned_has_matrix():
    # variants are defined in the yaml file in this folder.
    # there are two python versions and two numpy versions. Because numpy *is* pinned here,
    # its dimension is retained, giving the full 2x2 matrix.
    recipe = os.path.join(recipe_dir, '04_numpy_matrix_pinned')
    metadata = api.render(recipe)
    assert len(metadata) == 4
def test_native_compiler_metadata_win(testing_config, py_ver, mocker):
    testing_config.platform = 'win'
    metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'),
                          config=testing_config,
                          variants={'python': py_ver[0], 'target_platform': 'win-x86_64'},
                          permit_unsatisfiable_variants=True, finalize=False)[0][0]
    assert any(dep.startswith(py_ver[1]) for dep in metadata.meta['requirements']['build'])
def test_intradep_with_templated_output_name(testing_config):
    recipe = os.path.join(subpackage_dir, '_intradep_with_templated_output_name')
    metadata = api.render(recipe, config=testing_config)
    assert len(metadata) == 3
    expected_names = {'test_templated_subpackage_name', 'templated_subpackage_nameabc',
                      'depends_on_templated'}
    assert {m.name() for (m, _, _) in metadata} == expected_names
def test_about_metadata(testing_config):
    recipe = os.path.join(subpackage_dir, '_about_metadata')
    metadata = api.render(recipe, config=testing_config)
    assert len(metadata) == 2
    for m, _, _ in metadata:
        if m.name() == 'abc':
            assert 'summary' in m.meta['about']
            assert m.meta['about']['summary'] == 'weee'
            assert 'home' not in m.meta['about']
        elif m.name() == 'def':
            assert 'home' in m.meta['about']
            assert 'summary' not in m.meta['about']
            assert m.meta['about']['home'] == 'http://not.a.url'
    outs = api.build(recipe, config=testing_config)
    for out in outs:
        about_meta = utils.package_has_file(out, 'info/about.json')
        assert about_meta
        info = json.loads(about_meta)
        if os.path.basename(out).startswith('abc'):
            assert 'summary' in info
            assert info['summary'] == 'weee'
            assert 'home' not in info
        elif os.path.basename(out).startswith('def'):
            assert 'home' in info
            assert 'summary' not in info
            assert info['home'] == 'http://not.a.url'
def test_numpy_setup_py_data(testing_config):
    recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data')
    _hash = api.render(recipe_path, config=testing_config,
                       numpy="1.11")[0][0]._hash_dependencies()
    assert os.path.basename(api.get_output_file_path(recipe_path,
                                                     config=testing_config,
                                                     numpy="1.11")[0]) == \
        "load_setup_py_test-1.0a1-py{0}{1}np111{2}_1.tar.bz2".format(
            sys.version_info.major, sys.version_info.minor, _hash)
def test_git_describe_info_on_branch(testing_config):
    recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch")
    output = api.get_output_file_path(recipe_path)[0]
    _hash = api.render(recipe_path, config=testing_config)[0][0]._hash_dependencies()
    test_path = os.path.join(
        sys.prefix, "conda-bld", testing_config.host_subdir,
        "git_describe_number_branch-1.20.2.0-{}_1_g82c6ba6.tar.bz2".format(_hash))
    assert test_path == output
def test_skeleton_pypi_arguments_work(testing_workdir):
    """
    This checks both that skeleton executes without error when these options
    are specified on the command line AND that the underlying functionality
    works, as a regression test for:

    https://github.com/conda/conda-build/pull/1384
    """
    args = ['pypi', 'msumastro', '--pin-numpy']
    main_skeleton.execute(args)
    assert os.path.isdir('msumastro')

    # Deliberately bypass metadata reading in conda build to get as
    # close to the "ground truth" as possible.
    with open(os.path.join('msumastro', 'meta.yaml')) as f:
        assert f.read().count('numpy x.x') == 2

    args = ['pypi', 'photutils', '--version=0.2.2', '--setup-options=--offline']
    main_skeleton.execute(args)
    assert os.path.isdir('photutils')
    # Check that the setup option occurs in bld.bat and build.sh.
    m = api.render('photutils')[0][0]
    assert '--offline' in m.meta['build']['script']
    assert m.version() == '0.2.2'
def output_action(recipe, config):
    with LoggingContext(logging.CRITICAL + 1):
        metadata, _, _ = api.render(recipe, config=config)
        if metadata.skip():
            print_skip_message(metadata)
        else:
            print(bldpkg_path(metadata))
def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly():
    recipe = os.path.join(metadata_dir, '_self_reference_run_exports')
    m = api.render(recipe)[0][0]
    run_exports = m.meta.get('build', {}).get('run_exports', [])
    assert run_exports
    assert len(run_exports) == 1
    assert run_exports[0].split()[1] == '>=1.0.0,<2.0a0'
def test_pypi_with_extra_specs(testing_workdir):
    # regression test for https://github.com/conda/conda-build/issues/1697
    api.skeletonize('bigfile', 'pypi', extra_specs=["cython", "mpi4py"],
                    version='0.1.24', python="3.6")
    m = api.render('bigfile')[0][0]
    assert parse_version(m.version()) == parse_version("0.1.24")
    assert any('cython' in req for req in m.meta['requirements']['host'])
    assert any('mpi4py' in req for req in m.meta['requirements']['host'])
def test_setting_condarc_vars_with_env_var_expansion(testing_workdir):
    os.makedirs('config')
    # python won't be used - the stuff in the recipe folder will override it
    python_versions = ['2.6', '3.4', '3.10']
    config = {'python': python_versions, 'bzip2': ['0.9', '1.0']}
    with open(os.path.join('config', 'conda_build_config.yaml'), 'w') as f:
        yaml.dump(config, f, default_flow_style=False)

    cc_conda_build_backup = cc_conda_build.copy()
    # hacky equivalent of changing condarc
    # careful, this is global and affects other tests!  make sure to clear it!
    cc_conda_build.update({'config_file': '${TEST_WORKDIR}/config/conda_build_config.yaml'})

    os.environ['TEST_WORKDIR'] = testing_workdir
    try:
        m = api.render(os.path.join(thisdir, 'test-recipes', 'variants', '19_used_variables'),
                       bypass_env_check=True, finalize=False)[0][0]
        # this one should have gotten clobbered by the values in the recipe
        assert m.config.variant['python'] not in python_versions
        # this confirms that we loaded the config file correctly
        assert len(m.config.squished_variants['bzip2']) == 2
    finally:
        cc_conda_build.clear()
        cc_conda_build.update(cc_conda_build_backup)
def test_host_entries_finalized(testing_config):
    recipe = os.path.join(metadata_dir, '_host_entries_finalized')
    metadata = api.render(recipe, config=testing_config)
    assert len(metadata) == 2
    outputs = api.get_output_file_paths(recipe, config=testing_config)
    assert any('py27' in out for out in outputs)
    assert any('py36' in out for out in outputs)
def output_action(recipe, config):
    silence_loggers(show_warnings_and_errors=False)
    metadata, _, _ = api.render(recipe, config=config)
    if metadata.skip():
        print_skip_message(metadata)
    else:
        print(bldpkg_path(metadata))
def test_pypi_with_version_inconsistency(testing_workdir, testing_config):
    # regression test for https://github.com/conda/conda-build/issues/189
    # For mpi4py:
    testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free')
    api.skeletonize('mpi4py_test', 'pypi', extra_specs=["mpi4py"],
                    version='0.0.10', python="3.6", config=testing_config)
    m = api.render('mpi4py_test')[0][0]
    assert parse_version(m.version()) == parse_version("0.0.10")
def test_native_compiler_metadata_win(testing_config, py_ver, mocker):
    testing_config.platform = 'win'
    metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'),
                          config=testing_config,
                          variants={'target_platform': 'win-x86_64'},
                          permit_unsatisfiable_variants=True, finalize=False,
                          bypass_env_check=True, python=py_ver[0])[0][0]
    # see parameterization - py_ver[1] is the compiler package name
    assert any(dep.startswith(py_ver[1]) for dep in metadata.meta['requirements']['build'])
def test_get_used_loop_vars(testing_config):
    m = api.render(os.path.join(recipe_dir, '19_used_variables'),
                   finalize=False, bypass_env_check=True)[0][0]
    # conda_build_config.yaml has 4 loop variables defined, but only 3 are used.
    #   python and zlib are both implicitly used (depend on name matching), while
    #   some_package is explicitly used as a jinja2 variable
    assert m.get_used_loop_vars() == {'python', 'some_package'}
    # these are all used vars - including those with only one value (and thus not loop vars)
    assert m.get_used_vars() == {'python', 'some_package', 'zlib', 'pthread_stubs'}
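# Hypothetical conda_build_config.yaml in the spirit of `19_used_variables` (the real file
# ships with the recipe; this only illustrates the distinction asserted above). A variable
# counts as "used" if the recipe depends on a package of the same name or references it as
# a jinja2 variable; it is a *loop* var only if it also has more than one value.
import yaml

with open('conda_build_config.yaml', 'w') as f:
    yaml.dump({
        'python': ['2.7', '3.6'],        # implicitly used via a python dependency; loop var
        'some_package': ['1.0', '2.0'],  # explicitly used as {{ some_package }}; loop var
        'zlib': ['1.2'],                 # used, but single-valued, so not a loop var
        'pthread_stubs': ['0.4'],        # likewise used with only one value
    }, f, default_flow_style=False)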
def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir):
    testing_metadata.meta['build']['string'] = 'steve'
    testing_metadata.meta['requirements']['build'] = ['zlib 1.2.8']

    api.output_yaml(testing_metadata, 'meta.yaml')
    metadata = api.render(testing_workdir)[0][0]

    assert metadata.build_id() == 'steve'
def test_output_build_path_git_source(testing_workdir, testing_config):
    recipe_path = os.path.join(metadata_dir, "source_git_jinja2")
    output = api.get_output_file_path(recipe_path, config=testing_config)[0]
    _hash = api.render(recipe_path, config=testing_config)[0][0]._hash_dependencies()
    test_path = os.path.join(
        testing_config.croot, testing_config.host_subdir,
        "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format(
            sys.version_info.major, sys.version_info.minor, _hash))
    assert output == test_path
def test_detect_variables_in_build_and_output_scripts(testing_config):
    ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'),
                    platform='linux', arch='64')
    for m, _, _ in ms:
        if m.name() == 'test_find_used_variables_in_scripts':
            used_vars = m.get_used_vars()
            assert used_vars
            assert 'SELECTOR_VAR' in used_vars
            assert 'OUTPUT_SELECTOR_VAR' not in used_vars
            assert 'BASH_VAR1' in used_vars
            assert 'BASH_VAR2' in used_vars
            assert 'BAT_VAR' not in used_vars
            assert 'OUTPUT_VAR' not in used_vars
        else:
            used_vars = m.get_used_vars()
            assert used_vars
            assert 'SELECTOR_VAR' not in used_vars
            assert 'OUTPUT_SELECTOR_VAR' in used_vars
            assert 'BASH_VAR1' not in used_vars
            assert 'BASH_VAR2' not in used_vars
            assert 'BAT_VAR' not in used_vars
            assert 'OUTPUT_VAR' in used_vars
    # on windows, we find variables in bat scripts as well as shell scripts
    ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'),
                    platform='win', arch='64')
    for m, _, _ in ms:
        if m.name() == 'test_find_used_variables_in_scripts':
            used_vars = m.get_used_vars()
            assert used_vars
            assert 'SELECTOR_VAR' in used_vars
            assert 'OUTPUT_SELECTOR_VAR' not in used_vars
            assert 'BASH_VAR1' in used_vars
            assert 'BASH_VAR2' in used_vars
            # bat is in addition to bash, not instead of
            assert 'BAT_VAR' in used_vars
            assert 'OUTPUT_VAR' not in used_vars
        else:
            used_vars = m.get_used_vars()
            assert used_vars
            assert 'SELECTOR_VAR' not in used_vars
            assert 'OUTPUT_SELECTOR_VAR' in used_vars
            assert 'BASH_VAR1' not in used_vars
            assert 'BASH_VAR2' not in used_vars
            assert 'BAT_VAR' not in used_vars
            assert 'OUTPUT_VAR' in used_vars
def test_remove_workdir_default(testing_config, caplog):
    recipe = os.path.join(metadata_dir, '_keep_work_dir')
    # make a metadata object - otherwise the build folder is computed within the build, but does
    # not alter the config object that is passed in.  This is by design - we always make copies
    # of the config object rather than edit it in place, so that variants don't clobber one
    # another
    metadata = api.render(recipe, config=testing_config)[0][0]
    api.build(metadata)
    assert not glob(os.path.join(metadata.config.work_dir, '*'))
def test_native_compiler_metadata_osx(testing_config, mocker):
    testing_config.platform = 'osx'
    metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'),
                          config=testing_config, permit_unsatisfiable_variants=True,
                          finalize=False)[0][0]
    _64 = '_64' if conda_interface.bits == 64 else ''
    assert any(dep.startswith('clang_osx-109-x86' + _64)
               for dep in metadata.meta['requirements']['build'])
    assert any(dep.startswith('clangxx_osx-109-x86' + _64)
               for dep in metadata.meta['requirements']['build'])
    assert any(dep.startswith('gfortran_osx-109-x86' + _64)
               for dep in metadata.meta['requirements']['build'])
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config, variants=args.variants)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print("--------------")
            print("Hash contents:")
            print("--------------")
            pprint(m.get_hash_contents())
            print("----------")
            print("meta.yaml:")
            print("----------")
            print(api.output_yaml(m, args.file))
def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog):
    api.output_yaml(testing_metadata, 'meta.yaml')
    metadata = api.render(testing_workdir)[0][0]
    args = ['--output', os.path.join(testing_workdir)]
    main_render.execute(args)
    _hash = metadata._hash_dependencies()
    test_path = os.path.join(
        sys.prefix, "conda-bld", testing_metadata.config.host_subdir,
        "test_render_output_build_path-1.0-py{}{}{}_1.tar.bz2".format(
            sys.version_info.major, sys.version_info.minor, _hash))
    output, error = capfd.readouterr()
    assert output.rstrip() == test_path, error
    assert error == ""
def test_build_output_build_path(testing_workdir, testing_metadata, testing_config, capfd):
    api.output_yaml(testing_metadata, 'meta.yaml')
    testing_config.verbose = False
    testing_config.debug = False
    metadata = api.render(testing_workdir, config=testing_config)[0][0]
    args = ['--output', os.path.join(testing_workdir)]
    main_build.execute(args)
    _hash = metadata._hash_dependencies()
    test_path = os.path.join(
        sys.prefix, "conda-bld", testing_config.host_subdir,
        "test_build_output_build_path-1.0-py{}{}{}_1.tar.bz2".format(
            sys.version_info.major, sys.version_info.minor, _hash))
    output, error = capfd.readouterr()
    # assert error == ""
    assert test_path in output.rstrip(), error
def test_run_exports_with_pin_compatible_in_subpackages(testing_config):
    recipe = os.path.join(metadata_dir, '_run_exports_in_outputs')
    ms = api.render(recipe, config=testing_config)
    for m, _, _ in ms:
        if m.name().startswith('gfortran_'):
            run_exports = set(m.meta.get('build', {}).get('run_exports', {}).get('strong', []))
            assert len(run_exports) == 1
            # len after splitting should be more than one because of pin_compatible.  If it's
            # only zlib, we've lost the compatibility bound info.  This is generally due to
            # lack of rendering of an output, such that the compatibility bounds just aren't
            # added in.
            assert all(len(export.split()) > 1 for export in run_exports), run_exports
def test_pypi_with_extra_specs(testing_workdir, testing_config):
    # regression test for https://github.com/conda/conda-build/issues/1697
    # For mpi4py:
    testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free')
    api.skeletonize('bigfile', 'pypi', extra_specs=["cython", "mpi4py"],
                    version='0.1.24', python="3.6", config=testing_config)
    m = api.render('bigfile')[0][0]
    assert parse_version(m.version()) == parse_version("0.1.24")
    assert any('cython' in req for req in m.meta['requirements']['host'])
    assert any('mpi4py' in req for req in m.meta['requirements']['host'])
def test_resolved_packages_recipe(testing_config):
    recipe_dir = os.path.join(metadata_dir, '_resolved_packages_host_build')
    metadata = api.render(recipe_dir, config=testing_config)[0][0]
    assert all(len(pkg.split()) == 3 for pkg in metadata.get_value('requirements/run'))
    run_requirements = set(x.split()[0] for x in metadata.get_value('requirements/run'))
    for package in [
        'curl',    # direct dependency
        'numpy',   # direct dependency
        'zlib',    # indirect dependency of curl
        'python',  # indirect dependency of numpy
    ]:
        assert package in run_requirements
def test_get_package_variants_from_dictionary_of_lists(testing_config):
    testing_config.ignore_system_config = True
    variants = global_specs.copy()
    variants['ignore_version'] = ['numpy']
    # Note: the variants dict is coming from up above: global_specs
    metadata = api.render(os.path.join(thisdir, "variant_recipe"),
                          no_download_source=False, config=testing_config,
                          variants=variants)
    # one for each Python version.  Numpy is not strictly pinned and should present only 1 dimension
    assert len(metadata) == 2
    assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata
               for req in m.meta['requirements']['run']) == 1
    assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata
               for req in m.meta['requirements']['run']) == 1
def test_pypi_with_version_inconsistency(testing_workdir, testing_config):
    # regression test for https://github.com/conda/conda-build/issues/189
    # For mpi4py:
    extra_specs = ['mpi4py']
    if not on_win:
        extra_specs.append('nomkl')
    testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free')
    api.skeletonize('mpi4py_test', 'pypi', extra_specs=extra_specs,
                    version='0.0.10', python="3.6", config=testing_config)
    m = api.render('mpi4py_test')[0][0]
    assert parse_version(m.version()) == parse_version("0.0.10")
def test_cran_license(package, license_id, license_family, license_files,
                      testing_workdir, testing_config):
    api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir,
                    config=testing_config)
    m = api.render(os.path.join(package, 'meta.yaml'))[0][0]
    m_license_id = m.get_value('about/license')
    assert m_license_id == license_id
    m_license_family = m.get_value('about/license_family')
    assert m_license_family == license_family
    m_license_files = ensure_list(m.get_value('about/license_file', ''))
    license_files = ensure_list(license_files)
    for m_license_file in m_license_files:
        assert os.path.basename(m_license_file) in license_files
def test_keep_workdir(testing_config, caplog):
    recipe = os.path.join(metadata_dir, '_keep_work_dir')
    # make a metadata object - otherwise the build folder is computed within the build, but does
    # not alter the config object that is passed in.  This is by design - we always make copies
    # of the config object rather than edit it in place, so that variants don't clobber one
    # another
    metadata = api.render(recipe, config=testing_config, dirty=True,
                          remove_work_dir=False, debug=True)[0][0]
    api.build(metadata)
    assert "Not removing work directory after build" in caplog.text
    assert glob(os.path.join(metadata.config.work_dir, '*'))
    testing_config.clean()
def test_pypi_with_setup_options(testing_workdir, testing_config):
    # The photutils package is used here because its skeleton will fail unless setup.py
    # is given the --offline flag: by default it bootstraps a helper file at build time.
    # Test that the setup option is used in constructing the skeleton.
    api.skeletonize(packages='photutils', repo='pypi', version='0.2.2',
                    setup_options='--offline', config=testing_config)

    # Check that the setup option occurs in bld.bat and build.sh.
    m = api.render('photutils')[0][0]
    assert '--offline' in m.meta['build']['script']
def _get_or_render_metadata(meta_file_or_recipe_dir, worker, finalize, config=None):
    global _rendered_recipes
    platform = worker['platform']
    arch = str(worker['arch'])
    if (meta_file_or_recipe_dir, platform, arch) not in _rendered_recipes:
        print("rendering {0} for {1}".format(meta_file_or_recipe_dir, worker['label']))
        _rendered_recipes[(meta_file_or_recipe_dir, platform, arch)] = \
            api.render(meta_file_or_recipe_dir, platform=platform, arch=arch,
                       verbose=False, permit_undefined_jinja=True,
                       bypass_env_check=True, config=config, finalize=finalize)
    return _rendered_recipes[(meta_file_or_recipe_dir, platform, arch)]
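# Hedged usage sketch for the caching helper above; the `worker` layout (platform/arch/label
# keys) is inferred from how the function reads it, and the recipe path is hypothetical.
_rendered_recipes = {}
worker = {'platform': 'linux', 'arch': 64, 'label': 'linux-64'}

# the first call renders and caches; a repeat call with the same (recipe, platform, arch)
# key returns the cached metadata tuples without re-rendering
tuples_first = _get_or_render_metadata('path/to/recipe', worker, finalize=False)
tuples_cached = _get_or_render_metadata('path/to/recipe', worker, finalize=False)
assert tuples_first is tuples_cached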
def test_exclusive_config_file(testing_workdir):
    with open('conda_build_config.yaml', 'w') as f:
        yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False)
    os.makedirs('config_dir')
    with open(os.path.join('config_dir', 'config.yaml'), 'w') as f:
        yaml.dump({'abc': ['super'], 'exclusive': ['someval']}, f, default_flow_style=False)
    output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'),
                        exclusive_config_file=os.path.join('config_dir', 'config.yaml'))[0][0]
    variant = output.config.variant
    # is cwd ignored?
    assert 'cwd' not in variant
    # did we load the exclusive config?
    assert 'exclusive' in variant
    # does recipe config override exclusive?
    assert 'unique_to_recipe' in variant
    assert variant['abc'] == '123'
def test_get_package_variants_from_file(testing_workdir, testing_config):
    with open('variant_example.yaml', 'w') as f:
        yaml.dump(global_specs, f)
    testing_config.variant_config_files = [
        os.path.join(testing_workdir, 'variant_example.yaml')
    ]
    testing_config.ignore_system_config = True
    metadata = api.render(os.path.join(thisdir, "variant_recipe"),
                          no_download_source=False, config=testing_config)
    # one for each Python version.  Numpy is not strictly pinned and should present only 1 dimension
    assert len(metadata) == 2
    assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata
               for req in m.meta['requirements']['run']) == 1
    assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata
               for req in m.meta['requirements']['run']) == 1
def test_pypi_with_basic_environment_markers(testing_workdir):
    # regression test for https://github.com/conda/conda-build/issues/1974
    api.skeletonize('coconut', 'pypi', version='1.2.2')
    m = api.render('coconut')[0][0]

    build_reqs = str(m.meta['requirements']['host'])
    run_reqs = str(m.meta['requirements']['run'])
    # should include the right dependencies for the right version
    assert "futures" not in build_reqs
    assert "futures" not in run_reqs
    if sys.version_info >= (2, 7):
        assert "pygments" in build_reqs
        assert "pygments" in run_reqs
    else:
        assert "pygments" not in build_reqs
        assert "pygments" not in run_reqs
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe, config, variants=args.variants)
    from conda_build.build import get_all_replacements
    get_all_replacements(variants)
    set_language_env_vars(variants)

    config.channel_urls = get_channel_urls(args.__dict__)
    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if args.file and len(metadata_tuples) > 1:
        log.warning("Multiple variants rendered. "
                    "Only one will be written to the file you specified ({}).".format(args.file))

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples, config=config)
                print('\n'.join(sorted(paths)))
            if args.file:
                m = metadata_tuples[-1][0]
                api.output_yaml(m, args.file, suppress_outputs=True)
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
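# Hedged usage sketch: `execute` takes argv-style arguments, so programmatic callers can
# reuse the CLI plumbing and get the rendered metadata tuples back instead of printed
# output. The recipe path is hypothetical.
metadata_tuples = execute(['path/to/recipe'], print_results=False)
for m, _, _ in metadata_tuples:
    print(m.name(), m.version())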
def test_merge_build_host_applies_in_outputs(testing_config):
    recipe = os.path.join(subpackage_dir, '_merge_build_host')
    ms = api.render(recipe, config=testing_config)
    for m, _, _ in ms:
        # top level
        if m.name() == 'test_build_host_merge':
            assert not m.meta.get('requirements', {}).get('run')
        # output
        else:
            run_exports = set(m.meta.get('build', {}).get('run_exports', []))
            assert len(run_exports) == 2
            assert all(len(export.split()) > 1 for export in run_exports)
            run_deps = set(m.meta.get('requirements', {}).get('run', []))
            assert len(run_deps) == 2
            assert all(len(dep.split()) > 1 for dep in run_deps)
    api.build(recipe, config=testing_config)
def test_native_compiler_metadata_linux(testing_config, mocker):
    testing_config.platform = 'linux'
    metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'),
                          config=testing_config, permit_unsatisfiable_variants=True,
                          finalize=False, bypass_env_check=True)[0][0]
    _64 = '64' if conda_interface.bits == 64 else '32'
    assert any(dep.startswith('gcc_linux-' + _64)
               for dep in metadata.meta['requirements']['build'])
    assert any(dep.startswith('gxx_linux-' + _64)
               for dep in metadata.meta['requirements']['build'])
    assert any(dep.startswith('gfortran_linux-' + _64)
               for dep in metadata.meta['requirements']['build'])
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            paths = api.get_output_file_path(metadata_tuples)
            print('\n'.join(paths))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
def test_env_creation_with_short_prefix_does_not_deadlock(testing_workdir, caplog):
    config = api.Config(croot=testing_workdir, anaconda_upload=False, verbose=True,
                        set_build_id=False, _prefix_length=80)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata = api.render(recipe_path, config=config)[0][0]

    output = api.build(metadata)[0]
    assert not api.inspect_prefix_length(output, 255)
    config.prefix_length = 255
    environ.create_env(config.build_prefix,
                       specs_or_actions=["python", metadata.name()],
                       config=config, subdir=subdir)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text
def test_get_output_file_path_jinja2(testing_workdir, testing_config):
    # If this test does not raise, it's an indicator that the workdir is not
    # being cleaned as it should.

    # First get metadata with a recipe that is known to need a download:
    with pytest.raises((ValueError, SystemExit)):
        build_path = api.get_output_file_path(
            os.path.join(metadata_dir, "source_git_jinja2"),
            config=testing_config, no_download_source=True)[0]

    metadata, need_download, need_reparse_in_env = api.render(
        os.path.join(metadata_dir, "source_git_jinja2"),
        config=testing_config, no_download_source=False)[0]
    build_path = api.get_output_file_path(metadata)[0]
    _hash = metadata._hash_dependencies()
    python = ''.join(metadata.config.variant['python'].split('.')[:2])
    assert build_path == os.path.join(
        testing_config.croot, testing_config.host_subdir,
        "conda-build-test-source-git-jinja2-1.20.2-"
        "py{0}{1}_0_g262d444.tar.bz2".format(python, _hash))
def test_render_output_build_path_set_python(testing_workdir, testing_metadata, capfd):
    api.output_yaml(testing_metadata, 'meta.yaml')
    # build the other major thing, whatever it is
    if sys.version_info.major == 3:
        version = "2.7"
    else:
        version = "3.5"
    metadata = api.render(testing_workdir, python=version)[0][0]

    args = ['--output', testing_workdir, '--python', version]
    main_render.execute(args)

    _hash = metadata._hash_dependencies()
    test_path = "test_render_output_build_path_set_python-1.0-py{}{}{}_1.tar.bz2".format(
        version.split('.')[0], version.split('.')[1], _hash)
    output, error = capfd.readouterr()
    assert os.path.basename(output.rstrip()) == test_path, error
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config, variants=args.variants)
    set_language_env_vars(variants)

    config.channel_urls = get_channel_urls(args.__dict__)
    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples, config=config)
                print('\n'.join(sorted(paths)))
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
def _get_source_code(recipe_dir):
    try:
        from conda_build.api import render
        from conda_build.config import Config
        from conda_build.source import provide

        # Use conda build to do all the downloading/extracting bits
        md = render(
            recipe_dir,
            config=Config(**CB_CONFIG),
            finalize=False,
            bypass_env_check=True,
        )
        if not md:
            return None
        md = md[0][0]
        # provide source dir
        return provide(md)
    except (SystemExit, Exception) as e:
        raise RuntimeError("conda build src exception:" + str(e))
def _get_source_code(recipe_dir):
    from conda_build.api import render
    from conda_build.config import Config
    from conda_build.source import provide

    # Use conda build to do all the downloading/extracting bits
    md = render(
        recipe_dir,
        config=Config(**CB_CONFIG),
        finalize=False,
        bypass_env_check=True,
    )
    if not md:
        return None
    md = md[0][0]
    # provide source dir
    try:
        return provide(md)
    except SystemExit:
        raise RuntimeError(f"Could not download source for {recipe_dir}!")
def test_variant_subkeys_retained(testing_config):
    m = api.render(os.path.join(recipe_dir, '31_variant_subkeys'),
                   finalize=False, bypass_env_check=True)[0][0]
    found_replacements = False
    for variant in m.config.variants:
        if 'replacements' in variant:
            found_replacements = True
            replacements = variant['replacements']
            assert isinstance(replacements, (dict, OrderedDict)), \
                "Found `replacements` {}, but it is not a dict".format(replacements)
            assert 'all_replacements' in replacements, \
                "Found `replacements` {}, but it doesn't contain `all_replacements`".format(
                    replacements)
            assert isinstance(replacements['all_replacements'], list), \
                "Found `all_replacements` {}, but it is not a list".format(
                    replacements['all_replacements'])
            for index, replacement in enumerate(replacements['all_replacements']):
                assert 'tag' in replacement, \
                    "Found `all_replacements[{}]` {}, but it has no `tag` key.".format(
                        index, replacement)
    assert found_replacements, "Did not find replacements"
def _filter_environment_with_metadata(build_recipe, version_dicts):
    def del_key(version_dicts, key):
        if key == 'python':
            key = 'py'
        elif key == 'numpy':
            key = 'npy'
        elif key == 'r-base':
            key = 'r'
        del version_dicts['CONDA_' + key.upper()]
        return version_dicts

    with set_conda_env_vars(version_dicts):
        metadata, _, _ = render(build_recipe)

    for name in (u'numpy', u'python', u'perl', u'lua', u'r-base'):
        for req in metadata.get_value('requirements/run'):
            if hasattr(req, 'decode'):
                req = req.decode('utf-8')
            req_parts = req.split(u' ')
            if req_parts[0] == name:
                # logic here: if a version is provided, then ignore the build matrix - except
                #   numpy.  If numpy has x.x, that is the only way that it is considered part
                #   of the build matrix.
                #
                # Break = keep the recipe (since we don't fall through to del_key for this name)
                if len(req_parts) > 1:
                    if name == 'numpy' and 'x.x' in req_parts:
                        break
                    # we have a version specified for something other than numpy.  This means
                    #   we are overriding our build matrix.  Do not consider this variable.
                    # Ignore coverage because Python optimizes the continue out, and it is
                    #   never covered.
                    continue  # pragma: no cover
                # fall through for numpy when it does not have any associated x.x
                if name == 'numpy':
                    continue
                break
        else:
            version_dicts = del_key(version_dicts, name)
    return version_dicts
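# Hedged example of the env-var dict this filter consumes, inferred from `del_key`'s
# mapping ('python' -> CONDA_PY, 'numpy' -> CONDA_NPY, 'r-base' -> CONDA_R); the values
# shown and the recipe path are illustrative only.
version_dicts = {'CONDA_PY': '36', 'CONDA_NPY': '111', 'CONDA_PERL': '5.26.0'}
# after filtering, only the variables that still participate in the recipe's build matrix
# remain; pinned-but-overridden ones have been deleted by del_key
version_dicts = _filter_environment_with_metadata('path/to/recipe', version_dicts)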
def test_env_creation_with_prefix_fallback_disabled(testing_config):
    tempdir = '/tmp' if platform.system() == 'Darwin' else tempfile.gettempdir()
    testing_config.croot = os.path.join(tempdir, 'cb')
    testing_config.anaconda_upload = False
    testing_config.prefix_length_fallback = False
    testing_config.prefix_length = 80

    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata = api.render(recipe_path, config=testing_config)[0][0]
    fn = api.get_output_file_paths(metadata)[0]
    if os.path.isfile(fn):
        os.remove(fn)

    with pytest.raises((SystemExit, PaddingError, LinkError, CondaError)):
        output = api.build(metadata)[0]
        assert not api.inspect_prefix_length(output, 255)
        testing_config.prefix_length = 255
        environ.create_env(testing_config.build_prefix,
                           specs_or_actions=["python", metadata.name()],
                           env='build', config=testing_config, subdir=subdir)