def test_zip_fields():
    """Zipped keys must vary together, forming explicit sets of combinations."""
    spec = {'python': ['2.7', '3.5'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]}
    combos = variants.dict_of_lists_to_list_of_dicts(spec)
    assert len(combos) == 2
    assert (combos[0]['python'], combos[0]['vc']) == ('2.7', '9')
    assert (combos[1]['python'], combos[1]['vc']) == ('3.5', '14')

    # duplicated values are fine, as long as the zipped lists are equal length
    spec = {'python': ['2.7', '2.7'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]}
    combos = variants.dict_of_lists_to_list_of_dicts(spec)
    assert len(combos) == 2
    assert (combos[0]['python'], combos[0]['vc']) == ('2.7', '9')
    assert (combos[1]['python'], combos[1]['vc']) == ('2.7', '14')

    # unequal list lengths within a zip group are an error
    spec = {'python': ['2.7', '3.5', '3.4'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]}
    with pytest.raises(ValueError):
        variants.dict_of_lists_to_list_of_dicts(spec)

    # a zip member that is absent altogether is tolerated; its group is ignored
    spec = {'python': ['2.7', '3.5'], 'zip_keys': [('python', 'vc')]}
    combos = variants.dict_of_lists_to_list_of_dicts(spec)
    assert len(combos) == 2
    assert all('vc' not in combo for combo in combos)
def test_variant_input_with_zip_keys_keeps_zip_keys_list():
    """Expanding a variant config must not drop its zip_keys entry."""
    config = {'scipy': ['0.17', '0.19'],
              'sqlite': ['3'],
              'zlib': ['1.2'],
              'xz': ['5'],
              'zip_keys': ['macos_min_version', 'macos_machine',
                           'MACOSX_DEPLOYMENT_TARGET', 'CONDA_BUILD_SYSROOT'],
              'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}}}
    result = variants.dict_of_lists_to_list_of_dicts(
        config, extend_keys=variants.DEFAULT_VARIANTS['extend_keys'])
    assert len(result) == 2
    first = result[0]
    assert 'zip_keys' in first and first['zip_keys']
def test_zip_fields():
    """Zipping keys together allows people to tie different versions as sets of combinations."""
    def expand(cfg):
        # small local shorthand; the function under test is the only call made
        return variants.dict_of_lists_to_list_of_dicts(cfg)

    matched = expand({
        'python': ['2.7', '3.5'],
        'vc': ['9', '14'],
        'zip_keys': [('python', 'vc')],
    })
    assert len(matched) == 2
    assert [d['python'] for d in matched] == ['2.7', '3.5']
    assert [d['vc'] for d in matched] == ['9', '14']

    # values may repeat; only the list lengths within a zip group must agree
    repeated = expand({
        'python': ['2.7', '2.7'],
        'vc': ['9', '14'],
        'zip_keys': [('python', 'vc')],
    })
    assert len(repeated) == 2
    assert [d['python'] for d in repeated] == ['2.7', '2.7']
    assert [d['vc'] for d in repeated] == ['9', '14']

    # mismatched lengths should raise an error
    with pytest.raises(ValueError):
        expand({'python': ['2.7', '3.5', '3.4'], 'vc': ['9', '14'],
                'zip_keys': [('python', 'vc')]})

    # when one zipped key is completely missing, its zip_keys group is ignored
    partial = expand({'python': ['2.7', '3.5'], 'zip_keys': [('python', 'vc')]})
    assert len(partial) == 2
    assert 'vc' not in partial[0]
    assert 'vc' not in partial[1]
def test_variant_input_with_zip_keys_keeps_zip_keys_list():
    """A fully pinned, single-entry variant list stays a single variant after expansion."""
    pinned = {
        'icu': '58',
        'jpeg': '9',
        'libdap4': '3.19',
        'libkml': '1.3',
        'libnetcdf': '4.4',
        'libpng': '1.6',
        'libtiff': '4.0',
        'libxml2': '2.9',
        'mkl': '2018',
        'openblas': '0.2.19',
        'proj4': '4',
        'scipy': '0.17',
        'sqlite': '3',
        'zlib': '1.2',
        'xz': '5',
        'zip_keys': ['macos_min_version', 'macos_machine',
                     'MACOSX_DEPLOYMENT_TARGET', 'CONDA_BUILD_SYSROOT'],
        'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}},
        'macos_min_version': '10.9',
        'macos_machine': 'x86_64-apple-darwin13.4.0',
        'MACOSX_DEPLOYMENT_TARGET': '10.9',
        'CONDA_BUILD_SYSROOT': '/opt/MacOSX10.9.sdk',
    }
    expanded = variants.dict_of_lists_to_list_of_dicts([pinned])
    assert len(expanded) == 1
def test_variant_input_with_zip_keys_keeps_zip_keys_list():
    """The zip_keys list must come through expansion unchanged."""
    zipped = ['sqlite', 'zlib', 'xz']
    cfg = {
        'scipy': ['0.17', '0.19'],
        'sqlite': ['3'],
        'zlib': ['1.2'],
        'xz': ['5'],
        'zip_keys': zipped,
        'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}},
    }
    expanded = variants.dict_of_lists_to_list_of_dicts(cfg)
    assert len(expanded) == 2
    assert expanded[0].get("zip_keys") == zipped
def test_variant_input_with_zip_keys_keeps_zip_keys_list():
    """zip_keys should still be present (and non-empty) in each expanded variant."""
    raw = {
        'scipy': ['0.17', '0.19'],
        'sqlite': ['3'],
        'zlib': ['1.2'],
        'xz': ['5'],
        'zip_keys': ['macos_min_version', 'macos_machine',
                     'MACOSX_DEPLOYMENT_TARGET', 'CONDA_BUILD_SYSROOT'],
        'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}},
    }
    expanded = variants.dict_of_lists_to_list_of_dicts(
        raw, extend_keys=variants.DEFAULT_VARIANTS['extend_keys'])
    assert len(expanded) == 2
    assert expanded[0].get('zip_keys')
def render_recipe(recipe_path, config, no_download_source=False, variants=None, permit_unsatisfiable_variants=True, reset_build_id=True): arg = recipe_path # Don't use byte literals for paths in Python 2 if not PY3: arg = arg.decode(getpreferredencoding() or 'utf-8') if isfile(arg): if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): recipe_dir = tempfile.mkdtemp() t = tarfile.open(arg, 'r:*') t.extractall(path=recipe_dir) t.close() need_cleanup = True elif arg.endswith('.yaml'): recipe_dir = os.path.dirname(arg) need_cleanup = False else: print("Ignoring non-recipe: %s" % arg) return None, None else: recipe_dir = abspath(arg) need_cleanup = False if not isdir(recipe_dir): sys.exit("Error: no such directory: %s" % recipe_dir) try: m = MetaData(recipe_dir, config=config) except exceptions.YamlParsingError as e: sys.stderr.write(e.error_msg()) sys.exit(1) if config.set_build_id: m.config.compute_build_id(m.name(), reset=reset_build_id) if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or len(os.listdir(m.config.work_dir)) == 0): try_download(m, no_download_source=no_download_source) rendered_metadata = {} if m.final: rendered_metadata = [ (m, False, False), ] index = None else: variants = dict_of_lists_to_list_of_dicts( variants) if variants else get_package_variants(m) index = get_build_index(m.config, m.config.build_subdir) rendered_metadata = distribute_variants( m, variants, index, permit_unsatisfiable_variants=permit_unsatisfiable_variants) if not rendered_metadata: raise ValueError( "No variants were satisfiable - no valid recipes could be rendered." ) if need_cleanup: utils.rm_rf(recipe_dir) return rendered_metadata, index
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            # recipe tarball: unpack into a temp dir; cleaned up after rendering
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #    There's no way around it AFAICT.  We must download the source to be able to render
    #    the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #    folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)
    if m.final:
        # already-final metadata expands to exactly one variant; make sure the
        # config carries a variants list for downstream consumers
        if not hasattr(m.config, 'variants'):
            m.config.variants = [m.config.variant]
        rendered_metadata = [(m, False, False), ]
    else:
        # NOTE(review): `index`/`index_ts` are not used below -- get_build_index is
        # presumably called for its side effects (cache priming); confirm before removing.
        index, index_ts = get_build_index(m.config.build_subdir,
                                          bldpkgs_dir=m.config.bldpkgs_dir,
                                          output_folder=m.config.output_folder,
                                          channel_urls=m.config.channel_urls,
                                          debug=m.config.debug,
                                          verbose=m.config.verbose,
                                          locking=m.config.locking,
                                          timeout=m.config.timeout)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants) if variants
                    else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True, bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here
    (subpackages won't affect these results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            # recipe tarball: unpack into a temp dir; cleaned up after rendering
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    # set the build id before any source download so both land in the same folder
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # source is needed to render some recipes; only download when the work dir
    # is missing or empty
    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}
    if m.final:
        # metadata is already fully resolved: one variant, nothing further to do
        rendered_metadata = [(m, False, False), ]
    else:
        # NOTE(review): `index`/`index_ts` are not used below -- get_build_index is
        # presumably called for its side effects (cache priming); confirm before removing.
        index, index_ts = get_build_index(m.config, m.config.build_subdir)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants) if variants
                    else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m, variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            stub_subpackages=True)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata