def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Expand ``metadata`` into one rendered MetaData object per applicable variant.

    :param metadata: the top-level recipe MetaData object to be copied per variant
    :param variants: list of variant dicts to distribute over
    :param permit_unsatisfiable_variants: accepted for interface compatibility;
        not consulted in this body
    :param allow_no_other_outputs: forwarded to ``parse_until_resolved``
    :param bypass_env_check: forwarded to ``parse_until_resolved``
    :return: list of 3-tuples ``(metadata, need_source_download, None)``
    """
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package, and figure out
    # which python version we prefer.  `python_age` can be used to tweak which
    # python gets used here.
    if metadata.noarch or metadata.noarch_python:
        from .conda_interface import VersionOrder
        age = int(metadata.get_value('build/noarch_python_build_age',
                                     metadata.config.noarch_python_build_age))
        # collect the distinct python versions present in the variant set
        versions = []
        for variant in variants:
            if 'python' in variant:
                vo = variant['python']
                if vo not in versions:
                    versions.append(vo)
        # rank the versions; `age` counts back from the newest
        version_indices = sorted(
            range(len(versions)),
            key=lambda k: VersionOrder(versions[k].split(' ')[0]))
        if age < 0:
            age = 0
        elif age > len(versions) - 1:
            age = len(versions) - 1
        build_ver = versions[version_indices[len(versions) - 1 - age]]
        variants = filter_by_key_value(variants, 'python', build_ver,
                                       'noarch_python_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    # used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    # normalize recipe text to the native string type for this interpreter
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        from conda_build.build import get_all_replacements
        get_all_replacements(variant)
        mv = metadata.copy()
        mv.config.variant = variant

        # NOTE: may alias the dict stored in `variant`, so the numpy pin added
        # below can also appear in the variant itself
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            # variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (
                filter_by_key_value(mv.config.variants, key, values,
                                    'distribute_variants_reduction') or
                mv.config.variants)
        get_all_replacements(mv.config.variants)

        # (a second, identical computation of pin_run_as_build used to sit here;
        #  it always produced the same content as the one above, so it was removed)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        try:
            mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                    bypass_env_check=bypass_env_check)
        except SystemExit:
            # best-effort: an unresolvable variant should not abort the whole
            # distribution
            pass
        need_source_download = (not mv.needs_source_for_render or
                                not mv.source_provided)

        # key on dist + target platform + the used-variable values so distinct
        # configurations don't collide
        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
            (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Expand ``metadata`` into one rendered MetaData object per variant.

    :param metadata: the top-level recipe MetaData object to be copied per variant
    :param variants: list of variant dicts to distribute over
    :param permit_unsatisfiable_variants: accepted for interface compatibility;
        not consulted in this body
    :param allow_no_other_outputs: forwarded to ``parse_until_resolved``
    :param bypass_env_check: forwarded to ``parse_until_resolved``
    :return: list of 3-tuples ``(metadata, need_source_download, None)``
    """
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    # used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    # normalize recipe text to the native string type for this interpreter
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    for variant in variants:
        mv = metadata.copy()
        mv.config.variant = variant

        used_variables = mv.get_used_loop_vars()
        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            # variant mapping
            conform_dict[key] = variant[key]

        # python / r-base in build or host requirements lock those dimensions too
        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']
        if 'r-base' in build_reqs or 'r-base' in host_reqs:
            conform_dict['r_base'] = variant['r_base']

        # NOTE: may alias the dict stored in `variant`
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        for key, values in conform_dict.items():
            mv.config.variants = (
                filter_by_key_value(mv.config.variants, key, values,
                                    'distribute_variants_reduction') or
                mv.config.variants)

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (not mv.needs_source_for_render or
                                not mv.source_provided)

        # BUGFIX: use .get(var) rather than [var] — get_used_vars() can report
        # variables that are not present in this particular variant dict, and
        # direct indexing raised KeyError for those.
        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
            (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Render one MetaData copy per variant, locking the dimensions each uses.

    :param metadata: top-level recipe MetaData object, copied per variant
    :param variants: list of variant dicts to distribute over
    :param permit_unsatisfiable_variants: accepted for interface compatibility;
        not consulted in this body
    :param allow_no_other_outputs: forwarded to ``parse_until_resolved``
    :param bypass_env_check: forwarded to ``parse_until_resolved``
    :return: list of 3-tuples ``(metadata, need_source_download, None)``
    """
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    # used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    # normalize recipe text to the native string type for this interpreter
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should
        # be locked for future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True, allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            # variant mapping
            if re.search(r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_text):
                if key in variant:
                    conform_dict[key] = variant[key]

        # anything named directly in build/host requirements is locked as well
        reqs = (utils.ensure_list(mv.meta.get('requirements', {}).get('build', [])) +
                utils.ensure_list(mv.meta.get('requirements', {}).get('host', [])))
        conform_dict.update({key: val for key, val in variant.items()
                             if key in reqs})

        compiler_matches = re.findall(
            r"\{\{\s*compiler\([\'\"](.*)[\'\"].*\)\s*\}\}", recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))

        # target_platform is *always* a locked dimension, because top-level
        # recipe is always particular to a platform.
        conform_dict['target_platform'] = variant.get('target_platform',
                                                      metadata.config.subdir)

        # handle grouping from zip_keys for everything in conform_dict
        if 'zip_keys' in variant:
            zip_key_groups = variant['zip_keys']
            if zip_key_groups and not isinstance(zip_key_groups[0], list):
                zip_key_groups = [zip_key_groups]
            for key in list(conform_dict.keys()):
                zipped_keys = None
                for group in zip_key_groups:
                    if key in group:
                        zipped_keys = group
                if zipped_keys:
                    # here we zip the values of the keys, so that we can match
                    # the combination
                    zipped_values = list(zip(*[squished_variants[k]
                                               for k in zipped_keys]))
                    variant_index = zipped_values.index(
                        tuple(variant[k] for k in zipped_keys))
                    for zipped_key in zipped_keys:
                        conform_dict[zipped_key] = \
                            squished_variants[zipped_key][variant_index]

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']
        if 'r-base' in build_reqs or 'r-base' in host_reqs:
            conform_dict['r_base'] = variant['r_base']

        # NOTE: may alias the dict stored in `variant`
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        for key, values in conform_dict.items():
            mv.config.variants = (
                filter_by_key_value(mv.config.variants, key, values,
                                    'distribute_variants_reduction') or
                mv.config.variants)

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (not mv.needs_source_for_render or
                                not mv.source_provided)

        # if python is in the build specs, but doesn't have a specific associated
        # version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv.meta.get('requirements', {}),
                                          mv.config.variant, env)

        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a
        # hash based on the current meta.yaml.  The accuracy doesn't matter, all
        # that matters is our ability to differentiate configurations
        fm.final = True
        rendered_metadata[(fm.dist(),
                           fm.config.variant.get('target_platform', fm.config.subdir),
                           tuple((var, fm.config.variant[var])
                                 for var in fm.get_used_loop_vars()))] = \
            (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Expand ``metadata`` into one rendered MetaData object per reduced variant.

    :param metadata: the top-level recipe MetaData object to be copied per variant
    :param variants: list of variant dicts to distribute over
    :param permit_unsatisfiable_variants: accepted for interface compatibility;
        not consulted in this body
    :param allow_no_other_outputs: forwarded to ``parse_until_resolved``
    :param bypass_env_check: forwarded to ``parse_until_resolved``
    :return: list of 3-tuples ``(metadata, need_source_download, None)``
    """
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    # used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    # normalize recipe text to the native string type for this interpreter
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        mv = metadata.copy()
        mv.config.variant = variant

        # NOTE: may alias the dict stored in `variant`, so the numpy pin added
        # below can also appear in the variant itself
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            # variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (
                filter_by_key_value(mv.config.variants, key, values,
                                    'distribute_variants_reduction') or
                mv.config.variants)

        # (a second, identical computation of pin_run_as_build used to sit here;
        #  it always produced the same content as the one above, so it was removed)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        try:
            mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                    bypass_env_check=bypass_env_check)
        except SystemExit:
            # best-effort: an unresolvable variant should not abort the whole
            # distribution
            pass
        need_source_download = (not mv.needs_source_for_render or
                                not mv.source_provided)

        # key on dist + target platform + the used-variable values so distinct
        # configurations don't collide
        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
            (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())