def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe: fill in concrete versions for build/host dependencies.

    Args:
        m: metadata object for the recipe (or one of its outputs) being rendered.
        parent_metadata: metadata of the enclosing top-level recipe; defaults to
            ``m`` itself when not supplied.
        permit_unsatisfiable_variants: when True, unsatisfiable dependencies do
            not abort rendering; the recipe is returned with ``final`` False.

    Returns:
        The (possibly re-fetched) metadata object; ``.final`` is False when any
        build or host dependency was unsatisfiable.

    NOTE(review): this file defines ``finalize_metadata`` several times; only the
    last definition is in effect at import time — confirm which one is wanted.
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))
        # packages listed under pin_run_as_build take precedence over ignore_version
        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)
        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())
        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})
        # extract the topmost section where variables are defined, and put it on top of the
        # requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(), variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False
            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function
        # adds deps to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This
        # happens based on the keys in the 'pin_run_as_build' key in the variant,
        # which is a list of package names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        # version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
        m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m, pinning_env, m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps
        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps

        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.
        # The next best thing is to hard-code the absolute path.  This probably
        # won't exist on any system other than the original build machine, but at
        # least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # BUGFIX: the original called os.path.expanduser(source_path) and
                # discarded the result, so '~' was never expanded; keep it.
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
                else:
                    # path was absolute (possibly only after ~ expansion); record
                    # the expanded form.  No-op when no '~' was present.
                    m.meta['source']['path'] = source_path
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}
        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                     "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True

        # NOTE(review): rebinding this local has no effect after the return below;
        # kept inside this branch because is_top_level is only bound here (the
        # skip path would otherwise raise NameError).
        if is_top_level:
            parent_metadata = m
    return m
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe: fill in concrete versions for build/host dependencies.

    Resolves the build environment (and, when cross compiling, the host
    environment), folds strong/weak run exports into run requirements, pins run
    deps to their build-time versions, and returns a finalized copy of ``m``.

    Args:
        m: metadata object for the recipe being rendered.
        permit_unsatisfiable_variants: when True, unsatisfiable dependencies do
            not abort rendering; the result is marked non-final instead.

    Returns:
        A copy of ``m`` with resolved requirements; ``.final`` is False when any
        build or host dependency was unsatisfiable.

    NOTE(review): this file defines ``finalize_metadata`` several times; only the
    last definition is in effect at import time — confirm which one is wanted.
    """
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))
    # packages listed under pin_run_as_build take precedence over ignore_version
    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)
    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name for (name, variant) in m.other_outputs.keys())
    if excludes or output_excludes:
        # BUGFIX: raw string for the pattern fragment so that \s and \Z reach the
        # regex engine instead of being invalid string escapes (SyntaxWarning on
        # modern Python, slated to become an error).
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes | output_excludes))

    build_reqs = m.meta.get('requirements', {}).get('build', [])
    # if python is in the build specs, but doesn't have a specific associated
    # version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
    m.meta['requirements']['build'] = build_reqs

    # if we have host deps, they're more important than the build deps.
    build_deps, build_actions, build_unsat = get_env_dependencies(
        m, 'build', m.config.variant, exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)

    extra_run_specs_from_build = get_upstream_pins(m, build_actions, 'build')

    # is there a 'host' section?
    if m.is_cross:
        host_reqs = m.get_value('requirements/host')
        # if python is in the host specs, but doesn't have a specific associated
        # version, make sure to add one
        if host_reqs:
            if 'python' in host_reqs:
                host_reqs.append('python {}'.format(m.config.variant['python']))
        # NOTE(review): this assumes get_value returns a list here; if it can
        # return None, this .extend would raise — confirm get_value's default.
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))
        m.meta['requirements']['host'] = [utils.ensure_valid_spec(spec)
                                          for spec in host_reqs]
        host_deps, host_actions, host_unsat = get_env_dependencies(
            m, 'host', m.config.variant, exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        # extend host deps with strong build run exports.  This is important for
        # things like vc feature activation to work correctly in the host env.
        extra_run_specs_from_host = get_upstream_pins(m, host_actions, 'host')
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        m.config.build_prefix_override = not m.uses_new_style_compiler_activation
        host_deps = []
        host_unsat = None
        extra_run_specs = (extra_run_specs_from_build.get('strong', []) +
                           extra_run_specs_from_build.get('weak', []))

    # here's where we pin run dependencies to their build time versions.  This
    # happens based on the keys in the 'pin_run_as_build' key in the variant,
    # which is a list of package names to have this behavior.
    requirements = m.meta.get('requirements', {})
    run_deps = requirements.get('run', [])
    if output_excludes:
        # BUGFIX: raw string, same reason as above.
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'
    full_build_deps, _, _ = get_env_dependencies(
        m, pinning_env, m.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                               for dep in full_build_deps}
    versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                          for dep in run_deps]
    versioned_run_deps.extend(extra_run_specs)
    versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                          for spec in versioned_run_deps]

    for _env, values in (('build', build_deps), ('host', host_deps),
                         ('run', versioned_run_deps)):
        if values:
            requirements[_env] = list({strip_channel(dep) for dep in values})

    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements']['run'] = environ.get_pinned_deps(
            rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                    for dep in test_deps})
        versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                               for spec in versioned_test_deps]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    rendered_metadata.meta['extra'][
        'copy_test_source_files'] = m.config.copy_test_source_files

    # if source/path is relative, then the output package makes no sense at all.
    # The next best thing is to hard-code the absolute path.  This probably won't
    # exist on any system other than the original build machine, but at least it
    # will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe: fill in concrete versions for build/host dependencies.

    Re-parses the recipe text (top-level variables plus this output's
    requirements) through jinja2/yaml, applies upstream pins, then pins run and
    test dependencies to their build-time versions.

    Args:
        m: metadata object for the recipe being rendered.
        permit_unsatisfiable_variants: when True, unsatisfiable dependencies do
            not abort rendering; the result is marked non-final instead.

    Returns:
        A copy of ``m`` with resolved requirements; ``.final`` is False when any
        build or host dependency was unsatisfiable.

    NOTE(review): this file defines ``finalize_metadata`` several times; only the
    last definition is in effect at import time — confirm which one is wanted.
    """
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))
    # packages listed under pin_run_as_build take precedence over ignore_version
    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)
    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name for (name, variant) in m.other_outputs.keys())
    if excludes or output_excludes:
        # BUGFIX: raw string for the pattern fragment so that \s and \Z reach the
        # regex engine instead of being invalid string escapes (SyntaxWarning on
        # modern Python, slated to become an error).
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes | output_excludes))

    # extract the topmost section where variables are defined, and put it on top
    # of the requirements for a particular output.
    # Re-parse the output from the original recipe, so that we re-consider any
    # jinja2 stuff.
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern, force_top_level=True),
        # second item: the requirements text for this particular metadata
        # object (might be output)
        m.extract_requirements_text()))

    requirements = (yaml.safe_load(m._get_contents(
        permit_undefined_jinja=False,
        template_string=template_string)) or {}).get('requirements', {})
    requirements = utils.expand_reqs(requirements)
    # fall back to a requirements.txt-style file when the recipe has no run deps
    if isfile(m.requirements_path) and not requirements.get('run'):
        requirements['run'] = specs_from_url(m.requirements_path)

    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'build')
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'host')

    build_unsat, host_unsat = add_upstream_pins(rendered_metadata,
                                                permit_unsatisfiable_variants,
                                                exclude_pattern)

    # here's where we pin run dependencies to their build time versions.  This
    # happens based on the keys in the 'pin_run_as_build' key in the variant,
    # which is a list of package names to have this behavior.
    if output_excludes:
        # BUGFIX: raw string, same reason as above.
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'

    build_reqs = requirements.get(pinning_env, [])
    # if python is in the build specs, but doesn't have a specific associated
    # version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
    rendered_metadata.meta['requirements'][pinning_env] = build_reqs

    full_build_deps, _, _ = get_env_dependencies(
        rendered_metadata, pinning_env, rendered_metadata.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                               for dep in full_build_deps}

    run_deps = rendered_metadata.meta.get('requirements', {}).get('run', [])
    versioned_run_deps = [get_pin_from_build(rendered_metadata, dep,
                                             full_build_dep_versions)
                          for dep in run_deps]
    versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                          for spec in versioned_run_deps]
    requirements = rendered_metadata.meta.get('requirements', {})
    requirements['run'] = versioned_run_deps
    rendered_metadata.meta['requirements'] = requirements

    # append other requirements, such as python.app, appropriately
    rendered_metadata.append_requirements()

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements']['run'] = environ.get_pinned_deps(
            rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        # NOTE(review): pins here use ``m`` while the surrounding code uses
        # ``rendered_metadata`` — looks inconsistent; confirm which was intended.
        versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                    for dep in test_deps})
        versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                               for spec in versioned_test_deps]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    rendered_metadata.meta['extra'][
        'copy_test_source_files'] = m.config.copy_test_source_files

    # if source/path is relative, then the output package makes no sense at all.
    # The next best thing is to hard-code the absolute path.  This probably won't
    # exist on any system other than the original build machine, but at least it
    # will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe: fill in concrete versions for build/host dependencies.

    Args:
        m: metadata object for the recipe (or one of its outputs) being rendered.
        parent_metadata: metadata of the enclosing top-level recipe; defaults to
            ``m`` itself when not supplied.
        permit_unsatisfiable_variants: when True, unsatisfiable dependencies do
            not abort rendering; the recipe is returned with ``final`` False.

    Returns:
        The (possibly re-fetched) metadata object; ``.final`` is False when any
        build or host dependency was unsatisfiable.

    NOTE(review): this file defines ``finalize_metadata`` several times; only the
    last definition is in effect at import time — confirm which one is wanted.
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        # BUGFIX: the original set ``final`` on a throwaway copy
        # (rendered_metadata = m.copy()) but still returned ``m``, so skipped
        # recipes came back non-final.  Mark ``m`` itself.
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))
        # packages listed under pin_run_as_build take precedence over ignore_version
        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)
        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())
        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})
        # extract the topmost section where variables are defined, and put it on
        # top of the requirements for a particular output.
        # Re-parse the output from the original recipe, so that we re-consider
        # any jinja2 stuff.
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function
        # adds deps to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This
        # happens based on the keys in the 'pin_run_as_build' key in the variant,
        # which is a list of package names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        # version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
        m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m, pinning_env, m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        # fall back to a requirements.txt-style file when the recipe has no run deps
        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps
        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps

        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.
        # The next best thing is to hard-code the absolute path.  This probably
        # won't exist on any system other than the original build machine, but at
        # least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # BUGFIX: the original called os.path.expanduser(source_path) and
                # discarded the result, so '~' was never expanded; keep it.
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
                else:
                    # path was absolute (possibly only after ~ expansion); record
                    # the expanded form.  No-op when no '~' was present.
                    m.meta['source']['path'] = source_path
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}
        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                     "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m