def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies.

    :param m: metadata object for the output being finalized
    :param parent_metadata: metadata for the top-level recipe; defaults to ``m``
        itself when this output *is* the top level
    :param permit_unsatisfiable_variants: when True, unsolvable dependencies do
        not raise; the returned metadata is instead marked non-final
    :return: the finalized metadata object (``final`` is False if any build/host
        dependency was unsatisfiable)
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        # packages in ignore_version are left unpinned, unless they are also
        # listed in pin_run_as_build, which takes precedence
        excludes = set(m.config.variant.get('ignore_version', []))
        for key in m.config.variant.get('pin_run_as_build', {}):
            if key in excludes:
                excludes.remove(key)
        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            # sibling outputs are produced by this same recipe, so they must not
            # be resolved from the package index
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())
        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(), variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that
        # function adds deps to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m, pinning_env, m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        # map "name" -> "version build_string" for every resolved build/host dep
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        # a requirements.txt next to the recipe can supply run deps if the
        # recipe itself declared none
        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]

        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps
        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            # set comprehension dedupes; order of test deps is not significant
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #     best thing is to hard-code the absolute path.  This probably won't exist on any
        #     system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # fix: expanduser() returns the expanded path; the original code
                # discarded its result, making the call a no-op
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            # warning(), not the deprecated warn() alias
            log.warning("Returning non-final recipe for {}; one or more dependencies "
                        "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warning("Build: {}".format(build_unsat))
            if host_unsat:
                log.warning("Host: {}".format(host_unsat))
        else:
            m.final = True
        if is_top_level:
            # NOTE(review): rebinds the local name only; has no effect on the
            # caller's parent_metadata — confirm whether this was intended
            parent_metadata = m
    return m
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies.

    NOTE(review): this file contains two definitions of ``finalize_metadata``;
    this later one shadows the earlier one at import time — confirm which
    version is intended and delete the other.

    :param m: metadata object for the output being finalized
    :param parent_metadata: metadata for the top-level recipe; defaults to ``m``
        itself when this output *is* the top level
    :param permit_unsatisfiable_variants: when True, unsolvable dependencies do
        not raise; the returned metadata is instead marked non-final
    :return: the finalized metadata object (``final`` is False if any build/host
        dependency was unsatisfiable)
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        # fix: the original set ``final`` on a discarded copy
        # (``rendered_metadata = m.copy()``) while still returning ``m``, so a
        # skipped recipe came back without final=True
        m.final = True
    else:
        exclude_pattern = None
        # packages in ignore_version are left unpinned, unless they are also
        # listed in pin_run_as_build, which takes precedence
        excludes = set(m.config.variant.get('ignore_version', []))
        for key in m.config.variant.get('pin_run_as_build', {}):
            if key in excludes:
                excludes.remove(key)
        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            # sibling outputs are produced by this same recipe, so they must not
            # be resolved from the package index
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())
        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that
        # function adds deps to the metadata.
        requirements = m.meta.get('requirements', {})
        # (removed a large block of commented-out dead code that re-rendered the
        #  output to recover jinja2 pins; restore from history if it is needed)

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m, pinning_env, m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        # map "name" -> "version build_string" for every resolved build/host dep
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        # a requirements.txt next to the recipe can supply run deps if the
        # recipe itself declared none
        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]

        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps
        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            # set comprehension dedupes; order of test deps is not significant
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #     best thing is to hard-code the absolute path.  This probably won't exist on any
        #     system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # fix: expanduser() returns the expanded path; the original code
                # discarded its result, making the call a no-op
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            # warning(), not the deprecated warn() alias
            log.warning("Returning non-final recipe for {}; one or more dependencies "
                        "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warning("Build: {}".format(build_unsat))
            if host_unsat:
                log.warning("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m