Example #1
def create_py_files(m):
    tf, tf_exists = _create_test_files(m, '.py')

    # Ways in which we can mark imports as non-Python imports
    # 1. preface package name with r-, lua- or perl-
    # 2. use list of dicts for test/imports, and have lang set in those dicts
    pkg_name = m.name()
    likely_r_pkg = pkg_name.startswith('r-')
    likely_lua_pkg = pkg_name.startswith('lua-')
    likely_perl_pkg = pkg_name.startswith('perl-')
    likely_non_python_pkg = likely_r_pkg or likely_lua_pkg or likely_perl_pkg

    if likely_non_python_pkg:
        # start from an empty list: bare imports on an r-/lua-/perl- package
        # are assumed to be non-Python, so only collect imports explicitly
        # marked as python
        imports = []
        for import_item in ensure_list(m.get_value('test/imports', [])):
            # add any imports specifically marked as python
            if (hasattr(import_item, 'keys') and 'lang' in import_item and
                    import_item['lang'] == 'python'):
                imports.extend(import_item['imports'])
    else:
        imports = ensure_list(m.get_value('test/imports', []))
        imports = [item for item in imports if (not hasattr(item, 'keys') or
                                                'lang' in item and item['lang'] == 'python')]
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                fo.write('print("import: %r")\n' % name)
                fo.write('import %s\n' % name)
                fo.write('\n')
    return tf if (tf_exists or imports) else False
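The loop above just appends a print marker and an import statement per name. A minimal standalone sketch of the emitted text, using a hypothetical imports list (not tied to conda-build):

imports = ['os', 'json']
lines = []
for name in imports:
    lines.append('print("import: %r")\n' % name)
    lines.append('import %s\n' % name)
    lines.append('\n')
print(''.join(lines))
# print("import: 'os'")
# import os
# print("import: 'json'")
# import json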
Example #2
def create_pl_files(m):
    tf, tf_exists = _create_test_files(m, '.pl')
    imports = None
    if m.name().startswith('perl-'):
        imports = ensure_list(m.get_value('test/imports', []))
    else:
        for import_item in ensure_list(m.get_value('test/imports', [])):
            if (hasattr(import_item, 'keys') and 'lang' in import_item and
                    import_item['lang'] == 'perl'):
                imports = import_item['imports']
                break
    if tf or imports:
        with open(tf, 'a+') as fo:
            print(r'my $expected_version = "%s";' % m.version().rstrip('0'),
                    file=fo)
            if imports:
                for name in imports:
                    print(r'print("import: %s\n");' % name, file=fo)
                    print('use %s;\n' % name, file=fo)
                    # Don't try to print version for complex imports
                    if ' ' not in name:
                        print(("if (defined {0}->VERSION) {{\n" +
                                "\tmy $given_version = {0}->VERSION;\n" +
                                "\t$given_version =~ s/0+$//;\n" +
                                "\tdie('Expected version ' . $expected_version . ' but" +
                                " found ' . $given_version) unless ($expected_version " +
                                "eq $given_version);\n" +
                                "\tprint('\tusing version ' . {0}->VERSION . '\n');\n" +
                                "\n}}").format(name), file=fo)
    return tf if (tf_exists or imports) else False
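For a sense of the Perl text this emits, here is a standalone sketch using a hypothetical package (JSON::PP at version 1.20). Note that rstrip('0') trims trailing '0' characters, so "1.20" is compared as "1.2", mirroring the s/0+$// on the Perl side:

version = '1.20'
print(r'my $expected_version = "%s";' % version.rstrip('0'))
for name in ['JSON::PP']:
    print(r'print("import: %s\n");' % name)
    print('use %s;\n' % name)
# my $expected_version = "1.2";
# print("import: JSON::PP\n");
# use JSON::PP;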
Example #3
def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            copy_into(f, f.replace(config.work_dir, config.test_dir), config.timeout)
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(config.test_dir, ext):
                os.remove(f)
    return has_files
Example #4
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                                    permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        # ensure host_reqs is present, so in-place modification below is actually in-place
        requirements = m.meta.setdefault('requirements', {})
        requirements['host'] = host_reqs

        if not host_reqs:
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # in-place modification of above thingie
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                                    permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if m.build_is_host:
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
            build_deps = set(build_deps or [])
            build_deps.update(extra_run_specs_from_build.get('weak', []))
        else:
            host_deps = set(extra_run_specs_from_build.get('strong', []))

    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
Example #5
def get_package_variants(recipedir_or_metadata, config=None, variants=None):
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(recipedir_or_metadata, ensure_list(config.variant_config_files),
                              ignore_system_config=config.ignore_system_variants,
                              exclusive_config_files=config.exclusive_config_files)

    specs = OrderedDict(internal_defaults=get_default_variant(config))

    for f in files:
        specs[f] = parse_config_file(f, config)

    # this is the override of the variants from files and args with values from CLI or env vars
    if hasattr(config, 'variant') and config.variant:
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for f, spec in specs.items():
        try:
            validate_spec(spec)
        except ValueError as e:
            raise ValueError("Error in config {}: {}".format(f, str(e)))

    # this merges each of the specs, providing a debug message when a given setting is overridden
    #      by a later spec
    combined_spec = combine_specs(specs, log_output=config.verbose)

    extend_keys = set(ensure_list(combined_spec.get('extend_keys')))
    extend_keys.update({'zip_keys', 'extend_keys'})

    # delete the default specs, so that they don't unnecessarily limit the matrix
    specs = specs.copy()
    del specs['internal_defaults']

    combined_spec = dict_of_lists_to_list_of_dicts(combined_spec, extend_keys=extend_keys)
    for source, source_specs in reversed(list(specs.items())):
        for k, vs in source_specs.items():
            if k not in extend_keys:
                # when filtering ends up killing off all variants, we just ignore that.  Generally,
                #    this arises when a later variant config overrides, rather than selects a
                #    subspace of earlier configs
                combined_spec = (filter_by_key_value(combined_spec, k, vs, source_name=source) or
                                 combined_spec)
    return combined_spec
Example #6
def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)), bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder, channel_urls=m.config.channel_urls,
                                  debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking,
                                  timeout=m.config.timeout)

    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)

    pkg_files = {}

    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages

    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist

    return pkg_files
Example #7
def _get_zip_dict_of_lists(combined_variant, list_of_strings):
    used_keys = [key for key in list_of_strings if key in combined_variant]
    out = {}

    if used_keys:
        dict_key = ",".join(list_of_strings)
        length = len(ensure_list(combined_variant[used_keys[0]]))
        for key in used_keys:
            if not len(ensure_list(combined_variant[key])) == length:
                raise ValueError("zip field {} length does not match zip field {} length.  All zip "
                                 "fields within a group must be the same length."
                                 .format(used_keys[0], key))
        values = list(zip(*[ensure_list(combined_variant[key]) for key in used_keys]))
        values = [','.join(value) for value in values]
        out = {dict_key: values}
    return out
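The intent of the collapse: zipped fields move in lockstep instead of forming a cross product, so parallel lists are folded into a single joined key. A standalone sketch with hypothetical fields (this version joins on ','; the variants further down use '#', which is less likely to collide with characters inside values):

combined_variant = {'python': ['2.7', '3.6'], 'numpy': ['1.11', '1.15']}
used_keys = ['python', 'numpy']
dict_key = ','.join(used_keys)
values = [','.join(v) for v in zip(*(combined_variant[k] for k in used_keys))]
print({dict_key: values})
# {'python,numpy': ['2.7,1.11', '3.6,1.15']}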
Example #8
def get_package_variants(recipedir_or_metadata, config=None):
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(recipedir_or_metadata, ensure_list(config.variant_config_files),
                              ignore_system_config=config.ignore_system_variants)

    specs = get_default_variants(config.platform) + [parse_config_file(f, config) for f in files]

    # this is the override of the variants from files and args with values from CLI or env vars
    if config.variant:
        combined_spec, extend_keys = combine_specs(specs + [config.variant])
    else:
        # this tweaks behavior from clobbering to appending/extending
        combined_spec, extend_keys = combine_specs(specs)

    # clobber the variant with anything in the config (stuff set via CLI flags or env vars)
    for k, v in config.variant.items():
        if k in extend_keys:
            if hasattr(combined_spec[k], 'keys'):
                combined_spec[k].update(v)
            else:
                combined_spec[k].extend(v)
        else:
            combined_spec[k] = [v]

    validate_variant(combined_spec)
    return dict_of_lists_to_list_of_dicts(combined_spec, config.platform)
Example #9
def create_py_files(dir_path, m):
    has_tests = False
    with open(join(dir_path, 'run_test.py'), 'w') as fo:
        fo.write("# tests for %s (this is a generated file)\n" % m.dist())
        fo.write(header + '\n')
        fo.write("print('===== testing package: %s =====')\n" % m.dist())

        for name in ensure_list(m.get_value('test/imports', [])):
            fo.write('print("import: %r")\n' % name)
            fo.write('import %s\n' % name)
            fo.write('\n')
            has_tests = True

        try:
            with open(join(m.path, 'run_test.py')) as fi:
                fo.write("print('running run_test.py')\n")
                fo.write("# --- run_test.py (begin) ---\n")
                fo.write(fi.read())
                fo.write("# --- run_test.py (end) ---\n")
            has_tests = True
        except IOError:
            fo.write("# no run_test.py exists for this package\n")
        except AttributeError:
            fo.write("# tests were not packaged with this module, and cannot be run\n")
        fo.write("\nprint('===== %s OK =====')\n" % m.dist())

    return has_tests
Example #10
def dict_of_lists_to_list_of_dicts(dict_of_lists, extend_keys=None):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    dicts = []
    pass_through_keys = (['extend_keys', 'zip_keys'] + list(ensure_list(extend_keys)) +
                         list(_get_zip_key_set(dict_of_lists)))
    dimensions = {k: v for k, v in dict_of_lists.items() if k not in pass_through_keys}
    # here's where we add in the zipped dimensions.  Zipped stuff is concatenated strings, to avoid
    #      being distributed in the product.
    for group in _get_zip_groups(dict_of_lists):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in pass_through_keys:
            v = dict_of_lists.get(col)
            if v:
                remapped[col] = v
        # split out zipped keys
        for k, v in remapped.copy().items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = k.split('#')
                values = v.split('#')
                for (_k, _v) in zip(keys, values):
                    remapped[_k] = _v
                if '#' in k:
                    del remapped[k]
        dicts.append(remapped)
    return dicts
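The expansion itself is a cartesian product over the remaining dimensions, with zipped keys split back apart afterwards. A standalone sketch of that core with hypothetical values (pass-through handling and the six helpers dropped):

from itertools import product

dimensions = {'python#numpy': ['2.7#1.11', '3.6#1.15'], 'vc': ['9', '14']}
dicts = []
for x in product(*dimensions.values()):
    remapped = dict(zip(dimensions, x))
    # split the zipped 'python#numpy' key back into its components
    for k, v in list(remapped.items()):
        if '#' in k:
            remapped.update(zip(k.split('#'), v.split('#')))
            del remapped[k]
    dicts.append(remapped)
print(dicts)
# 4 dicts: each of the two python/numpy pairs against each vc value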
Example #11
def build_string_from_metadata(metadata):
    res = []
    version_pat = re.compile(r'(?:==)?(\d+)\.(\d+)')
    for name, s in (('numpy', 'np'), ('python', 'py'),
                    ('perl', 'pl'), ('lua', 'lua'),
                    ('r', 'r'), ('r-base', 'r')):
        for ms in metadata.ms_depends():
            if ms.name == name:
                try:
                    v = ms.spec.split()[1]
                except IndexError:
                    if name not in ['numpy']:
                        res.append(s)
                    break
                if any(i in v for i in ',|>!<'):
                    break
                if name not in ['perl', 'lua', 'r', 'r-base']:
                    match = version_pat.match(v)
                    if match:
                        res.append(s + match.group(1) + match.group(2))
                else:
                    res.append(s + v.strip('*'))
                break

    features = ensure_list(metadata.get_value('build/features', []))
    if res:
        res.append('_')
    if features:
        res.extend(('_'.join(features), '_'))
    res.append('{0}'.format(metadata.build_number() if metadata.build_number() else 0))
    return "".join(res)
Example #12
def provide(metadata):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    meta = metadata.get_section('source')
    if not os.path.isdir(metadata.config.build_folder):
        os.makedirs(metadata.config.build_folder)
    git = None

    if hasattr(meta, 'keys'):
        dicts = [meta]
    else:
        dicts = meta

    try:
        for source_dict in dicts:
            folder = source_dict.get('folder')
            src_dir = (os.path.join(metadata.config.work_dir, folder) if folder else
                    metadata.config.work_dir)
            if any(k in source_dict for k in ('fn', 'url')):
                unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path,
                    croot=metadata.config.croot, verbose=metadata.config.verbose,
                    timeout=metadata.config.timeout, locking=metadata.config.locking)
            elif 'git_url' in source_dict:
                git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path,
                                verbose=metadata.config.verbose)
            # build to make sure we have a work directory with source in it. We
            #    want to make sure that whatever version that is does not
            #    interfere with the test we run next.
            elif 'hg_url' in source_dict:
                hg_source(source_dict, src_dir, metadata.config.hg_cache,
                        verbose=metadata.config.verbose)
            elif 'svn_url' in source_dict:
                svn_source(source_dict, src_dir, metadata.config.svn_cache,
                        verbose=metadata.config.verbose, timeout=metadata.config.timeout,
                        locking=metadata.config.locking)
            elif 'path' in source_dict:
                path = normpath(abspath(join(metadata.path, source_dict['path'])))
                if metadata.config.verbose:
                    print("Copying %s to %s" % (path, src_dir))
                # careful here: we set test path to be outside of conda-build root in setup.cfg.
                #    If you don't do that, this is a recursive function
                copy_into(path, src_dir, metadata.config.timeout, symlinks=True,
                        locking=metadata.config.locking, clobber=True)
            else:  # no source
                if not isdir(src_dir):
                    os.makedirs(src_dir)

            patches = ensure_list(source_dict.get('patches', []))
            for patch in patches:
                apply_patch(src_dir, join(metadata.path, patch), metadata.config, git)

    except CalledProcessError:
        os.rename(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide')
        raise

    return metadata.config.work_dir
Example #13
def create_shell_files(m):
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    # the way this works is that each output needs to explicitly define a test script to run.
    #   They do not automatically pick up run_test.*, but can be pointed at that explicitly.
    for out in m.meta.get('outputs', []):
        if m.name() == out.get('name'):
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        copy_into(join(m.path, name), m.config.test_dir, m.config.timeout, locking=m.config.locking)
        has_tests = True

    commands = ensure_list(m.get_value('test/commands', []))
    if commands:
        with open(join(m.config.test_dir, name), 'a') as f:
            f.write('\n\n')
            for cmd in commands:
                f.write(cmd)
                f.write('\n')
                if sys.platform == 'win32':
                    f.write("if errorlevel 1 exit 1\n")
                has_tests = True

    return has_tests
Example #14
def _simplify_to_exact_constraints(metadata):
    """
    For metapackages that are pinned exactly, we want to bypass all dependencies that may
    be less exact.
    """
    requirements = metadata.meta.get('requirements', {})
    # collect deps on a per-section basis
    for section in 'build', 'host', 'run':
        deps = utils.ensure_list(requirements.get(section, []))
        deps_dict = defaultdict(list)
        for dep in deps:
            spec_parts = utils.ensure_valid_spec(dep).split()
            name = spec_parts[0]
            if len(spec_parts) > 1:
                deps_dict[name].append(spec_parts[1:])
            else:
                deps_dict[name].append([])

        deps_list = []
        for name, values in deps_dict.items():
            exact_pins = [dep for dep in values if len(dep) > 1]
            if len(values) == 1 and not any(values):
                deps_list.append(name)
            elif exact_pins:
                if not all(pin == exact_pins[0] for pin in exact_pins):
                    raise ValueError("Conflicting exact pins: {}".format(exact_pins))
                else:
                    deps_list.append(' '.join([name] + exact_pins[0]))
            else:
                deps_list.extend(' '.join([name] + dep) for dep in values if dep)
        if section in requirements and deps_list:
            requirements[section] = deps_list
    metadata.meta['requirements'] = requirements
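The simplification rule in the inner loop: an exact pin (version plus build string) wins over looser specs of the same name, and a name with no constraints stays bare. A standalone sketch with hypothetical deps (the conflicting-pin check omitted for brevity):

from collections import defaultdict

deps = ['bzip2', 'bzip2 1.0.6 h14c3975_1002', 'zlib >=1.2']
deps_dict = defaultdict(list)
for dep in deps:
    name, *parts = dep.split()
    deps_dict[name].append(parts)

deps_list = []
for name, values in deps_dict.items():
    exact_pins = [v for v in values if len(v) > 1]
    if len(values) == 1 and not any(values):
        deps_list.append(name)            # bare name, nothing to simplify
    elif exact_pins:
        deps_list.append(' '.join([name] + exact_pins[0]))
    else:
        deps_list.extend(' '.join([name] + v) for v in values if v)
print(deps_list)  # ['bzip2 1.0.6 h14c3975_1002', 'zlib >=1.2']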
Example #15
def inspect_objects(packages, prefix=sys.prefix, groupby='package'):
    installed = _installed(prefix)

    output_string = ""
    for pkg in ensure_list(packages):
        if pkg == untracked_package:
            dist = untracked_package
        elif pkg not in installed:
            raise ValueError("Package %s is not installed in %s" % (pkg, prefix))
        else:
            dist = installed[pkg]

        output_string += _underlined_text(pkg)

        if not sys.platform.startswith('darwin'):
            sys.exit("Error: conda inspect objects is only implemented in OS X")

        if dist == untracked_package:
            obj_files = get_untracked_obj_files(prefix)
        else:
            obj_files = get_package_obj_files(dist, prefix)

        info = []
        for f in obj_files:
            f_info = {}
            path = join(prefix, f)
            f_info['filetype'] = human_filetype(path)
            f_info['rpath'] = ':'.join(get_rpaths(path))
            f_info['filename'] = f
            info.append(f_info)

        output_string += print_object_info(info, groupby)
    if hasattr(output_string, 'decode'):
        output_string = output_string.decode('utf-8')
    return output_string
Example #16
def create_files(m, test_dir=None):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    if not test_dir:
        test_dir = m.config.test_dir
    has_files = False
    if not os.path.isdir(test_dir):
        os.makedirs(test_dir)

    recipe_dir = m.path or m.meta.get('extra', {}).get('parent_recipe', {}).get('path')

    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(recipe_dir, fn)
        if not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path, join(test_dir, fn), m.config.timeout, locking=False,
                  clobber=True)
    return has_files
Example #17
def create_shell_files(dir_path, m, config):
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    for out in m.meta.get('outputs', []):
        if m.name() == out['name']:
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        copy_into(join(m.path, name), dir_path, config.timeout)
        has_tests = True

    with open(join(dir_path, name), 'a') as f:
        f.write('\n\n')
        for cmd in ensure_list(m.get_value('test/commands', [])):
            f.write(cmd)
            f.write('\n')
            if sys.platform == 'win32':
                f.write("if errorlevel 1 exit 1\n")
            has_tests = True

    return has_tests
Example #18
def create_py_files(dir_path, m):
    has_tests = False
    with open(join(dir_path, 'run_test.py'), 'w') as fo:
        fo.write("# tests for %s (this is a generated file)\n" % m.dist())
        fo.write(header + '\n')
        fo.write("print('===== testing package: %s =====')\n" % m.dist())

        for name in ensure_list(m.get_value('test/imports', [])):
            fo.write('print("import: %r")\n' % name)
            fo.write('import %s\n' % name)
            fo.write('\n')
            has_tests = True

        try:
            name = 'run_test.py'
            for out in m.meta.get('outputs', []):
                if m.name() == out['name']:
                    out_test_script = out.get('test', {}).get('script', 'no-file')
                    name = out_test_script if out_test_script.endswith('.py') else 'no-file'

            with open(join(m.path, name)) as fi:
                fo.write("print('running run_test.py')\n")
                fo.write("# --- run_test.py (begin) ---\n")
                fo.write(fi.read())
                fo.write("# --- run_test.py (end) ---\n")
            has_tests = True
        except IOError:
            fo.write("# no run_test.py exists for this package\n")
        except AttributeError:
            fo.write("# tests were not packaged with this module, and cannot be run\n")
        fo.write("\nprint('===== %s OK =====')\n" % m.dist())

    return has_tests
Example #19
def create_lua_files(m):
    tf, tf_exists = _create_test_files(m, '.lua')
    imports = None
    if m.name().startswith('lua-'):
        imports = ensure_list(m.get_value('test/imports', []))
    else:
        for import_item in ensure_list(m.get_value('test/imports', [])):
            if (hasattr(import_item, 'keys') and 'lang' in import_item and
                    import_item['lang'] == 'lua'):
                imports = import_item['imports']
                break
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                print(r'print("require \"%s\"\n");' % name, file=fo)
                print('require "%s"\n' % name, file=fo)
    return tf if (tf_exists or imports) else False
Example #20
 def always_include_files(self):
     files = ensure_list(self.get_value('build/always_include_files', []))
     if any('\\' in i for i in files):
         raise RuntimeError("build/always_include_files paths must use / "
                            "as the path delimiter on Windows")
     if on_win:
         files = [f.replace("/", "\\") for f in files]
     return files
Example #21
 def always_include_files(self):
     files = ensure_list(self.get_value('build/always_include_files', []))
     if any('\\' in i for i in files):
         raise RuntimeError("build/always_include_files paths must use / "
                             "as the path delimiter on Windows")
     if on_win:
         files = [f.replace("/", "\\") for f in files]
     return files
Example #22
def create_lua_files(m):
    tf, tf_exists = _create_test_files(m, '.lua')
    imports = None
    if m.name().startswith('lua-'):
        imports = ensure_list(m.get_value('test/imports', []))
    else:
        for import_item in ensure_list(m.get_value('test/imports', [])):
            if (hasattr(import_item, 'keys') and 'lang' in import_item
                    and import_item['lang'] == 'lua'):
                imports = import_item['imports']
                break
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                print(r'print("require \"%s\"\n");' % name, file=fo)
                print('require "%s"\n' % name, file=fo)
    return tf if (tf_exists or imports) else False
Example #23
def get_env_dependencies(m,
                         env,
                         variant,
                         exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join(
                    (pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs

    subpackages, dependencies, pass_through_deps = _categorize_deps(
        m, specs, exclude_pattern, variant)

    dependencies = set(dependencies)
    unsat = None
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(
                tmpdir,
                tuple(dependencies),
                env,
                subdir=getattr(m.config, '{}_subdir'.format(env)),
                debug=m.config.debug,
                verbose=m.config.verbose,
                locking=m.config.locking,
                bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                timeout=m.config.timeout,
                disable_pip=m.config.disable_pip,
                max_env_retry=m.config.max_env_retry,
                output_folder=m.config.output_folder,
                channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list(
        (specs + subpackages + pass_through_deps)
        or m.meta.get('requirements', {}).get(env, [])), actions, unsat)
Example #24
def create_files(m):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    rm_rf(m.config.test_dir)
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path,
                  join(m.config.test_dir, fn),
                  m.config.timeout,
                  locking=m.config.locking,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(m.config.work_dir):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(m.config.work_dir, pattern))
        if not files:
            raise RuntimeError(
                "Did not find any source_files for test with pattern %s" %
                pattern)
        for f in files:
            try:
                copy_into(f,
                          f.replace(m.config.work_dir, m.config.test_dir),
                          m.config.timeout,
                          locking=m.config.locking)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".
                         format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(m.config.test_dir, ext):
                os.remove(f)
    return has_files
Example #25
 def binary_has_prefix_files(self):
     ret = ensure_list(self.get_value('build/binary_has_prefix_files', []))
     if not isinstance(ret, list):
         raise RuntimeError('build/binary_has_prefix_files should be a list of paths')
     if sys.platform == 'win32':
         if any('\\' in i for i in ret):
             raise RuntimeError("build/binary_has_prefix_files paths must use / "
                                "as the path delimiter on Windows")
     return ret
Example #26
def get_vars(variants, loop_only=False):
    """For purposes of naming/identifying, provide a way of identifying which variables contribute
    to the matrix dimensionality"""
    special_keys = {'pin_run_as_build', 'zip_keys', 'ignore_version'}
    special_keys.update(set(ensure_list(variants[0].get('extend_keys'))))
    loop_vars = [k for k in variants[0] if k not in special_keys and
                (not loop_only or
                any(variant[k] != variants[0][k] for variant in variants[1:]))]
    return loop_vars
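As a usage sketch, given two hypothetical variants that differ only in python (and assuming ensure_list from conda_build.utils is in scope so the function above runs):

variants = [{'python': '2.7', 'numpy': '1.11', 'zip_keys': []},
            {'python': '3.6', 'numpy': '1.11', 'zip_keys': []}]
print(get_vars(variants, loop_only=True))   # ['python']
print(get_vars(variants, loop_only=False))  # ['python', 'numpy']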
Example #27
def get_vars(variants, loop_only=False):
    """For purposes of naming/identifying, provide a way of identifying which variables contribute
    to the matrix dimensionality"""
    special_keys = {'pin_run_as_build', 'zip_keys', 'ignore_version'}
    special_keys.update(set(ensure_list(variants[0].get('extend_keys'))))
    loop_vars = [k for k in variants[0] if k not in special_keys and
                (not loop_only or
                any(variant[k] != variants[0][k] for variant in variants[1:]))]
    return loop_vars
Example #28
 def binary_has_prefix_files(self):
     ret = ensure_list(self.get_value('build/binary_has_prefix_files', []))
     if not isinstance(ret, list):
         raise RuntimeError('build/binary_has_prefix_files should be a list of paths')
     if sys.platform == 'win32':
         if any('\\' in i for i in ret):
             raise RuntimeError("build/binary_has_prefix_files paths must use / "
                                "as the path delimiter on Windows")
     return expand_globs(ret, self.config.build_prefix)
Example #29
def _get_zip_dict_of_lists(combined_variant, list_of_strings):
    used_keys = [key for key in list_of_strings if key in combined_variant]
    out = {}

    if used_keys:
        # The join value needs to be selected as something
        # that will not likely appear in any key or value.
        dict_key = "#".join(list_of_strings)
        length = len(ensure_list(combined_variant[used_keys[0]]))
        for key in used_keys:
            if not len(ensure_list(combined_variant[key])) == length:
                raise ValueError("zip field {} length does not match zip field {} length.  All zip "
                                 "fields within a group must be the same length."
                                 .format(used_keys[0], key))
        values = list(zip(*[ensure_list(combined_variant[key]) for key in used_keys]))
        values = ['#'.join(value) for value in values]
        out = {dict_key: values}
    return out
Example #30
def _get_zip_dict_of_lists(combined_variant, list_of_strings):
    used_keys = [key for key in list_of_strings if key in combined_variant]
    out = {}

    if used_keys:
        dict_key = ",".join(list_of_strings)
        length = len(ensure_list(combined_variant[used_keys[0]]))
        for key in used_keys:
            if not len(ensure_list(combined_variant[key])) == length:
                raise ValueError(
                    "zip field {} length does not match zip field {} length.  All zip "
                    "fields within a group must be the same length.".format(
                        used_keys[0], key))
        values = list(
            zip(*[ensure_list(combined_variant[key]) for key in used_keys]))
        values = [','.join(value) for value in values]
        out = {dict_key: values}
    return out
Example #31
def provide(metadata, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    meta = metadata.get_section('source')
    if not os.path.isdir(metadata.config.build_folder):
        os.makedirs(metadata.config.build_folder)
    git = None

    if hasattr(meta, 'keys'):
        dicts = [meta]
    else:
        dicts = meta

    for source_dict in dicts:
        folder = source_dict.get('folder')
        src_dir = (os.path.join(metadata.config.work_dir, folder) if folder else
                   metadata.config.work_dir)
        if any(k in source_dict for k in ('fn', 'url')):
            unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path,
                   croot=metadata.config.croot, verbose=metadata.config.verbose,
                   timeout=metadata.config.timeout, locking=metadata.config.locking)
        elif 'git_url' in source_dict:
            git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path,
                             verbose=metadata.config.verbose)
        # build to make sure we have a work directory with source in it.  We want to make sure that
        #    whatever version that is does not interfere with the test we run next.
        elif 'hg_url' in source_dict:
            hg_source(source_dict, src_dir, metadata.config.hg_cache,
                      verbose=metadata.config.verbose)
        elif 'svn_url' in source_dict:
            svn_source(source_dict, src_dir, metadata.config.svn_cache,
                       verbose=metadata.config.verbose, timeout=metadata.config.timeout,
                       locking=metadata.config.locking)
        elif 'path' in source_dict:
            path = normpath(abspath(join(metadata.path, source_dict['path'])))
            if metadata.config.verbose:
                print("Copying %s to %s" % (path, src_dir))
            # careful here: we set test path to be outside of conda-build root in setup.cfg.
            #    If you don't do that, this is a recursive function
            copy_into(path, src_dir, metadata.config.timeout, symlinks=True,
                    locking=metadata.config.locking, clobber=True)
        else:  # no source
            if not isdir(src_dir):
                os.makedirs(src_dir)

        if patch:
            patches = ensure_list(source_dict.get('patches', []))
            for patch in patches:
                apply_patch(src_dir, join(metadata.path, patch), metadata.config, git)

    return metadata.config.work_dir
Example #32
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                                    permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        if not host_reqs:
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.get('host', [])
        # in-place modification of above thingie
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                                    permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if not m.uses_new_style_compiler_activation and not m.build_is_host:
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
        else:
            host_deps = set(extra_run_specs_from_build.get('strong', []))

    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
Example #33
    def info_index(self):
        arch = (
            "noarch" if self.config.target_subdir == "noarch" else self.config.host_arch
        )
        d = dict(
            name=self.name(),
            version=self.version(),
            build=self.build_id(),
            build_number=self.build_number() if self.build_number() else 0,
            platform=self.config.platform
            if (self.config.platform != "noarch" and arch != "noarch")
            else None,
            arch=ARCH_MAP.get(arch, arch),
            subdir=self.config.target_subdir,
            depends=sorted(
                " ".join(ms.final.split(" ")[:2]) for ms in self.ms_depends()
            ),
            timestamp=int(time.time() * 1000),
        )
        for key in ("license", "license_family"):
            value = self.get_value("about/" + key)
            if value:
                d[key] = value

        preferred_env = self.get_value("build/preferred_env")
        if preferred_env:
            d["preferred_env"] = preferred_env

        # conda 4.4+ optional dependencies
        constrains = ensure_list(self.get_value("requirements/run_constrained"))
        # filter None values
        constrains = [v for v in constrains if v]
        if constrains:
            d["constrains"] = constrains

        if self.get_value("build/features"):
            d["features"] = " ".join(self.get_value("build/features"))
        if self.get_value("build/track_features"):
            d["track_features"] = " ".join(self.get_value("build/track_features"))
        if self.get_value("build/provides_features"):
            d["provides_features"] = self.get_value("build/provides_features")
        if self.get_value("build/requires_features"):
            d["requires_features"] = self.get_value("build/requires_features")
        if self.noarch:
            d["platform"] = d["arch"] = None
            d["subdir"] = "noarch"
            # These are new-style noarch settings.  the self.noarch setting can be True in 2 ways:
            #    if noarch: True or if noarch_python: True.  This is disambiguation.
            build_noarch = self.get_value("build/noarch")
            if build_noarch:
                d["noarch"] = build_noarch

        # TODO
        # if self.is_app():
        #     d.update(self.app_meta())
        return d
Example #34
def meta_vars(meta, config):
    d = {}
    for var_name in ensure_list(meta.get_value('build/script_env', [])):
        value = os.getenv(var_name)
        if value is None:
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning)
        else:
            d[var_name] = value

    git_dir = join(config.work_dir, '.git')
    hg_dir = join(config.work_dir, '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    git_exe = external.find_executable('git', config.build_prefix)
    if git_exe and os.path.exists(git_dir):
        # We set all 'source' metavars using the FIRST source entry in meta.yaml.
        git_url = meta.get_value('source/0/git_url')

        if os.path.exists(git_url):
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False

        if git_url:
            _x = verify_git_repo(git_exe, git_dir, git_url,
                                 config.git_commits_since_tag, config.debug,
                                 meta.get_value('source/0/git_rev', 'HEAD'))

        if _x or meta.get_value('source/0/path'):
            d.update(get_git_info(git_exe, git_dir, config.debug))

    elif external.find_executable(
            'hg', config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    # use `get_value` to prevent early exit while name is still unresolved during rendering
    d['PKG_NAME'] = meta.get_value('package/name')
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number() or 0)
    if meta.final:
        d['PKG_BUILD_STRING'] = str(meta.build_id())
        d['PKG_HASH'] = meta.hash_dependencies()
    else:
        d['PKG_BUILD_STRING'] = 'placeholder'
        d['PKG_HASH'] = '1234567'
    d['RECIPE_DIR'] = (meta.path if meta.path else meta.meta.get(
        'extra', {}).get('parent_recipe', {}).get('path', ''))
    return d
Example #35
def _get_zip_dict_of_lists(combined_variant, list_of_strings):
    used_keys = [key for key in list_of_strings if key in combined_variant]
    out = {}

    if used_keys:
        # The join value needs to be selected as something
        # that will not likely appear in any key or value.
        dict_key = "#".join(list_of_strings)
        length = len(ensure_list(combined_variant[used_keys[0]]))
        for key in used_keys:
            if not len(ensure_list(combined_variant[key])) == length:
                raise ValueError("zip field {} ({}) length does not match zip field {} ({}) "
                                 "length.  All zip fields within a group must be the same length."
                                 .format(used_keys[0], combined_variant[used_keys[0]],
                                         key, combined_variant[key]))
        values = list(zip(*[ensure_list(combined_variant[key]) for key in used_keys]))
        values = ['#'.join(value) for value in values]
        out = {dict_key: values}
    return out
Example #36
def dict_of_lists_to_list_of_dicts(dict_of_lists, extend_keys=None):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    dicts = []
    if not extend_keys:
        extend_keys = set(ensure_list(dict_of_lists.get('extend_keys')))
    pass_through_keys = set(['extend_keys', 'zip_keys', 'pin_run_as_build'] +
                            list(ensure_list(extend_keys)) +
                            list(_get_zip_key_set(dict_of_lists)))
    dimensions = {
        k: v
        for k, v in dict_of_lists.items() if k not in pass_through_keys
    }
    # here's where we add in the zipped dimensions.  Zipped stuff is concatenated strings, to avoid
    #      being distributed in the product.
    for group in _get_zip_groups(dict_of_lists):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in pass_through_keys:
            v = dict_of_lists.get(col)
            if v or v == '':
                remapped[col] = v
        # split out zipped keys
        to_del = set()
        for k, v in remapped.items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = _split_str(k, '#')
                values = _split_str(v, '#')
                for (_k, _v) in zip(keys, values):
                    remapped[_k] = _v
                if '#' in k:
                    to_del.add(k)
        for key in to_del:
            del remapped[key]
        dicts.append(remapped)
    return dicts
Example #37
    def info_index(self):
        arch = 'noarch' if self.config.target_subdir == 'noarch' else self.config.host_arch
        d = dict(
            name=self.name(),
            version=self.version(),
            build=self.build_id(),
            build_number=self.build_number() if self.build_number() else 0,
            platform=self.config.platform if
            (self.config.platform != 'noarch' and arch != 'noarch') else None,
            arch=ARCH_MAP.get(arch, arch),
            subdir=self.config.target_subdir,
            depends=sorted(' '.join(ms.splitted) for ms in self.ms_depends()),
            timestamp=int(time.time() * 1000),
        )
        for key in ('license', 'license_family'):
            value = self.get_value('about/' + key)
            if value:
                d[key] = value

        preferred_env = self.get_value('build/preferred_env')
        if preferred_env:
            d['preferred_env'] = preferred_env

        # conda 4.4+ optional dependencies
        constrains = ensure_list(
            self.get_value('requirements/run_constrained'))
        # filter None values
        constrains = [v for v in constrains if v]
        if constrains:
            d['constrains'] = constrains

        if self.get_value('build/features'):
            d['features'] = ' '.join(self.get_value('build/features'))
        if self.get_value('build/track_features'):
            d['track_features'] = ' '.join(
                self.get_value('build/track_features'))
        if self.get_value('build/provides_features'):
            d['provides_features'] = self.get_value('build/provides_features')
        if self.get_value('build/requires_features'):
            d['requires_features'] = self.get_value('build/requires_features')
        if self.noarch:
            d['platform'] = d['arch'] = None
            d['subdir'] = 'noarch'
            # These are new-style noarch settings.  the self.noarch setting can be True in 2 ways:
            #    if noarch: True or if noarch_python: True.  This is disambiguation.
            build_noarch = self.get_value('build/noarch')
            if build_noarch:
                d['noarch'] = build_noarch

        # TODO
        # if self.is_app():
        #     d.update(self.app_meta())
        return d
Example #38
def get_installed_version(prefix, pkgs):
    """Primarily used by conda-forge, but may be useful in general for checking when a package
    needs to be updated"""
    from conda_build.utils import ensure_list
    pkgs = ensure_list(pkgs)
    linked_pkgs = linked(prefix)
    versions = {}
    for pkg in pkgs:
        vers_inst = [dist.split('::', 1)[-1].rsplit('-', 2)[1] for dist in linked_pkgs
            if dist.split('::', 1)[-1].rsplit('-', 2)[0] == pkg]
        versions[pkg] = vers_inst[0] if len(vers_inst) == 1 else None
    return versions
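The list comprehension leans on conda's dist-string layout; a standalone sketch of the parsing with a hypothetical dist:

dist = 'defaults::numpy-1.11.3-py36hdbf6ddf_4'
name_ver_build = dist.split('::', 1)[-1].rsplit('-', 2)
print(name_ver_build)     # ['numpy', '1.11.3', 'py36hdbf6ddf_4']
print(name_ver_build[1])  # '1.11.3' -- the installed version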
Example #39
def get_hash_input(packages):
    hash_inputs = {}
    for pkg in ensure_list(packages):
        pkgname = os.path.basename(pkg)[:-8]
        hash_inputs[pkgname] = {}
        hash_input = package_has_file(pkg, 'info/hash_input.json')
        if hash_input:
            hash_inputs[pkgname]['recipe'] = json.loads(hash_input)
        else:
            hash_inputs[pkgname] = "<no hash_input.json in file>"

    return hash_inputs
Example #40
def get_hash_input(packages):
    hash_inputs = {}
    for pkg in ensure_list(packages):
        pkgname = os.path.basename(pkg)[:-8]
        hash_inputs[pkgname] = {}
        hash_input = package_has_file(pkg, 'info/hash_input.json')
        if hash_input:
            hash_inputs[pkgname]['recipe'] = json.loads(hash_input.decode())
        else:
            hash_inputs[pkgname] = "<no hash_input.json in file>"

    return hash_inputs
Example #41
def get_installed_version(prefix, pkgs):
    """Primarily used by conda-forge, but may be useful in general for checking when a package
    needs to be updated"""
    from conda_build.utils import ensure_list
    pkgs = ensure_list(pkgs)
    linked_pkgs = linked(prefix)
    versions = {}
    for pkg in pkgs:
        vers_inst = [dist.split('::', 1)[-1].rsplit('-', 2)[1] for dist in linked_pkgs
            if dist.split('::', 1)[-1].rsplit('-', 2)[0] == pkg]
        versions[pkg] = vers_inst[0] if len(vers_inst) == 1 else None
    return versions
Example #42
def build_string_from_metadata(metadata):
    if metadata.meta.get('build', {}).get('string'):
        build_str = metadata.get_value('build/string')
    else:
        res = []
        log = utils.get_logger(__name__)

        build_pkg_names = [ms.name for ms in metadata.ms_depends('build')]
        # TODO: this is the bit that puts in strings like py27np111 in the filename.  It would be
        #    nice to get rid of this, since the hash supersedes that functionally, but not clear
        #    whether anyone's tools depend on this file naming right now.
        for s, names, places in (('py', 'python', 2), ('np', 'numpy', 2), ('pl', 'perl', 2),
                                 ('lua', 'lua', 2), ('r', ('r', 'r-base'), 3)):
            for ms in metadata.ms_depends('run'):
                for name in ensure_list(names):
                    if ms.name == name and name in build_pkg_names:
                        # only append numpy when it is actually pinned
                        if name == 'numpy' and (not hasattr(ms, 'version') or not ms.version):
                            continue
                        log.warn("Deprecation notice: computing build string (like pyXY).  This "
                                 "functionality has been replaced with the hash (h????), which"
                                 " can be readily inpsected with `conda inspect hash-inputs "
                                 "<pkg-name>`.  pyXY, npXYY and the like will go away in "
                                 "conda-build 4.0.  Please adapt any code that depends on filenames"
                                 " with pyXY, npXYY, etc.")
                        if metadata.noarch == name or (metadata.get_value('build/noarch_python') and
                                                    name == 'python'):
                            res.append(s)
                        else:
                            variant_version = metadata.config.variant.get(name, "")
                            res.append(''.join([s] + variant_version.split('.')[:places]))

        features = ensure_list(metadata.get_value('build/features', []))
        if res:
            res.append('_')
        if features:
            res.extend(('_'.join(features), '_'))
        res.append('{0}'.format(metadata.build_number() if metadata.build_number() else 0))
        build_str = "".join(res)
    return build_str
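
A standalone sketch of the version mangling that produces tags like py38 or np117; the helper name is mine, not conda-build's:

def short_tag(prefix, version, places):
    # keep the first `places` dot-separated fields and drop the dots,
    # mirroring ''.join([s] + variant_version.split('.')[:places]) above
    return ''.join([prefix] + version.split('.')[:places])

print(short_tag('py', '3.8.5', 2))   # py38
print(short_tag('np', '1.17.4', 2))  # np117
print(short_tag('r', '4.0.3', 3))    # r403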
Example #43
def meta_vars(meta, config):
    d = {}
    for var_name in ensure_list(meta.get_value('build/script_env', [])):
        value = os.getenv(var_name)
        if value is None:
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning
            )
        else:
            d[var_name] = value

    git_dir = join(config.work_dir, '.git')
    hg_dir = join(config.work_dir, '.hg')

    if not isinstance(git_dir, str):
        # On Windows under Python 2, the subprocess env can't handle unicode,
        # so encode the (unicode, hence non-str) path to bytes.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    if external.find_executable('git', config.build_prefix) and os.path.exists(git_dir):
        git_url = meta.get_value('source/git_url')

        if os.path.exists(git_url):
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False

        if git_url:
            _x = verify_git_repo(git_dir,
                                 git_url,
                                 config.git_commits_since_tag,
                                 config.debug,
                                 meta.get_value('source/git_rev', 'HEAD'))

        if _x or meta.get_value('source/path'):
            d.update(get_git_info(git_dir, config.debug))

    elif external.find_executable('hg', config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    # use `get_value` to prevent early exit while name is still unresolved during rendering
    d['PKG_NAME'] = meta.get_value('package/name')
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number() or 0)
    if meta.final:
        d['PKG_BUILD_STRING'] = str(meta.build_id())
    d['RECIPE_DIR'] = (meta.path if meta.path else
                       meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', ''))
    return d
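
A minimal standalone sketch of the script_env pass-through at the top of meta_vars; the variable names here are illustrative only:

import os
import warnings

d = {}
for var_name in ('MY_BUILD_TOKEN', 'PATH'):  # hypothetical script_env entries
    value = os.getenv(var_name)
    if value is None:
        warnings.warn("The environment variable '%s' is undefined." % var_name,
                      UserWarning)
    else:
        d[var_name] = value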
Example #44
def create_r_files(m):
    tf, tf_exists = _create_test_files(m, '.r')

    imports = None
    # two ways we can enable R import tests:
    # 1. preface package name with r- and just list imports in test/imports
    # 2. use list of dicts for test/imports, and have lang: 'r' set in one of those dicts
    if m.name().startswith('r-'):
        imports = ensure_list(m.get_value('test/imports', []))
    else:
        for import_item in ensure_list(m.get_value('test/imports', [])):
            if (hasattr(import_item, 'keys') and 'lang' in import_item and
                    import_item['lang'] == 'r'):
                imports = import_item['imports']
                break
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                fo.write('print("library(%r)")\n' % name)
                fo.write('library(%s)\n' % name)
                fo.write('\n')
    return tf if (tf_exists or imports) else False
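
What the loop above actually emits, sketched with an in-memory buffer and a made-up package name (note that %r wraps the name in quotes):

import io

buf = io.StringIO()
for name in ['Matrix']:
    buf.write('print("library(%r)")\n' % name)
    buf.write('library(%s)\n' % name)
print(buf.getvalue())
# print("library('Matrix')")
# library(Matrix)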
Example #45
def create_files(m):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    rm_rf(m.config.test_dir)
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path, join(m.config.test_dir, fn), m.config.timeout, locking=False,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(m.config.work_dir):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(m.config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            try:
                # disable locking to avoid locking a temporary directory (the extracted test folder)
                copy_into(f, f.replace(m.config.work_dir, m.config.test_dir), m.config.timeout,
                          locking=False)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(m.config.test_dir, ext):
                os.remove(f)
    return has_files
Example #46
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    ignore_pkgs_list = utils.ensure_list(
        m.get_value('build/ignore_run_exports_from'))
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        if any(pkg.name in req.split(' ')[0] for req in ignore_pkgs_list):
            continue
        run_exports = None
        if m.config.use_channeldata:
            channeldata = utils.download_channeldata(pkg.channel)
            # only use channeldata if requested, channeldata exists and contains
            # a packages key, otherwise use run_exports from the packages themselves
            if 'packages' in channeldata:
                pkg_data = channeldata['packages'].get(pkg.name, {})
                run_exports = pkg_data.get('run_exports',
                                           {}).get(pkg.version, {})
        if run_exports is None:
            loc, dist = execute_download_actions(m,
                                                 actions,
                                                 env=env,
                                                 package_subset=pkg)[pkg]
            run_exports = _read_specs_from_package(loc, dist)
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(
                additional_specs, specs)
    return additional_specs
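
The channeldata lookup above assumes a nested mapping keyed by package name and then by version; a sketch with hypothetical values:

channeldata = {
    'packages': {
        'libfoo': {
            'run_exports': {'1.2.0': {'weak': ['libfoo >=1.2.0,<2.0a0']}},
        },
    },
}
pkg_data = channeldata['packages'].get('libfoo', {})
run_exports = pkg_data.get('run_exports', {}).get('1.2.0', {})
print(run_exports)  # {'weak': ['libfoo >=1.2.0,<2.0a0']}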
Example #47
def create_r_files(m):
    tf, tf_exists = _create_test_files(m, '.r')

    imports = None
    # two ways we can enable R import tests:
    # 1. preface package name with r- and just list imports in test/imports
    # 2. use list of dicts for test/imports, and have lang: 'r' set in one of those dicts
    if m.name().startswith('r-'):
        imports = ensure_list(m.get_value('test/imports', []))
    else:
        for import_item in ensure_list(m.get_value('test/imports', [])):
            if (hasattr(import_item, 'keys') and 'lang' in import_item
                    and import_item['lang'] == 'r'):
                imports = import_item['imports']
                break
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                fo.write('print("library(%r)")\n' % name)
                fo.write('library(%s)\n' % name)
                fo.write('\n')
    return tf if (tf_exists or imports) else False
Example #48
def combine_specs(specs):
    """With arbitrary sets of sources, combine into a single aggregate spec.

    Later specs in the input set have priority and overwrite duplicate entries.

    specs: list of dictionaries.  Keys are arbitrary, but correspond to variable
           names used in Jinja2 templated recipes.  Values can be either single
           values (strings or integers), or collections (lists, tuples, sets).
    """
    # copy the defaults so repeated calls don't mutate the module-level list
    extend_keys = list(DEFAULT_EXTEND_KEYS)
    extend_keys.extend([
        key for spec in specs if spec
        for key in ensure_list(spec.get('extend_keys'))
    ])

    values = {}
    # each spec is a dictionary.  Each subsequent spec replaces the previous one.
    #     Only the last one with the key stays.
    for spec in specs:
        if spec:
            for k, v in spec.items():
                if k in extend_keys:
                    # update dictionaries, extend lists
                    if hasattr(v, 'keys'):
                        if k in values and hasattr(values[k], 'keys'):
                            values[k].update(v)
                        else:
                            values[k] = v
                    else:
                        values[k] = ensure_list(values.get(k, []))
                        values[k].extend(ensure_list(v))
                        # uniquify
                        values[k] = list(set(values[k]))
                else:
                    if hasattr(v, 'keys'):
                        values[k] = v
                    else:
                        values[k] = ensure_list(v)
    return values, set(extend_keys)
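
A sketch of the precedence rules, assuming 'pin_run_as_build' is among the default extend keys (the inputs are hypothetical):

specs = [
    {'python': '3.8', 'pin_run_as_build': {'python': 'x.x'}},
    {'python': '3.9', 'pin_run_as_build': {'numpy': 'x.x'}},
]
# combine_specs(specs) would yield roughly:
#   values == {'python': ['3.9'],  # scalar key: the later spec wins
#              'pin_run_as_build': {'python': 'x.x', 'numpy': 'x.x'}}  # merged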
Example #49
def get_package_variants(recipedir_or_metadata, config=None, variants=None):
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(
        recipedir_or_metadata,
        ensure_list(config.variant_config_files),
        ignore_system_config=config.ignore_system_variants,
        exclusive_config_file=config.exclusive_config_file)

    specs = OrderedDict(internal_defaults=get_default_variant(config))

    for f in files:
        specs[f] = parse_config_file(f, config)

    # this is the override of the variants from files and args with values from CLI or env vars
    if hasattr(config, 'variant') and config.variant:
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for f, spec in specs.items():
        try:
            validate_spec(spec)
        except ValueError as e:
            raise ValueError("Error in config {}: {}".format(f, str(e)))

    # this merges each of the specs, providing a debug message when a given setting is overridden
    #      by a later spec
    combined_spec, extend_keys = combine_specs(specs,
                                               log_output=config.verbose)

    extend_keys.update({'zip_keys', 'extend_keys'})

    # delete the default specs, so that they don't unnecessarily limit the matrix
    specs = specs.copy()
    del specs['internal_defaults']

    combined_spec = dict_of_lists_to_list_of_dicts(combined_spec,
                                                   extend_keys=extend_keys)
    for source, source_specs in reversed(specs.items()):
        for k, vs in source_specs.items():
            if k not in extend_keys:
                # when filtering ends up killing off all variants, we just ignore that.  Generally,
                #    this arises when a later variant config overrides, rather than selects a
                #    subspace of earlier configs
                combined_spec = (filter_by_key_value(
                    combined_spec, k, vs, source_name=source) or combined_spec)
    return combined_spec
Example #50
def combine_specs(specs):
    """With arbitrary sets of sources, combine into a single aggregate spec.

    Later specs in the input set have priority and overwrite duplicate entries.

    specs: list of dictionaries.  Keys are arbitrary, but correspond to variable
           names used in Jinja2 templated recipes.  Values can be either single
           values (strings or integers), or collections (lists, tuples, sets).
    """
    # copy the defaults so repeated calls don't mutate the module-level list
    extend_keys = list(DEFAULT_EXTEND_KEYS)
    extend_keys.extend([key for spec in specs if spec
                        for key in ensure_list(spec.get('extend_keys'))])

    values = {}
    # each spec is a dictionary.  Each subsequent spec replaces the previous one.
    #     Only the last one with the key stays.
    for spec in specs:
        if spec:
            for k, v in spec.items():
                if k in extend_keys:
                    # update dictionaries, extend lists
                    if hasattr(v, 'keys'):
                        if k in values and hasattr(values[k], 'keys'):
                            values[k].update(v)
                        else:
                            values[k] = v
                    else:
                        values[k] = ensure_list(values.get(k, []))
                        values[k].extend(ensure_list(v))
                        # uniquify
                        values[k] = list(set(values[k]))
                else:
                    if hasattr(v, 'keys'):
                        values[k] = v
                    else:
                        values[k] = ensure_list(v)
    return values, set(extend_keys)
Example #51
def get_env_dependencies(m, env, variant, exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs

    subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant)

    dependencies = set(dependencies)
    unsat = None
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir, tuple(dependencies), env,
                                                  subdir=getattr(m.config, '{}_subdir'.format(env)),
                                                  debug=m.config.debug,
                                                  verbose=m.config.verbose,
                                                  locking=m.config.locking,
                                                  bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                                                  timeout=m.config.timeout,
                                                  disable_pip=m.config.disable_pip,
                                                  max_env_retry=m.config.max_env_retry,
                                                  output_folder=m.config.output_folder,
                                                  channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list((specs + subpackages + pass_through_deps) or
                  m.meta.get('requirements', {}).get(env, [])),
            actions, unsat)
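
A standalone sketch of the ' x.x' placeholder substitution at the top of get_env_dependencies:

# 'x.x' in a spec is swapped for the variant's pinned version before solving.
variant = {'numpy': '1.17'}
spec = 'numpy x.x'
pkg_name = spec.split()[0]
print(' '.join((pkg_name, variant.get(pkg_name, ''))))  # numpy 1.17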
Example #52
def validate_spec(src, spec):
    errors = []

    # check for invalid characters
    errors.extend(
        "  {} key contains an invalid character '-'".format(k)
        for k in spec
        if "-" in k
    )

    # check for properly formatted zip_key
    try:
        zip_keys = _get_zip_keys(spec)
    except ValueError as e:
        errors.append(str(e))
    else:
        # check if every zip field is defined
        errors.extend(
            "  zip_key entry {} in group {} does not have any settings".format(k, zg)
            for zg in zip_keys
            for k in zg
            # include error if key is not defined in spec
            if k not in spec
        )

        # check for duplicate keys
        unique = set()
        errors.extend(
            "  zip_key entry {} in group {} is a duplicate, keys can only occur "
            "in one group".format(k, zg)
            # include error if key has already been seen, otherwise add to unique keys
            if k in unique else unique.add(k)
            for zg in zip_keys
            for k in zg
        )

        # check that all zip fields within a zip_group are the same length
        errors.extend(
            "  zip fields in zip_key group {} are not all the same length".format(zg)
            for zg in zip_keys
            # include error if the zip fields in a zip_group are not all the
            # same size, ignoring missing fields
            if len({len(ensure_list(spec[k])) if k in spec else None for k in zg} - {None}) > 1
        )

    # filter out None values that were potentially added above
    errors = list(filter(None, errors))
    if errors:
        raise ValueError("Variant configuration errors in {}:\n{}".format(src, "\n".join(errors)))
Example #53
def create_py_files(m):
    tf, tf_exists = _create_test_files(m, '.py')
    imports = ensure_list(m.get_value('test/imports', []))
    for import_item in imports:
        if (hasattr(import_item, 'keys') and 'lang' in import_item
                and import_item['lang'] == 'python'):
            imports = import_item['imports']
            break
    if imports:
        with open(tf, 'a+') as fo:
            for name in imports:
                fo.write('print("import: %r")\n' % name)
                fo.write('import %s\n' % name)
                fo.write('\n')
    return tf if (tf_exists or imports) else False
Example #54
def _get_extend_keys(spec, include_defaults=True):
    """
    Extracts 'extend_keys' from `spec`.

    :param spec: Variants specification
    :type spec: dict
    :param include_defaults: Whether to include default 'extend_keys'
    :type include_defaults: bool, optional
    :return: Standardized 'extend_keys' value
    :rtype: set
    """
    extend_keys = {'zip_keys', 'extend_keys'}
    if include_defaults:
        extend_keys.update(DEFAULT_VARIANTS['extend_keys'])
    return extend_keys.union(ensure_list(spec.get('extend_keys')))
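
A quick usage sketch; the spec contents are hypothetical:

spec = {'extend_keys': ['cflags'], 'cflags': ['-O2']}
# _get_extend_keys(spec, include_defaults=False)
#   -> {'zip_keys', 'extend_keys', 'cflags'}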
Example #55
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
        m, 'build', permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        m.meta['requirements']['host'].extend(
            extra_run_specs_from_build.get('strong', []))
        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(
            m, 'host', permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(
            extra_run_specs_from_host.get('strong', []) +
            extra_run_specs_from_host.get('weak', []) +
            extra_run_specs_from_build.get('strong', []))
    else:
        # redo this, but lump in the host deps too, to catch any run_exports stuff that gets merged
        #    when build platform is same as host
        build_deps, build_actions, build_unsat = get_env_dependencies(
            m,
            'build',
            m.config.variant,
            exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        m.config.build_prefix_override = not m.uses_new_style_compiler_activation
        host_deps = []
        host_unsat = []
        extra_run_specs = set(
            extra_run_specs_from_build.get('strong', []) +
            extra_run_specs_from_build.get('weak', []))

    run_deps = extra_run_specs | set(utils.ensure_list(
        requirements.get('run')))

    requirements.update({
        'build':
        [utils.ensure_valid_spec(spec, warn=True) for spec in build_deps],
        'host':
        [utils.ensure_valid_spec(spec, warn=True) for spec in host_deps],
        'run': [utils.ensure_valid_spec(spec, warn=True) for spec in run_deps]
    })

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
Example #56
def provide(metadata, config, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """

    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)
    git = None

    meta = metadata.get_section('source')

    if any(k in meta for k in ('fn', 'url')):
        unpack(metadata, config=config)
    elif 'git_url' in meta:
        git = git_source(metadata, config=config)
    # build to make sure we have a work directory with source in it.  We want to make sure that
    #    whatever version that is does not interfere with the test we run next.
    elif 'hg_url' in meta:
        hg_source(metadata, config=config)
    elif 'svn_url' in meta:
        svn_source(metadata, config=config)
    elif 'path' in meta:
        path = normpath(
            abspath(join(metadata.path, metadata.get_value('source/path'))))
        if config.verbose:
            print("Copying %s to %s" % (path, config.work_dir))
        # careful here: we set test path to be outside of conda-build root in setup.cfg.
        #    If you don't do that, this is a recursive function
        copy_into(path,
                  config.work_dir,
                  config.timeout,
                  locking=config.locking)
    else:  # no source
        if not isdir(config.work_dir):
            os.makedirs(config.work_dir)

    if patch:
        src_dir = config.work_dir
        patches = ensure_list(meta.get('patches', []))
        for patch in patches:
            apply_patch(src_dir, join(metadata.path, patch), config, git)

    return config.work_dir
Example #57
def list_of_dicts_to_dict_of_lists(list_of_dicts):
    """Opposite of dict_of_lists_to_list_of_dicts function.

    Take broken out collection of variants, and squish it into a dict, where each value is a list.
    Only squishes string/int values; does "update" for dict keys
    """
    if not list_of_dicts:
        return
    squished = OrderedDict()
    all_zip_keys = set()
    groups = None
    zip_key_groups = (list_of_dicts[0]['zip_keys']
                      if 'zip_keys' in list_of_dicts[0]
                      and list_of_dicts[0]['zip_keys'] else [])
    if zip_key_groups:
        if (isinstance(list_of_dicts[0]['zip_keys'][0], list)
                or isinstance(list_of_dicts[0]['zip_keys'][0], tuple)):
            groups = list_of_dicts[0]['zip_keys']
        else:
            groups = [list_of_dicts[0]['zip_keys']]
        for group in groups:
            for item in group:
                all_zip_keys.add(item)
    for variant in list_of_dicts:
        for k, v in variant.items():
            if k == 'zip_keys':
                continue
            if hasattr(v, 'keys'):
                existing_value = squished.get(k, OrderedDict())
                existing_value.update(v)
                squished[k] = existing_value
            elif isinstance(v, list):
                squished[k] = set(squished.get(k, set())) | set(v)
            else:
                squished[k] = list(squished.get(k, [])) + ensure_list(v)
                if k not in all_zip_keys:
                    squished[k] = list(set(squished[k]))
    # reduce the combinatoric space of the zipped keys, too:
    if groups:
        for group in groups:
            values = list(zip(*set(zip(*(squished[key] for key in group)))))
            for idx, key in enumerate(group):
                squished[key] = values[idx]
    squished['zip_keys'] = zip_key_groups
    return squished
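
A standalone sketch of the squishing for plain (non-zip) keys; note the set() de-duplication means list order is not guaranteed:

variants = [
    {'python': '3.8', 'numpy': '1.17'},
    {'python': '3.9', 'numpy': '1.17'},
]
# list_of_dicts_to_dict_of_lists(variants) would yield, up to ordering:
#   {'python': ['3.8', '3.9'], 'numpy': ['1.17'], 'zip_keys': []}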
Example #58
def create_shell_files(m, test_dir=None):
    if not test_dir:
        test_dir = m.config.test_dir
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    # the way this works is that each output needs to explicitly define a test script to run.
    #   They do not automatically pick up run_test.*, but can be pointed at that explicitly.
    for out in m.meta.get('outputs', []):
        if m.name() == out.get('name'):
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(join(m.path, name),
                  test_dir,
                  m.config.timeout,
                  locking=False)
        has_tests = True

    commands = ensure_list(m.get_value('test/commands', []))
    if commands:
        with open(join(test_dir, name), 'a') as f:
            f.write('\n\n')
            if not on_win:
                f.write('set -ex\n\n')
            f.write('\n\n')
            for cmd in commands:
                f.write(cmd)
                f.write('\n')
                if on_win:
                    f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n")
                has_tests = True
            if on_win:
                f.write('exit /B 0\n')
            else:
                f.write('exit 0\n')
    return has_tests or os.path.isfile(os.path.join(test_dir, name))
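
The POSIX branch above appends a script body like the following, sketched with an in-memory buffer and a hypothetical test/commands entry:

import io

f = io.StringIO()
f.write('\n\n')
f.write('set -ex\n\n')       # POSIX only: echo commands and fail fast
f.write('\n\n')
for cmd in ['pytest -q']:    # hypothetical test/commands entry
    f.write(cmd)
    f.write('\n')
f.write('exit 0\n')
print(f.getvalue())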
Example #59
def update_index(dir_paths,
                 config=None,
                 force=False,
                 check_md5=False,
                 remove=False,
                 channel_name=None,
                 subdir=None,
                 threads=None,
                 patch_generator=None,
                 verbose=False,
                 progress=False,
                 hotfix_source_repo=None,
                 shared_format_cache=True,
                 current_index_versions=None,
                 **kwargs):
    import yaml
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3, string_types
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in ensure_list(dir_paths)]
    # On Python 2, decode byte-string paths to unicode
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if isinstance(current_index_versions, string_types):
        with open(current_index_versions) as f:
            current_index_versions = yaml.safe_load(f)

    for path in dir_paths:
        update_index(path,
                     check_md5=check_md5,
                     channel_name=channel_name,
                     patch_generator=patch_generator,
                     threads=threads,
                     verbose=verbose,
                     progress=progress,
                     hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir),
                     shared_format_cache=shared_format_cache,
                     current_index_versions=current_index_versions)
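
A hypothetical invocation of the wrapper above; the channel path is a placeholder, and note that a single subdir string is normalized to a list via ensure_list:

update_index(['/path/to/channel'], subdir='linux-64', threads=1, verbose=True)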
Example #60
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    pkg_locs_and_dists = execute_download_actions(m, actions, env=env,
                                                  package_subset=linked_packages)

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    additional_specs = {}
    for (loc, dist) in pkg_locs_and_dists.values():
        specs = _read_specs_from_package(loc, dist)
        additional_specs = utils.merge_dicts_of_lists(additional_specs,
                                                      _filter_run_exports(specs, ignore_list))
    return additional_specs