Example #1
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, expand_output=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    if m.final:
        rendered_metadata = [(m, False, False), ]
        index = None
    else:
        variants = (dict_of_lists_to_list_of_dicts(variants, m.config.platform)
                    if variants else get_package_variants(m, m.config))
        index = get_build_index(m.config, m.config.build_subdir)
        rendered_metadata = distribute_variants(m, variants, index,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        if not rendered_metadata:
            raise ValueError("No variants were satisfiable - no valid recipes could be rendered.")

    if expand_output:
        rendered_metadata = expand_outputs(rendered_metadata, index)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata, index
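
A minimal usage sketch for the render_recipe variant above, assuming it is importable next to conda-build's Config object (the recipe path here is hypothetical):

from conda_build.config import Config

config = Config()
# Returns a list of (metadata, needs_download, needs_render_in_env) tuples plus an index.
metadata_tuples, index = render_recipe('./my-recipe', config=config,
                                       no_download_source=True)
for metadata, _needs_download, _needs_render_in_env in metadata_tuples:
    print(metadata.name())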
Example #2
def determine_package_nature(pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls):
    run_exports = None
    lib_prefix = pkg.name.startswith('lib')
    codefiles = get_package_obj_files(pkg, prefix)
    dsos = [f for f in codefiles for ext in ('.dylib', '.so', '.dll') if ext in f]
    # we don't care about the actual run_exports value, just whether or not run_exports are present.  We can use channeldata
    #    and it'll be a more reliable source (no disk race condition nonsense)
    _, _, channeldata = get_build_index(subdir=subdir,
                                        bldpkgs_dir=bldpkgs_dir,
                                        output_folder=output_folder,
                                        channel_urls=channel_urls,
                                        debug=False,
                                        verbose=False,
                                        clear_cache=False)
    channel_used = pkg.channel
    channeldata = channeldata.get(channel_used)

    if channeldata and pkg.name in channeldata['packages']:
        run_exports = channeldata['packages'][pkg.name].get('run_exports', {})
    else:
        for pkgs_dir in pkgs_dirs:
            test_folder = os.path.join(pkgs_dir, pkg.dist_name)
            test_filename = os.path.join(pkgs_dir, pkg.fn)
            if os.path.exists(test_folder):
                run_exports = get_run_exports(test_folder)
                break
            elif os.path.isfile(test_filename):
                run_exports = get_run_exports(test_filename)
                break
    return (dsos, run_exports, lib_prefix)
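
The dsos comprehension flags dynamic shared objects by extension substring. A self-contained illustration (note that `in` is a substring test, so versioned Linux libraries like `.so.1` also match):

codefiles = ['lib/libfoo.dylib', 'bin/tool', 'lib/bar.so', 'Library/baz.dll',
             'lib/bar.so.1']
dsos = [f for f in codefiles for ext in ('.dylib', '.so', '.dll') if ext in f]
print(dsos)  # ['lib/libfoo.dylib', 'lib/bar.so', 'Library/baz.dll', 'lib/bar.so.1']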
Example #3
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug, verbose=m.config.verbose,
                                      locking=m.config.locking, timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    additional_specs = {}
    for pkg in linked_packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = _read_specs_from_package(pkg_loc, pkg_dist)

        additional_specs = utils.merge_dicts_of_lists(additional_specs,
                                                      _filter_run_exports(specs, ignore_list))
    return additional_specs
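
The explicit_specs extraction at the top of this function is a plain split on the first space; in isolation:

env_specs = ['numpy 1.16.*', 'python >=3.6,<3.8', 'pip']
explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
print(explicit_specs)  # ['numpy', 'python', 'pip']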
Example #4
def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)), bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder, channel_urls=m.config.channel_urls,
                                  debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking,
                                  timeout=m.config.timeout)

    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)

    pkg_files = {}

    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages

    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist

    return pkg_files
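
How pkg_dist is derived for a plain spec string. The helper below is a stand-in for strip_channel, assumed here to drop a leading 'channel::' prefix (an assumption for illustration, not conda-build's actual implementation):

def strip_channel_stand_in(spec):
    # assumption: strip_channel removes a 'channel::' prefix if present
    return spec.split('::', 1)[-1]

pkg = strip_channel_stand_in('conda-forge::numpy 1.16 py37_0')
pkg_dist = pkg.split(' ')[0]
print(pkg_dist)  # 'numpy'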
Example #5
def testing_index(request):
    with capture():
        index, index_ts = get_build_index(config=Config(debug=False,
                                                        verbose=False),
                                          subdir=subdir,
                                          clear_cache=True)
    return index
Example #6
def collect_tasks(path,
                  folders,
                  matrix_base_dir,
                  channels=None,
                  steps=0,
                  test=False,
                  max_downstream=5,
                  variant_config_files=None,
                  platform_filters=None):
    # runs = ['test']
    # not testing means build and test
    # if not test:
    #     runs.insert(0, 'build')
    runs = ['build']

    task_graph = nx.DiGraph()
    config = conda_build.api.Config()
    platform_filters = ensure_list(platform_filters) if platform_filters else [
        '*'
    ]
    for run in runs:
        platforms = parse_platforms(matrix_base_dir, run, platform_filters)
        # loop over platforms here because each platform may have different dependencies
        # each platform will be submitted with a different label
        for platform in platforms:
            index_key = '-'.join([platform['platform'], str(platform['arch'])])
            config.channel_urls = channels or []
            config.variant_config_files = variant_config_files or []
            conda_resolve = Resolve(
                get_build_index(subdir=index_key,
                                bldpkgs_dir=config.bldpkgs_dir)[0])
            # this graph is potentially different for platform and for build or test mode ("run")
            g = construct_graph(path,
                                worker=platform,
                                folders=folders,
                                run=run,
                                matrix_base_dir=matrix_base_dir,
                                conda_resolve=conda_resolve,
                                config=config)
            # Apply the build label to any nodes that need (re)building or testing
            expand_run(g,
                       conda_resolve=conda_resolve,
                       worker=platform,
                       run=run,
                       steps=steps,
                       max_downstream=max_downstream,
                       recipes_dir=path,
                       matrix_base_dir=matrix_base_dir)
            # merge this graph with the main one
            task_graph = nx.compose(task_graph, g)
    return task_graph
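
The per-platform graphs are merged with networkx's compose; a small demonstration of the merge semantics (nodes and edges from both graphs are kept):

import networkx as nx

g1 = nx.DiGraph([('build-a', 'build-b')])
g2 = nx.DiGraph([('build-b', 'build-c')])
merged = nx.compose(g1, g2)
print(sorted(merged.nodes))  # ['build-a', 'build-b', 'build-c']
print(sorted(merged.edges))  # [('build-a', 'build-b'), ('build-b', 'build-c')]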
Example #7
def get_env_dependencies(m, env, variant, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    index, index_ts = get_build_index(
        m.config, getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec.split(' ')[0])
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    dependencies = list(set(dependencies))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir,
                                                  index,
                                                  dependencies,
                                                  m.config,
                                                  timestamp=index_ts)
        except UnsatisfiableError as e:
            # we'll get here if the environment is unsatisfiable
            raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + pass_through_deps, actions
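
The dash_or_under regex makes variant keys match spec names regardless of dash/underscore spelling:

import re

dash_or_under = re.compile("[-_]")
print(dash_or_under.sub("", "my-package"))  # 'mypackage'
print(dash_or_under.sub("", "my-package") == dash_or_under.sub("", "my_package"))  # True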
Example #8
def get_env_dependencies(m, env, variant, index=None, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    if not index:
        index = get_build_index(m.config,
                                getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    append_specs = []
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        else:
            append_specs.append(spec)
    prefix = m.config.host_prefix if env == 'host' else m.config.build_prefix
    try:
        actions = environ.get_install_actions(prefix, index, dependencies,
                                              m.config)
    except UnsatisfiableError as e:
        # we'll get here if the environment is unsatisfiable
        raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + append_specs
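
The ' x.x' substitution swaps the legacy numpy placeholder for the variant's pinned version; standalone:

variant = {'numpy': '1.16'}
specs = ['numpy x.x', 'python']
specs = [spec.replace(' x.x', ' {}'.format(variant.get('numpy', ''))) for spec in specs]
print(specs)  # ['numpy 1.16', 'python']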
Example #9
def get_install_actions(prefix,
                        specs,
                        env,
                        retries=0,
                        subdir=None,
                        verbose=True,
                        debug=False,
                        locking=True,
                        bldpkgs_dirs=None,
                        timeout=900,
                        disable_pip=False,
                        max_env_retry=3,
                        output_folder=None,
                        channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir,
                                         list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug,
                                         verbose=verbose,
                                         locking=locking,
                                         timeout=timeout)
    specs = tuple(
        utils.ensure_valid_spec(spec) for spec in specs
        if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions
            and last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls,
                                  disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(
                            locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s. "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            retries=retries + 1,
                            subdir=subdir,
                            verbose=verbose,
                            debug=debug,
                            locking=locking,
                            bldpkgs_dirs=tuple(bldpkgs_dirs),
                            timeout=timeout,
                            disable_pip=disable_pip,
                            max_env_retry=max_env_retry,
                            output_folder=output_folder,
                            channel_urls=tuple(channel_urls))
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(
                        re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep))
                        for dep in specs):
                    actions['LINK'] = [
                        spec for spec in actions['LINK'] if spec.name != pkg
                    ]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls,
                        disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
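
The capture trick above relies on a generator lambda: contextlib.contextmanager(lambda: (yield)) builds a do-nothing context manager, so verbose/debug runs leave output uncaptured. It is runnable on its own:

import contextlib

noop_capture = contextlib.contextmanager(lambda: (yield))
with noop_capture():
    print("output passes through uncaptured")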
Example #10
def create_env(prefix,
               specs,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               index=None,
               locks=None):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        utils.get_logger("conda_build").setLevel(logging.DEBUG)
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        utils.get_logger("conda_build").setLevel(logging.INFO)
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        if os.path.isdir(prefix):
            utils.rm_rf(prefix)

        specs = list(set(specs))
        for feature, value in feature_list:
            if value:
                specs.append('%s@' % feature)

        if specs:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        if not index:
                            index = get_build_index(config=config,
                                                    subdir=subdir)
                        actions = get_install_actions(prefix, index, specs,
                                                      config)
                        plan.display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        plan.execute_actions(actions,
                                             index,
                                             verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or re.search(
                            'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                            str(exc)) or isinstance(exc, PaddingError))
                            and config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn(
                                "Build prefix failed with prefix length %d",
                                config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn(
                                "One or more of your package dependencies needs to be rebuilt "
                                "with a longer prefix length.")
                            log.warn(
                                "Falling back to legacy prefix length of 80 characters."
                            )
                            log.warn(
                                "Your package will not install into prefixes > 80 characters."
                            )
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix

                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache,
                                       retry=retry + 1)
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache,
                                       retry=retry + 1)
                        else:
                            log.error(
                                "Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError,
                        LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(
                                str(exc)))
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs,
                                   config=config,
                                   subdir=subdir,
                                   clear_cache=clear_cache,
                                   retry=retry + 1)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
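
Track features are requested by appending a spec of the form 'name@', as the feature_list loop above does:

feature_list = [('nomkl', True), ('vc9', False)]
specs = ['python']
for feature, value in feature_list:
    if value:
        specs.append('%s@' % feature)
print(specs)  # ['python', 'nomkl@']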
Example #11
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here (subpackages
    won't affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #   There's no way around it AFAICT.  We must download the source to be able to render
    #   the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #   folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants'):
            m.config.variants = [m.config.variant]
        rendered_metadata = [
            (m, False, False),
        ]
    else:
        index, index_ts = get_build_index(m.config.build_subdir,
                                          bldpkgs_dir=m.config.bldpkgs_dir,
                                          output_folder=m.config.output_folder,
                                          channel_urls=m.config.channel_urls,
                                          debug=m.config.debug,
                                          verbose=m.config.verbose,
                                          locking=m.config.locking,
                                          timeout=m.config.timeout)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants)
                    if variants else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True,
            bypass_env_check=bypass_env_check)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
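
Conceptually, dict_of_lists_to_list_of_dicts expands a variant matrix into one dict per combination. A hedged sketch of that idea (not conda-build's actual implementation):

from itertools import product

def dict_of_lists_to_list_of_dicts_sketch(variants):
    # one dict per point in the cartesian product of the value lists
    keys = sorted(variants)
    return [dict(zip(keys, combo))
            for combo in product(*(variants[k] for k in keys))]

print(dict_of_lists_to_list_of_dicts_sketch({'python': ['3.6', '3.7'],
                                             'numpy': ['1.16']}))
# [{'numpy': '1.16', 'python': '3.6'}, {'numpy': '1.16', 'python': '3.7'}]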
Example #12
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)

    if actions:
        execute_actions(actions, index, verbose=m.config.debug)
        ignore_list = utils.ensure_list(
            m.get_value('build/ignore_run_exports'))

        _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in _pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                    if os.path.isfile(downstream_file):
                        with open(downstream_file) as f:
                            specs = f.read().splitlines()
                        additional_specs.extend(
                            _filter_run_exports(specs, ignore_list))
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/run_exports')
                    if extra_specs:
                        # exclude packages pinning themselves (makes no sense)
                        extra_specs = [
                            spec for spec in extra_specs.splitlines()
                            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                        ]
                        additional_specs.extend(
                            _filter_run_exports(extra_specs, ignore_list))
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with
                    #    proper conda API when available.
                    try:
                        try:
                            # the conda 4.4 API uses a single `link_prefs` kwarg
                            # whereas conda 4.3 used `index` and `link_dists` kwargs
                            pfe = ProgressiveFetchExtract(
                                link_prefs=(index[pkg], ))
                        except TypeError:
                            # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                          index=index)
                        with utils.LoggingContext():
                            pfe.execute()
                        for pkgs_dir in _pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/run_exports')
                                if extra_specs:
                                    specs = extra_specs.splitlines()
                                    additional_specs.extend(
                                        _filter_run_exports(
                                            specs, ignore_list))
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, _pkgs_dirs))
    return additional_specs
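
Note the for/else on the pkgs_dir loop above: the else branch runs only when the loop finishes without a break, i.e. the package was found in no cache directory. In miniature:

pkgs_dirs = ['/tmp/cache-a', '/tmp/cache-b']
for d in pkgs_dirs:
    if d == '/tmp/cache-c':  # never matches
        break
else:
    print("not found in any pkgs_dir")  # this runs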
Example #13
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    # this attribute is added in the first pass of finalize_outputs_pass
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text())).rstrip()
    raw_specs = {}
    if template_string:
        raw_specs = yaml.safe_load(
            m._get_contents(permit_undefined_jinja=False,
                            template_string=template_string)) or {}

    env_specs = utils.expand_reqs(raw_specs.get('requirements',
                                                {})).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    additional_specs = {}
    for pkg in linked_packages:
        pkg_loc = None
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        for pkgs_dir in _pkgs_dirs:
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')

            if os.path.isdir(pkg_dir):
                pkg_loc = pkg_dir
                break
            elif os.path.isfile(pkg_file):
                pkg_loc = pkg_file
                break

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg], ))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = {}
        if os.path.isdir(pkg_loc):
            downstream_file = os.path.join(pkg_loc, 'info/run_exports')
            if os.path.isfile(downstream_file):
                with open(downstream_file) as f:
                    specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
            # a later attempt: record more info in the yaml file, to support "strong" run exports
            elif os.path.isfile(downstream_file + '.yaml'):
                with open(downstream_file + '.yaml') as f:
                    specs = yaml.safe_load(f)
        elif os.path.isfile(pkg_file):
            legacy_specs = utils.package_has_file(pkg_file, 'info/run_exports')
            specs_yaml = utils.package_has_file(pkg_file,
                                                'info/run_exports.yaml')
            if legacy_specs:
                # exclude packages pinning themselves (makes no sense)
                specs = {
                    'weak': [
                        spec.rstrip() for spec in legacy_specs.splitlines()
                        if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                    ]
                }
            elif specs_yaml:
                specs = yaml.safe_load(specs_yaml)

        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs
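
The newer run_exports.yaml form parsed above keys the exports by strength; a minimal example of what yaml.safe_load returns for it:

import yaml

specs_yaml = """\
weak:
  - libfoo >=1.2,<2
strong:
  - libbar >=0.5
"""
specs = yaml.safe_load(specs_yaml)
print(specs['weak'])    # ['libfoo >=1.2,<2']
print(specs['strong'])  # ['libbar >=0.5']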
Example #14
def finalize_metadata(m):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    index, index_ts = get_build_index(m.config, m.config.build_subdir)

    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))

    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)

    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name
                              for (name, variant) in m.other_outputs.keys())

    if excludes or output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes | output_excludes))

    build_reqs = m.meta.get('requirements', {}).get('build', [])
    # if python is in the build specs, but doesn't have a specific associated
    #    version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
        m.meta['requirements']['build'] = build_reqs

    # if we have host deps, they're more important than the build deps.
    build_deps, build_actions = get_env_dependencies(m, 'build',
                                                     m.config.variant,
                                                     exclude_pattern)
    # optimization: we don't need the index after here, and copying them takes a lot of time.
    rendered_metadata = m.copy()

    extra_run_specs = get_upstream_pins(m, build_actions, index)

    reset_index = False
    if m.is_cross:
        host_reqs = m.get_value('requirements/host')
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if host_reqs and 'python' in host_reqs:
            host_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements']['host'] = host_reqs
        host_deps, host_actions = get_env_dependencies(m, 'host',
                                                       m.config.variant,
                                                       exclude_pattern)
        extra_run_specs += get_upstream_pins(m, host_actions, index)
    else:
        host_deps = []

    # IMPORTANT: due to the statefulness of conda's index, this index invalidates the earlier one!
    #    To avoid confusion, any index passed around is always the native build platform.
    if reset_index:
        index = None

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    requirements = rendered_metadata.meta.get('requirements', {})
    run_deps = requirements.get('run', [])
    if output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'
    full_build_deps, _ = get_env_dependencies(m,
                                              pinning_env,
                                              m.config.variant,
                                              exclude_pattern=exclude_pattern)
    full_build_dep_versions = {
        dep.split()[0]: " ".join(dep.split()[1:])
        for dep in full_build_deps
    }
    versioned_run_deps = [
        get_pin_from_build(m, dep, full_build_dep_versions) for dep in run_deps
    ]
    versioned_run_deps.extend(extra_run_specs)

    for _env, values in (('build', build_deps), ('host', host_deps),
                         ('run', versioned_run_deps)):
        if values:
            requirements[_env] = list({strip_channel(dep) for dep in values})
    rendered_metadata.meta['requirements'] = requirements

    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in test_deps
        })
        rendered_metadata.meta['test']['requires'] = versioned_test_deps

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}
    # hard-code build string so that any future "renderings" can't go wrong based on user env
    rendered_metadata.meta['build']['string'] = rendered_metadata.build_id()

    rendered_metadata.final = True
    rendered_metadata.config.index = index
    return rendered_metadata
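
The exclusion regex anchors each excluded name and requires a trailing boundary, so 'python' is excluded without also excluding 'python-dateutil':

import re

excludes = {'python', 'numpy'}
exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                      for exc in excludes))
print(bool(exclude_pattern.match('python 3.7')))       # True
print(bool(exclude_pattern.match('python-dateutil')))  # False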
Example #15
def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=90, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    if verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    elif debug:
        capture = contextlib.contextmanager(lambda: (yield))
        conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder,
                                      channel_urls=channel_urls, debug=debug, verbose=verbose,
                                      locking=locking, timeout=timeout)
    specs = tuple(_ensure_valid_spec(spec) for spec in specs)

    if (specs, env, subdir, channel_urls) in cached_actions and last_index_ts >= index_ts:
        actions = cached_actions[(specs, env, subdir, channel_urls)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except NoPackagesFoundError as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc) or
                            isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                        "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1,
                                                      subdir=subdir,
                                                      verbose=verbose,
                                                      debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout,
                                                      disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            actions['LINK'] = [spec for spec in actions['LINK']
                                if not spec.startswith('pip-') and
                                not spec.startswith('setuptools-')]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls)] = actions.copy()
        last_index_ts = index_ts
    return actions
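
Both get_install_actions variants memoize the solver result on a tuple of hashable call parameters and reuse it only while the index timestamp has not advanced; the cache lookup reduces to:

cached_actions = {}
last_index_ts = 100

key = (('python 3.7',), 'host', 'linux-64', ('defaults',))
cached_actions[key] = {'PREFIX': '/old/prefix', 'LINK': ['python-3.7-h0_0']}

index_ts = 90  # the index is no newer than the cached solve: cache hit
if key in cached_actions and last_index_ts >= index_ts:
    actions = cached_actions[key].copy()
    actions['PREFIX'] = '/new/prefix'  # retarget the prefix, as the real code does
print(actions)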
Example #16
def create_env(prefix,
               specs_or_actions,
               env,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               locks=None,
               is_cross=False,
               is_conda=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.WARN)

    if os.path.exists(prefix):
        for entry in glob(os.path.join(prefix, "*")):
            utils.rm_rf(entry)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            if not locks:
                locks = utils.get_conda_operation_locks(config)
            try:
                with utils.try_acquire_locks(locks, timeout=config.timeout):
                    # input is a list - it's specs in MatchSpec format
                    if not hasattr(specs_or_actions, 'keys'):
                        specs = list(set(specs_or_actions))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            subdir=subdir,
                            verbose=config.verbose,
                            debug=config.debug,
                            locking=config.locking,
                            bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                            timeout=config.timeout,
                            disable_pip=config.disable_pip,
                            max_env_retry=config.max_env_retry,
                            output_folder=config.output_folder,
                            channel_urls=tuple(config.channel_urls))
                    else:
                        actions = specs_or_actions
                    index, _, _ = get_build_index(
                        subdir=subdir,
                        bldpkgs_dir=config.bldpkgs_dir,
                        output_folder=config.output_folder,
                        channel_urls=config.channel_urls,
                        debug=config.debug,
                        verbose=config.verbose,
                        locking=config.locking,
                        timeout=config.timeout)
                    utils.trim_empty_keys(actions)
                    display_actions(actions, index)
                    if utils.on_win:
                        for k, v in os.environ.items():
                            os.environ[k] = str(v)
                    with env_var('CONDA_QUIET', not config.verbose,
                                 reset_context):
                        with env_var('CONDA_JSON', not config.verbose,
                                     reset_context):
                            execute_actions(actions, index)
            except (SystemExit, PaddingError, LinkError,
                    DependencyNeedsBuildingError, CondaError,
                    BuildLockError) as exc:
                if (("too short in" in str(exc) or re.search(
                        'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                        str(exc)) or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    if config.prefix_length_fallback:
                        log.warn("Build prefix failed with prefix length %d",
                                 config.prefix_length)
                        log.warn("Error was: ")
                        log.warn(str(exc))
                        log.warn(
                            "One or more of your package dependencies needs to be rebuilt "
                            "with a longer prefix length.")
                        log.warn(
                            "Falling back to legacy prefix length of 80 characters."
                        )
                        log.warn(
                            "Your package will not install into prefixes > 80 characters."
                        )
                        config.prefix_length = 80

                        host = '_h_env' in prefix
                        # Set this here and use to create environ
                        #   Setting this here is important because we use it below (symlink)
                        prefix = config.host_prefix if host else config.build_prefix
                        actions['PREFIX'] = prefix

                        create_env(prefix,
                                   actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   is_cross=is_cross)
                    else:
                        raise
                elif 'lock' in str(exc):
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
                elif ('requires a minimum conda version' in str(exc)
                      or 'link a source that does not' in str(exc)):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(
                                pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
                else:
                    raise
            # HACK: some of the time, conda screws up somehow and incomplete packages result.
            #    Just retry.
            except (AssertionError, IOError, ValueError, RuntimeError,
                    LockError) as exc:
                if isinstance(exc, AssertionError):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retry < config.max_env_retry:
                    log.warn(
                        "failed to create env, retrying.  exception was: %s",
                        str(exc))
                    create_env(prefix,
                               specs_or_actions,
                               config=config,
                               subdir=subdir,
                               env=env,
                               clear_cache=clear_cache,
                               retry=retry + 1,
                               is_cross=is_cross)
                else:
                    log.error("Failed to create env, max retries exceeded.")
                    raise
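
Note how every transient-failure branch above retries by calling create_env recursively with retry + 1 until config.max_env_retry is exhausted. Reduced to its skeleton (TransientError and _solve_and_link are hypothetical placeholders, not conda-build names):

class TransientError(Exception):
    """Hypothetical stand-in for lock/partial-extraction errors."""


def _solve_and_link(prefix, specs):
    # Placeholder for the real solve + link work.
    pass


def create_env_sketch(prefix, specs, retry=0, max_env_retry=3):
    try:
        _solve_and_link(prefix, specs)
    except TransientError as exc:
        if retry < max_env_retry:
            print("failed to create env, retrying.  exception was: %s" % exc)
            create_env_sketch(prefix, specs, retry=retry + 1,
                              max_env_retry=max_env_retry)
        else:
            print("Failed to create env, max retries exceeded.")
            raise
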
Example #17
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here (subpackages won't
    affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}

    if m.final:
        rendered_metadata = [(m, False, False), ]
    else:
        index, index_ts = get_build_index(m.config, m.config.build_subdir)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants)
                    if variants else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            stub_subpackages=True)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
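
A caller would iterate the returned tuples, one per variant. A hedged usage sketch: the recipe path is hypothetical, and Config is conda-build's standard configuration object.

from conda_build.config import Config

config = Config()
for meta, needs_download, needs_reparse in render_recipe('./my-recipe',
                                                         config=config):
    print(meta.name(), needs_download, needs_reparse)
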
Example #18
def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(prefix, tuple(specs), env,
                                                          subdir=subdir,
                                                          verbose=config.verbose,
                                                          debug=config.debug,
                                                          locking=config.locking,
                                                          bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                                          timeout=config.timeout,
                                                          disable_pip=config.disable_pip,
                                                          max_env_retry=config.max_env_retry,
                                                          output_folder=config.output_folder,
                                                          channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(
                            subdir=subdir,
                            bldpkgs_dir=config.bldpkgs_dir,
                            output_folder=config.output_folder,
                            channel_urls=config.channel_urls,
                            debug=config.debug,
                            verbose=config.verbose,
                            locking=config.locking,
                            timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError) as exc:
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be rebuilt "
                                    "with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                            create_env(prefix, specs_or_actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                            create_env(prefix, specs_or_actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, specs_or_actions, config=config, subdir=subdir,
                                   env=env, clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise
    # We must not symlink conda across different platforms when cross-compiling.
    #  On second thought, I think we must, because activating the host env does
    #     the symlink for us anyway, and when activate does it, we end up with
    #     conda symlinks in every package. =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
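
The 80-character fallback above is triggered by string-matching the exception message. Isolated as a predicate, under the assumption that message matching is acceptable (the isinstance(exc, PaddingError) check is omitted here because it needs conda's exception classes):

import re


def is_prefix_length_failure(exc, prefix_length):
    # Mirrors the heuristic used in the except branch above.
    msg = str(exc)
    return bool(("too short in" in msg or
                 re.search(r'post-link failed for: (?:[a-zA-Z]*::)?openssl', msg))
                and prefix_length > 80)
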
Example #19
def finalize_metadata(m, index=None):
    """Fully render a recipe.  Fill in versions for build dependencies."""
    if not index:
        index = get_build_index(m.config, m.config.build_subdir)

    exclude_pattern = None
    excludes = m.config.variant.get('exclude_from_build_hash', [])
    if excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes))

    # these are obtained from a sort of dry-run of conda.  These are the actual packages that would
    #     be installed in the environment.

    build_deps = get_env_dependencies(m, 'build', m.config.variant, index, exclude_pattern)
    # optimization: we don't need the index after here, and copying them takes a lot of time.
    rendered_metadata = m.copy()
    build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:]) for dep in build_deps}

    extra_run_specs = get_upstream_pins(m, build_deps, index)

    reset_index = False
    if m.config.build_subdir != m.config.host_subdir:
        index = get_build_index(m.config, m.config.host_subdir)
        reset_index = True

    # IMPORTANT: due to the statefulness of conda's index, this index invalidates the earlier one!
    #    To avoid confusion, any index passed around is always the native build platform.
    if reset_index:
        index = None

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    requirements = rendered_metadata.meta.get('requirements', {})
    run_deps = requirements.get('run', [])
    versioned_run_deps = [get_pin_from_build(m, dep, build_dep_versions) for dep in run_deps]
    versioned_run_deps.extend(extra_run_specs)

    rendered_metadata.meta['requirements'] = rendered_metadata.meta.get('requirements', {})
    for env, values in (('build', build_deps), ('run', versioned_run_deps)):
        if values:
            requirements[env] = list({strip_channel(dep) for dep in values})
    rendered_metadata.meta['requirements'] = requirements

    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({get_pin_from_build(m, dep, build_dep_versions)
                                    for dep in test_deps})
        rendered_metadata.meta['test']['requires'] = versioned_test_deps

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}
    # hard-code build string so that any future "renderings" can't go wrong based on user env
    rendered_metadata.meta['build']['string'] = rendered_metadata.build_id()

    rendered_metadata.final = True
    rendered_metadata.config.index = index
    return rendered_metadata
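
The pinning step above turns each build dependency into a name -> version mapping, then rewrites the run requirements from it. A toy version of that transformation, with a plain dict lookup standing in for get_pin_from_build:

build_deps = ['python 3.8.5 h12345_0', 'numpy 1.19.1', 'zlib 1.2.11']
build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                      for dep in build_deps}


def pin_from_build(dep, versions):
    # Toy stand-in for get_pin_from_build: pin to the exact build-time version.
    name = dep.split()[0]
    return "%s %s" % (name, versions[name]) if name in versions else dep


print([pin_from_build(d, build_dep_versions) for d in ['numpy', 'requests']])
# -> ['numpy 1.19.1', 'requests']
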
Example #20
def collect_tasks(path,
                  folders,
                  matrix_base_dir,
                  channels=None,
                  steps=0,
                  test=False,
                  max_downstream=5,
                  variant_config_files=None,
                  platform_filters=None,
                  clobber_sections_file=None,
                  append_sections_file=None,
                  pass_throughs=None,
                  skip_existing=True,
                  build_config_vars={}):
    """ Return a graph of build tasks """
    task_graph = nx.DiGraph()
    parsed_cli_args = _parse_python_numpy_from_pass_throughs(pass_throughs)
    config = conda_build.api.Config(
        clobber_sections_file=clobber_sections_file,
        append_sections_file=append_sections_file,
        skip_existing=skip_existing,
        **parsed_cli_args,
    )
    platform_filters = ensure_list(platform_filters) if platform_filters else [
        '*'
    ]
    platforms = parse_platforms(matrix_base_dir, platform_filters,
                                build_config_vars)
    # loop over platforms here because each platform may have different dependencies
    # each platform will be submitted with a different label
    for platform in platforms:
        subdir = f"{platform['platform']}-{platform['arch']}"
        config.variants = get_package_variants(path, config,
                                               platform.get('variants'))
        config.channel_urls = channels or []
        config.variant_config_files = variant_config_files or []
        conda_resolve = Resolve(
            get_build_index(subdir=subdir,
                            bldpkgs_dir=config.bldpkgs_dir,
                            channel_urls=channels)[0])
        # this graph is potentially different for platform and for build or test mode ("run")
        graph = construct_graph(
            path,
            worker=platform,
            folders=folders,
            run="build",
            matrix_base_dir=matrix_base_dir,
            conda_resolve=conda_resolve,
            config=config,
        )
        # Apply the build label to any nodes that need (re)building or testing
        expand_run(
            graph,
            config=config.copy(),
            conda_resolve=conda_resolve,
            worker=platform,
            run="build",
            steps=steps,
            max_downstream=max_downstream,
            recipes_dir=path,
            matrix_base_dir=matrix_base_dir,
        )
        # merge this graph with the main one
        task_graph = nx.compose(task_graph, graph)
    collapse_noarch_python_nodes(task_graph)
    return task_graph
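
Each platform contributes an independent DiGraph that is folded into the aggregate with nx.compose, which unions node and edge sets across platforms. A minimal illustration (the package names are made up):

import networkx as nx

task_graph = nx.DiGraph()
for platform in ('linux-64', 'osx-64'):
    graph = nx.DiGraph()
    graph.add_edge('libfoo-' + platform, 'foo-' + platform)  # build-order edge
    task_graph = nx.compose(task_graph, graph)
print(sorted(task_graph.nodes()))
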
Example #21
def execute_download_actions(m,
                             actions,
                             env,
                             package_subset=None,
                             require_files=False):
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                  bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder,
                                  channel_urls=m.config.channel_urls,
                                  debug=m.config.debug,
                                  verbose=m.config.verbose,
                                  locking=m.config.locking,
                                  timeout=m.config.timeout)

    # this should be just downloading packages.  We don't need to extract them -

    download_actions = {
        k: v
        for k, v in actions.items() if k in ('FETCH', 'EXTRACT', 'PREFIX')
    }
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(download_actions, index, verbose=m.config.debug)

    pkg_files = {}

    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages

    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist,
                                                    m,
                                                    files_only=require_files)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record], ))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist

    return pkg_files
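
The download-only behaviour comes from subsetting the actions dict before executing it; stripped of conda's types, the filtering is just a dict comprehension:

actions = {'FETCH': ['numpy-1.19.1'], 'EXTRACT': ['numpy-1.19.1'],
           'LINK': ['numpy-1.19.1'], 'PREFIX': '/tmp/env'}
download_actions = {k: v for k, v in actions.items()
                    if k in ('FETCH', 'EXTRACT', 'PREFIX')}
# LINK is dropped: we want the artifacts on disk, not a linked environment.
print(sorted(download_actions))
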
Example #22
def create_env(prefix,
               specs_or_actions,
               env,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               locks=None,
               is_cross=False,
               always_include_files=[]):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(
                                prefix,
                                tuple(specs),
                                env,
                                subdir=subdir,
                                verbose=config.verbose,
                                debug=config.debug,
                                locking=config.locking,
                                bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                timeout=config.timeout,
                                disable_pip=config.disable_pip,
                                max_env_retry=config.max_env_retry,
                                output_folder=config.output_folder,
                                channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(
                            subdir=subdir,
                            bldpkgs_dir=config.bldpkgs_dir,
                            output_folder=config.output_folder,
                            channel_urls=config.channel_urls,
                            debug=config.debug,
                            verbose=config.verbose,
                            locking=config.locking,
                            timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or re.search(
                            'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                            str(exc)) or isinstance(exc, PaddingError))
                            and config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn(
                                "Build prefix failed with prefix length %d",
                                config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn(
                                "One or more of your package dependencies needs to be rebuilt "
                                "with a longer prefix length.")
                            log.warn(
                                "Falling back to legacy prefix length of 80 characters."
                            )
                            log.warn(
                                "Your package will not install into prefixes > 80 characters."
                            )
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix,
                                       actions,
                                       config=config,
                                       subdir=subdir,
                                       env=env,
                                       clear_cache=clear_cache,
                                       is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs_or_actions,
                                       config=config,
                                       subdir=subdir,
                                       env=env,
                                       clear_cache=clear_cache,
                                       retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error(
                                "Failed to create env, max retries exceeded.")
                            raise
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs_or_actions,
                                       config=config,
                                       subdir=subdir,
                                       env=env,
                                       clear_cache=clear_cache,
                                       retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error(
                                "Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError,
                        LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(
                                str(exc)))
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
    # We must not symlink conda across different platforms when cross-compiling.
    #  On second thought, I think we must, because activating the host env does
    #     the symlink for us anyway, and when activate does it, we end up with
    #     conda symlinks in every package. =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:

    # Hack, do not SYMLINK_CONDA when we're building conda.
    if not any(include in ('bin/deactivate', 'Scripts/deactivate.bat')
               for include in always_include_files):
        if utils.on_win:
            shell = "cmd.exe"
        else:
            shell = "bash"
        symlink_conda(prefix, sys.prefix, shell)
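
The closing guard skips the symlink when the recipe ships conda's own deactivate scripts, i.e. when conda itself is being built. As a standalone predicate (a sketch of the same test):

def should_symlink_conda(always_include_files):
    # True unless the package ships conda's (de)activate scripts,
    # which signals that conda itself is being built.
    return not any(f in ('bin/deactivate', 'Scripts/deactivate.bat')
                   for f in always_include_files)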