Example #1
def get_env_dependencies(m,
                         env,
                         variant,
                         exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join(
                    (pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs

    subpackages, dependencies, pass_through_deps = _categorize_deps(
        m, specs, exclude_pattern, variant)

    dependencies = set(dependencies)
    unsat = None
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(
                tmpdir,
                tuple(dependencies),
                env,
                subdir=getattr(m.config, '{}_subdir'.format(env)),
                debug=m.config.debug,
                verbose=m.config.verbose,
                locking=m.config.locking,
                bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                timeout=m.config.timeout,
                disable_pip=m.config.disable_pip,
                max_env_retry=m.config.max_env_retry,
                output_folder=m.config.output_folder,
                channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list(
        (specs + subpackages + pass_through_deps)
        or m.meta.get('requirements', {}).get(env, [])), actions, unsat)
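A minimal usage sketch for the example above, assuming a conda_build MetaData instance m; the 'host' env name and the variant keys below are illustrative assumptions, not taken from the example itself.

# Hypothetical call site; `m`, the 'host' env and the variant keys are assumed.
variant = {'numpy': '1.16', 'python': '3.7'}
specs, actions, unsat = get_env_dependencies(
    m, 'host', variant, permit_unsatisfiable_variants=True)
if unsat:
    # With permit_unsatisfiable_variants=True an unsolvable environment is
    # reported through `unsat` instead of re-raising the solver error.
    print("unsatisfiable specs:", unsat)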
Example #2
def get_env_dependencies(m, env, variant, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    index, index_ts = get_build_index(
        m.config, getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec.split(' ')[0])
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    dependencies = list(set(dependencies))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir,
                                                  index,
                                                  dependencies,
                                                  m.config,
                                                  timestamp=index_ts)
        except UnsatisfiableError as e:
            # we'll get here if the environment is unsatisfiable
            raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + pass_through_deps, actions
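actions_to_pins above converts the solver's planned LINK actions into exact version pins. A rough, hypothetical stand-in under the assumption that each linked entry exposes name and version attributes; the real conda-build helper may also include build strings and handle several record types.

def actions_to_pins(actions):
    # Hypothetical simplification: produce 'name version' pin strings from the
    # packages the solver plans to link.
    return [' '.join((pkg.name, pkg.version))
            for pkg in actions.get('LINK', [])]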
Example #3
def get_env_dependencies(m, env, variant, exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs

    subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant)

    dependencies = set(dependencies)
    unsat = None
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir, tuple(dependencies), env,
                                                  subdir=getattr(m.config, '{}_subdir'.format(env)),
                                                  debug=m.config.debug,
                                                  verbose=m.config.verbose,
                                                  locking=m.config.locking,
                                                  bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                                                  timeout=m.config.timeout,
                                                  disable_pip=m.config.disable_pip,
                                                  max_env_retry=m.config.max_env_retry,
                                                  output_folder=m.config.output_folder,
                                                  channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list((specs + subpackages + pass_through_deps) or
                  m.meta.get('requirements', {}).get(env, [])),
            actions, unsat)
Example #4
def get_env_dependencies(m, env, variant, index=None, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    if not index:
        index = get_build_index(m.config,
                                getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    append_specs = []
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        else:
            append_specs.append(spec)
    prefix = m.config.host_prefix if env == 'host' else m.config.build_prefix
    try:
        actions = environ.get_install_actions(prefix, index, dependencies,
                                              m.config)
    except UnsatisfiableError as e:
        # we'll get here if the environment is unsatisfiable
        raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + append_specs
Example #5
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(suffix=random_string) as tmpdir:
        actions = environ.get_install_actions(tmpdir, index, dependencies,
                                              m.config)
        additional_specs = []
        linked_packages = actions['LINK']
        # edit the plan to download all necessary packages
        for key in ('LINK', 'EXTRACT', 'UNLINK'):
            if key in actions:
                del actions[key]
        # this should be just downloading packages.  We don't need to extract them -
        #    we read contents directly
        if actions:
            plan.execute_actions(actions, index, verbose=m.config.debug)

            pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
            for pkg in linked_packages:
                for pkgs_dir in pkgs_dirs:
                    if hasattr(pkg, 'dist_name'):
                        pkg_dist = pkg.dist_name
                    else:
                        pkg = strip_channel(pkg)
                        pkg_dist = pkg.split(' ')[0]

                    pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                    pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                    if os.path.isdir(pkg_dir):
                        downstream_file = os.path.join(pkg_dir, 'info/pin_downstream')
                        if os.path.isfile(downstream_file):
                            additional_specs.extend(open(downstream_file).read().splitlines())
                        break
                    elif os.path.isfile(pkg_file):
                        extra_specs = utils.package_has_file(pkg_file, 'info/pin_downstream')
                        if extra_specs:
                            additional_specs.extend(extra_specs.splitlines())
                        break
                    elif utils.conda_43():
                        # TODO: this is a vile hack reaching into conda's internals. Replace with
                        #    proper conda API when available.
                        try:
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                          index=index)
                            pfe.execute()
                            for pkgs_dir in pkgs_dirs:
                                pkg_file = os.path.join(pkgs_dir, pkg.dist_name + '.tar.bz2')
                                if os.path.isfile(pkg_file):
                                    extra_specs = utils.package_has_file(
                                        pkg_file, 'info/pin_downstream')
                                    if extra_specs:
                                        additional_specs.extend(extra_specs.splitlines())
                                    break
                            break
                        except KeyError:
                            raise DependencyNeedsBuildingError(packages=[pkg.name])
                else:
                    raise RuntimeError("Didn't find expected package {} in package cache ({})"
                                        .format(pkg_dist, pkgs_dirs))

    return additional_specs
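For context, the info/pin_downstream lookup above can be reproduced with the standard library alone. A minimal sketch of what utils.package_has_file is used for here, assuming a plain .tar.bz2 conda package; the path in the commented usage is illustrative.

import tarfile

def read_pin_downstream(pkg_file):
    # Return the contents of info/pin_downstream from a .tar.bz2 package,
    # or None when the archive does not contain that file.
    with tarfile.open(pkg_file, 'r:bz2') as tar:
        try:
            member = tar.extractfile('info/pin_downstream')
        except KeyError:
            return None
        return member.read().decode() if member else None

# extra = read_pin_downstream('/path/to/pkgs/example-pkg-1.0-0.tar.bz2')
# if extra:
#     additional_specs.extend(extra.splitlines())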
Example #6
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    # Add _tmp here to prevent creating the build_prefix too early. This is because, when
    # dirty is set, we skip calling create_env if the folder already exists.
    actions = environ.get_install_actions(m.config.build_prefix[:-4] + "_tmp",
                                          index, dependencies, m.config)
    additional_specs = []
    linked_packages = actions['LINK']
    # edit the plan to download all necessary packages
    if 'LINK' in actions:
        del actions['LINK']
    if 'EXTRACT' in actions:
        del actions['EXTRACT']
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if actions:
        plan.execute_actions(actions, index, verbose=m.config.debug)

        pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir,
                                                   'info/pin_downstream')
                    if os.path.isfile(downstream_file):
                        additional_specs.extend(
                            open(downstream_file).read().splitlines())
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/pin_downstream')
                    if extra_specs:
                        additional_specs.extend(extra_specs.splitlines())
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with proper
                    #    conda API when available.
                    try:
                        pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                      index=index)
                        pfe.execute()
                        for pkgs_dir in pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/pin_downstream')
                                if extra_specs:
                                    additional_specs.extend(
                                        extra_specs.splitlines())
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, pkgs_dirs))

    return additional_specs
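strip_channel above removes an explicit channel prefix from a spec string before solving. A minimal stand-in under that assumption; conda-build ships its own implementation, so this is only illustrative.

def strip_channel(spec_str):
    # 'conda-forge::numpy 1.16' -> 'numpy 1.16'; specs without a channel
    # prefix pass through unchanged.
    if '::' in spec_str:
        spec_str = spec_str.split('::', 1)[-1]
    return spec_str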
Example #7
def get_env_dependencies(m,
                         env,
                         variant,
                         exclude_pattern=None,
                         permit_unsatisfiable_variants=False):
    dash_or_under = re.compile("[-_]")
    specs = [ms.spec for ms in m.ms_depends(env)]
    if env == 'build' and m.is_cross and m.config.build_subdir == m.config.host_subdir:
        specs.extend([ms.spec for ms in m.ms_depends('host')])
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join(
                    (pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            # fill in variant version iff no version at all is provided
            for key, value in variant.items():
                # for sake of comparison, ignore dashes and underscores
                if (dash_or_under.sub("", key) == dash_or_under.sub(
                        "", spec_name) and not re.search(
                            r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name,
                            spec)):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec)
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    dependencies = set(dependencies)
    unsat = None
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(
                tmpdir,
                tuple(dependencies),
                env,
                subdir=getattr(m.config, '{}_subdir'.format(env)),
                debug=m.config.debug,
                verbose=m.config.verbose,
                locking=m.config.locking,
                bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                timeout=m.config.timeout,
                disable_pip=m.config.disable_pip,
                max_env_retry=m.config.max_env_retry,
                output_folder=m.config.output_folder,
                channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return specs + subpackages + pass_through_deps, actions, unsat
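A sketch of one way an exclude_pattern for the example above might be built. The excluded names are illustrative, and conda-build assembles this pattern internally from variant keys rather than hard-coding it; `m` is again assumed to be a MetaData object.

import re

# Hypothetical exclusion list: specs whose package name matches one of these
# are kept out of the solve and returned via pass_through_deps instead.
excludes = ('python', 'numpy', 'r-base')
exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                      for exc in excludes))

specs, actions, unsat = get_env_dependencies(
    m, 'build', {'numpy': '1.16'}, exclude_pattern=exclude_pattern)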
Example #8
def get_env_dependencies(m, env, variant, exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    dash_or_under = re.compile("[-_]")
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            # fill in variant version iff no version at all is provided
            for key, value in variant.items():
                # for sake of comparison, ignore dashes and underscores
                if (dash_or_under.sub("", key) == dash_or_under.sub("", spec_name) and
                        not re.search(r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name, spec)):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec)
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    dependencies = set(dependencies)
    unsat = None
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir, tuple(dependencies), env,
                                                  subdir=getattr(m.config, '{}_subdir'.format(env)),
                                                  debug=m.config.debug,
                                                  verbose=m.config.verbose,
                                                  locking=m.config.locking,
                                                  bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                                                  timeout=m.config.timeout,
                                                  disable_pip=m.config.disable_pip,
                                                  max_env_retry=m.config.max_env_retry,
                                                  output_folder=m.config.output_folder,
                                                  channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list((specs + subpackages + pass_through_deps) or
                  m.meta.get('requirements', {}).get(env, [])),
            actions, unsat)