Example 1
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(expanduser('~/bundles/%s' %
                                    (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)

    t.close()
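For orientation, a hypothetical call to the helper above might look like the following; the archive path, prefix, and bundle name are made-up placeholders, not values from the original source.

# Hypothetical usage sketch for clone_bundle (placeholder paths):
# unpack the bundle into a new environment and link its dependencies
clone_bundle('/tmp/my-bundle.tar.bz2',
             prefix='/opt/envs/cloned-env',
             bundle_name='my-bundle')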
Example 2
def create_env(prefix, specs, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 3
def create_env(prefix,
               specs,
               clear_cache=True,
               verbose=True,
               channel_urls=(),
               override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True,
                                channel_urls=channel_urls,
                                override_channels=override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 4
def clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.
    The directory that `path` is located in should be some temp directory or
    some other directory OUTSIDE /opt/anaconda (this function handles
    copying of the file if necessary for you).  After calling this
    function, the original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]

    if not install.is_extracted(config.pkgs_dir, dist):
        shutil.copyfile(path, join(config.pkgs_dir, dist + '.tar.bz2'))
        plan.execute_plan(['%s %s' % (plan.EXTRACT, dist)])
    assert install.is_extracted(config.pkgs_dir, dist)

    with open(join(config.pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires'))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)

    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
Example 5
def create_env(prefix, specs, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 6
def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(),
    override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs: # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
            prepend=not override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 7
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(
        expanduser('~/bundles/%s' % (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)

    t.close()
Example 8
def create_env(prefix,
               specs,
               clear_cache=True,
               verbose=True,
               channel_urls=(),
               override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] +
                          list(channel_urls),
                          prepend=not override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 9
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 '       try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        channel_urls = args.channel or ()

        common.ensure_override_channels_requires_channel(args)
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     '       add -n NAME or -p PREFIX option')

        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
            common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
Example 10
def execute(args, parser):
    import sys
    from os.path import exists

    import conda.plan as plan
    from conda.api import get_index


    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args)

    if exists(prefix):
        if args.prefix:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -p/--prefix" % prefix)
        else:
            raise RuntimeError("'%s' already exists, must supply new "
                               "directory for -n/--name" % prefix)

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()

    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls, prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)

    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)

    if sys.platform != 'win32':
        activate_name = prefix
        if args.name:
            activate_name = args.name
        print("#")
        print("# To activate this environment, use:")
        print("# $ source activate %s" % activate_name)
        print("#")
        print("# To deactivate this environment, use:")
        print("# $ source deactivate")
        print("#")
Example 11
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 '       try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        common.ensure_override_channels_requires_channel(args)
        channel_urls = args.channel or ()
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     '       add -n NAME or -p PREFIX option')

        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix)
                and common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
Example 12
def execute(args, parser):
    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck

    prefix = common.get_prefix(args)

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return
    if any(s.endswith('.tar.bz2') for s in args.packages):
        raise RuntimeError("cannot mix specifications with conda package "
                           "filenames")

    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_file(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' %  '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
Example 13
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp("conda")
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend, platform=platform, prefix=prefix)
        actions = plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
Example 14
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
Example 15
def execute(args, parser):
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index
    from conda.misc import touch_nonadmin

    common.ensure_name_or_prefix(args, 'create')
    prefix = common.get_prefix(args, search=False)
    check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if args.clone:
        if args.package_specs:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not args.no_default_packages:
        args.package_specs.extend(config.create_default_packages)

    if len(args.package_specs) == 0 and not args.file:
        sys.exit('Error: too few arguments, must supply command line '
                 'package specs or --file')

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.package_specs)

    common.check_specs(prefix, specs)

    channel_urls = args.channel or ()

    common.ensure_override_channels_requires_channel(args)
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, specs)

    if plan.nothing_to_do(actions):
        print('No matching packages could be found, nothing to do')
        return

    print()
    print("Package plan for creating environment at %s:" % prefix)
    plan.display_actions(actions, index)

    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)
    touch_nonadmin(prefix)
    print_activate(args.name if args.name else prefix)
Example 16
def create_env(pref, specs):
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])

    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
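All of the create_env variants collected here reduce to the same legacy conda.plan workflow: build an index, turn the specs into an action plan, display it, then execute it. The following is a minimal sketch of that shared pattern, assuming the old-style conda.plan and conda.api modules used throughout these examples are importable; it is not the implementation from any one of them.

import os
from os.path import isdir

import conda.plan as plan
from conda.api import get_index

def sketch_create_env(prefix, specs):
    # resolve the package index, compute the install plan for the specs,
    # show what would change, then carry it out
    index = get_index()
    actions = plan.install_actions(prefix, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure the prefix exists even when specs is empty
    if not isdir(prefix):
        os.makedirs(prefix)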
Example 17
def execute(args, parser):
    import sys

    import conda.install as ci
    import conda.config as config
    import conda.plan as plan
    from conda.api import get_index

    from conda.cli import pscheck


    if len(args.pkg_names) == 0:
        sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update anaconda
""")

    prefix = common.get_prefix(args)
    config.set_pkgs_dirs(prefix)
    linked = set(ci.name_dist(d) for d in ci.linked(prefix))
    for name in args.pkg_names:
        common.arg2spec(name)
        if '=' in name:
            sys.exit("Invalid package name: '%s'" % (name))
        if name not in linked:
            sys.exit("Error: package '%s' is not installed in %s" %
                     (name, prefix))

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)
    actions = plan.install_actions(prefix, index, args.pkg_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' %  '|'.join(args.pkg_names)
        print('# All packages already at latest version, nothing to do.')
        list_packages(prefix, regex)
        return

    print("Updating conda environment at %s" % prefix)
    plan.display_actions(actions, index)
    common.check_write('update', prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
Example 18
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
            logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 19
def create_env(pref, specs):
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])

    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
Example 20
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        linked = ci.linked(prefix)
        plan.add_defaults_to_specs(Resolve(index), linked, specs)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
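The check_install helpers above only compute and display a plan inside a throw-away prefix; nothing is executed, and the temporary directory is removed afterwards. A hypothetical dry-run call might look like this (the spec and platform are placeholders):

# Hypothetical dry-run: show what installing 'numpy' would involve, without executing it.
actions = check_install(['numpy'], platform='linux-64')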
Example 21
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 22
def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(), override_channels=False):
    """
    Create a conda environment for the given prefix and specs.
    """
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=True, channel_urls=channel_urls, override_channels=override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 23
def clone_analysispackage(path,
                          prefix=None,
                          analysispackage_name=None,
                          data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))
    if prefix and isdir(prefix):
        print("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)
    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)
    if not data_path:
        analysispackage_dir = abspath(
            expanduser(
                '~/analysispackages/%s' %
                (analysispackage_name or meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)
    with open(join(data_path, ".metadata.json"), "w+") as f:
        json.dump({'env': prefix}, f)
    t.close()
Example 24
def create_env(pref, specs, pypi=False):
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])

    cc.pkgs_dirs = cc.pkgs_dirs[:1]

    if pypi:
        from conda.from_pypi import install_from_pypi
        specs = install_from_pypi(pref, index, specs)

    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
Example 25
def clone_analysispackage(path, prefix=None, analysispackage_name=None, data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))
    if prefix and isdir(prefix):
        print ("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)
    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)
    if not data_path:
        analysispackage_dir = abspath(expanduser('~/analysispackages/%s' %
                                        (analysispackage_name or meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)
    with open(join(data_path, ".metadata.json"), "w+") as f:
        json.dump({'env' : prefix}, f)
    t.close()
Example 26
def test_display_actions_no_index():
    # Test removing a package that is not in the index. This issue
    # should only come up for removing.
    actions = defaultdict(list, {"UNLINK": ["notinstalled-1.0-py33_0"]})

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be REMOVED:

    notinstalled: 1.0-py33_0 <unknown>

"""
    )

    actions = defaultdict(list, {"LINK": ["numpy-1.7.1-py33_0"], "UNLINK": ["numpy-2.0.0-py33_1"]})

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    numpy: 2.0.0-py33_1 <unknown> --> 1.7.1-py33_0 <unknown>

"""
    )

    # tk-8.5.13-1 is not in the index. Test that it guesses the build number
    # correctly.
    actions = defaultdict(list, {"LINK": ["tk-8.5.13-0"], "UNLINK": ["tk-8.5.13-1"]})

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    tk: 8.5.13-1 <unknown> --> 8.5.13-0 <unknown>

"""
    )
Example 27
def test_display_actions_no_index():
    # Test removing a package that is not in the index. This issue
    # should only come up for removing.
    actions = defaultdict(list, {'UNLINK': ['notinstalled-1.0-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    notinstalled: 1.0-py33_0 <unknown>

"""

    actions = defaultdict(list, {
        "LINK": ['numpy-1.7.1-py33_0'],
        "UNLINK": ['numpy-2.0.0-py33_1']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    numpy: 2.0.0-py33_1 <unknown> --> 1.7.1-py33_0 <unknown>

"""

    # tk-8.5.13-1 is not in the index. Test that it guesses the build number
    # correctly.
    actions = defaultdict(list, {
        "LINK": ['tk-8.5.13-0'],
        "UNLINK": ['tk-8.5.13-1']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Example 28
def test_display_actions_no_index():
    # Test removing a package that is not in the index. This issue
    # should only come up for removing.
    actions = defaultdict(list, {'UNLINK': ['notinstalled-1.0-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    notinstalled: 1.0-py33_0 <unknown>

"""

    actions = defaultdict(list, {"LINK": ['numpy-1.7.1-py33_0'], "UNLINK":
        ['numpy-2.0.0-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    numpy: 2.0.0-py33_1 <unknown> --> 1.7.1-py33_0 <unknown>

"""

    # tk-8.5.13-1 is not in the index. Test that it guesses the build number
    # correctly.
    actions = defaultdict(list, {"LINK": ['tk-8.5.13-0'], "UNLINK":
        ['tk-8.5.13-1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Example 29
def test_display_actions_show_channel_urls():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'True'
    reset_context(())
    actions = defaultdict(
        list, {"FETCH": ['sympy-0.7.2-py27_0', "numpy-1.7.1-py27_0"]})
    # The older test index doesn't have the size metadata
    d = Dist('sympy-0.7.2-py27_0.tar.bz2')
    index[d] = IndexRecord.from_objects(d, size=4374752)
    d = Dist('numpy-1.7.1-py27_0.tar.bz2')
    index[d] = IndexRecord.from_objects(d, size=5994338)

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB  <unknown>
    numpy-1.7.1                |           py27_0         5.7 MB  <unknown>
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""

    actions = defaultdict(
        list, {
            'PREFIX':
            '/Users/aaronmeurer/anaconda/envs/test',
            'SYMLINK_CONDA': ['/Users/aaronmeurer/anaconda'],
            'LINK': [
                'python-3.3.2-0', 'readline-6.2-0', 'sqlite-3.7.13-0',
                'tk-8.5.13-0', 'zlib-1.2.7-0'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """Package plan for environment '/Users/aaronmeurer/anaconda/envs/test':

The following NEW packages will be INSTALLED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""

    actions['UNLINK'] = actions['LINK']
    actions['LINK'] = []

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """Package plan for environment '/Users/aaronmeurer/anaconda/envs/test':

The following packages will be REMOVED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""

    actions = defaultdict(list, {
        'LINK': ['cython-0.19.1-py33_0'],
        'UNLINK': ['cython-0.19-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>

"""

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0', 'dateutil-1.5-py33_0',
                'numpy-1.7.1-py33_0'
            ],
            'UNLINK':
            ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0 <unknown>

The following packages will be REMOVED:

    pip:      1.3.1-py33_1 <unknown>

The following packages will be UPDATED:

    cython:   0.19-py33_0  <unknown> --> 0.19.1-py33_0 <unknown>

The following packages will be DOWNGRADED:

    dateutil: 2.1-py33_1   <unknown> --> 1.5-py33_0    <unknown>

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>
    dateutil: 1.5-py33_0  <unknown> --> 2.1-py33_1    <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>
    dateutil: 2.1-py33_1    <unknown> --> 1.5-py33_0  <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    d = Dist('cython-0.19.1-py33_0.tar.bz2')
    index[d] = IndexRecord.from_objects(d, channel='my_channel')
    d = Dist('dateutil-1.5-py33_0.tar.bz2')
    index[d] = IndexRecord.from_objects(d, channel='my_channel')

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown> \n\

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Example 30
def build(m,
          get_src=True,
          verbose=True,
          post=None,
          channel_urls=(),
          override_channels=False,
          include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose,
                   channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(clear_cache=False,
                                    channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit(
                    "Error: Glob %s from always_include_files does not match any files"
                    % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                    (tuple(f
                           for f in files2 - files1 if config.meta_dir in join(
                               config.build_prefix, f)), )))
        post_build(m, sorted(files2 - files1))
        create_info_files(m,
                          sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Example 31
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda").setLevel(logging.WARN)
        logging.getLogger("binstar").setLevel(logging.WARN)
        logging.getLogger("install").setLevel(logging.ERROR)
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)

        try:
            plan.execute_actions(actions, index, verbose=debug)
        except SystemExit as exc:
            if "too short in" in exc.message and config.prefix_length > 80:
                log.warn("Build prefix failed with prefix length {0}.".format(
                    config.prefix_length))
                log.warn("Error was: ")
                log.warn(exc.message)
                log.warn(
                    "One or more of your package dependencies needs to be rebuilt with a "
                    "longer prefix length.")
                log.warn(
                    "Falling back to legacy prefix length of 80 characters.")
                log.warn(
                    "Your package will not install into prefixes longer than 80 characters."
                )
                config.prefix_length = 80
                create_env(prefix, specs, clear_cache=clear_cache, debug=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 32
def execute(args, parser):
    from conda import config, plan
    from conda.install import linked, rm_rf

    prefix = common.get_prefix(args)
    if plan.is_root_prefix(prefix):
        common.error_and_exit(
            'cannot remove root environment,\n'
            '       add -n NAME or -p PREFIX option',
            json=args.json,
            error_type="CantRemoveRoot")

    if prefix == config.default_prefix:
        # FIXME The way the "name" is determined now is handled by
        #       looking at the basename of the prefix.  This is brittle
        #       and underlines a use-case for an Environment object that
        #       is capable of providing a name attribute.
        common.error_and_exit(
            textwrap.dedent("""
            Conda cannot remove the current environment.

            Please deactivate and run conda env remove again with the name
            specified.

                conda env remove --name %s
            """ % basename(prefix)).lstrip())

    # TODO Why do we need an index for removing packages?
    index = common.get_index_trap(json=args.json)

    actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}

    if plan.nothing_to_do(actions):
        # TODO Should this automatically remove even *before* confirmation?
        # TODO Should this display an error when removing something that
        #      doesn't exist?
        rm_rf(prefix)

        if args.json:
            common.stdout_json({'success': True, 'actions': actions})
        return

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        print()
        print("Remove the following packages in environment %s:" % prefix)
        plan.display_actions(actions, index)

    common.confirm_yn(args)
    plan.execute_actions(actions, index, verbose=not args.quiet)
    rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
Example 33
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        elif not args.file:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))


    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                    "you need to have 'conda-build >= 1.7.1' installed"
                    " to use the --use-local option",
                    json=args.json,
                    error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                       latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):

                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
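            # With --copy, rewrite each planned LINK action to use the copy
            # link type (ci.LINK_COPY) instead of a hard or soft link.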
            if args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
            error_message += '\n\nYou can search for this package on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
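The display_actions tests that follow drive conda.plan.display_actions with hand-built
actions mappings. As a minimal, illustrative sketch (not taken from the conda sources;
key names and the hypothetical paths are inferred from the snippets in this listing),
such a mapping looks like:

from collections import defaultdict

actions = defaultdict(list, {
    'PREFIX': '/opt/envs/example',        # hypothetical target environment
    'SYMLINK_CONDA': ['/opt/anaconda'],   # root prefix to symlink conda from
    'FETCH': ['numpy-1.7.1-py27_0'],      # dists to download
    'UNLINK': ['cython-0.19-py33_0'],     # dists to remove from the prefix
    'LINK': ['cython-0.19.1-py33_0 2'],   # dist plus link type (2 = soft-link)
})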
Esempio n. 34
0
def test_display_actions():
    import conda.plan
    conda.plan.config_show_channel_urls = False
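    # Assumed module-level test fixtures (not shown in this snippet):
    # defaultdict from collections, display_actions from conda.plan, the
    # captured() helper from the test suite, and an `index` dict loaded
    # from a test repodata file.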
    actions = defaultdict(
        list, {"FETCH": ['sympy-0.7.2-py27_0', "numpy-1.7.1-py27_0"]})
    # The older test index doesn't have the size metadata
    index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752
    index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB
    numpy-1.7.1                |           py27_0         5.7 MB
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""

    actions = defaultdict(
        list, {
            'PREFIX':
            '/Users/aaronmeurer/anaconda/envs/test',
            'SYMLINK_CONDA': ['/Users/aaronmeurer/anaconda'],
            'LINK': [
                'python-3.3.2-0', 'readline-6.2-0 1', 'sqlite-3.7.13-0 1',
                'tk-8.5.13-0 1', 'zlib-1.2.7-0 1'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions['UNLINK'] = actions['LINK']
    actions['LINK'] = []

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions = defaultdict(list, {
        'LINK': ['cython-0.19.1-py33_0'],
        'UNLINK': ['cython-0.19-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 --> 0.19.1-py33_0

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython: 0.19.1-py33_0 --> 0.19-py33_0

"""

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0', 'dateutil-1.5-py33_0',
                'numpy-1.7.1-py33_0'
            ],
            'UNLINK':
            ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0

The following packages will be REMOVED:

    pip:      1.3.1-py33_1

The following packages will be UPDATED:

    cython:   0.19-py33_0  --> 0.19.1-py33_0

The following packages will be DOWNGRADED due to dependency conflicts:

    dateutil: 2.1-py33_1   --> 1.5-py33_0   \n\

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 35
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json)

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap([url_path(build_config.croot)],
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json)

    # Don't update packages that are already up-to-date
    if command == 'update' and not args.all:
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [
                ci.is_linked(prefix, dist) for dist in linked
            ]
            vers_inst = [
                dist.rsplit('-', 2)[1] for dist in linked
                if dist.rsplit('-', 2)[0] == name
            ]
            build_inst = [
                m['build_number'] for m in installed_metadata
                if m['name'] == name
            ]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        common.error_and_exit(
            "cannot mix specifications with conda package filenames",
            json=args.json,
            error_type="ValueError")

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" %
                                      prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix,
                                           index,
                                           specs,
                                           force=args.force,
                                           only_names=only_names,
                                           pinned=args.pinned,
                                           minimal_hint=args.alt_hint)
    except NoPackagesFound as e:
        error_message = e.args[0]

        packages = {index[fn]['name'] for fn in index}

        for pkg in e.pkgs:
            close = get_close_matches(pkg, packages)
            if close:
                error_message += "\n\nDid you mean one of these?\n    %s" % (
                    ', '.join(close))
            error_message += '\n\nYou can search for this package on Binstar with'
            error_message += '\n\n    binstar search -t conda %s' % pkg
            error_message += '\n\nYou may need to install the Binstar command line client with'
            error_message += '\n\n    conda install binstar'
        common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e,
                                  json=args.json,
                                  newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck
                and not pscheck.check_processes(verbose=False)):
            common.error_and_exit(
                "Cannot continue operation while processes "
                "from packages are running without --force-pscheck.",
                json=True,
                error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
Esempio n. 36
0
def test_display_actions_link_type():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())
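    # The trailing integer on each LINK entry is the link type: 1 = hard-link
    # (the default, shown without annotation), 2 = soft-link, 3 = copy, as
    # reflected in the expected output below.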

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 2', 'dateutil-1.5-py33_0 2',
    'numpy-1.7.1-py33_0 2', 'python-3.3.2-0 2', 'readline-6.2-0 2', 'sqlite-3.7.13-0 2', 'tk-8.5.13-0 2', 'zlib-1.2.7-0 2']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0    (soft-link)
    numpy:    1.7.1-py33_0  (soft-link)
    python:   3.3.2-0       (soft-link)
    readline: 6.2-0         (soft-link)
    sqlite:   3.7.13-0      (soft-link)
    tk:       8.5.13-0      (soft-link)
    zlib:     1.2.7-0       (soft-link)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 2',
        'dateutil-2.1-py33_1 2'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (soft-link)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 2',
        'dateutil-1.5-py33_0 2'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (soft-link)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (soft-link)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 1', 'dateutil-1.5-py33_0 1',
    'numpy-1.7.1-py33_0 1', 'python-3.3.2-0 1', 'readline-6.2-0 1', 'sqlite-3.7.13-0 1', 'tk-8.5.13-0 1', 'zlib-1.2.7-0 1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0
    dateutil: 1.5-py33_0   \n\
    numpy:    1.7.1-py33_0 \n\
    python:   3.3.2-0      \n\
    readline: 6.2-0        \n\
    sqlite:   3.7.13-0     \n\
    tk:       8.5.13-0     \n\
    zlib:     1.2.7-0      \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 1',
        'dateutil-2.1-py33_1 1'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 1',
        'dateutil-1.5-py33_0 1'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython:   0.19.1-py33_0 --> 0.19-py33_0
    dateutil: 2.1-py33_1    --> 1.5-py33_0 \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3',
    'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3', 'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0    (copy)
    numpy:    1.7.1-py33_0  (copy)
    python:   3.3.2-0       (copy)
    readline: 6.2-0         (copy)
    sqlite:   3.7.13-0      (copy)
    tk:       8.5.13-0      (copy)
    zlib:     1.2.7-0       (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3',
        'dateutil-2.1-py33_1 3'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 3',
        'dateutil-1.5-py33_0 3'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (copy)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (copy)

"""
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'True'
    reset_context(())

    index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel'
    index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel'

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3',
    'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3', 'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0    my_channel (copy)
    numpy:    1.7.1-py33_0  <unknown>  (copy)
    python:   3.3.2-0       <unknown>  (copy)
    readline: 6.2-0         <unknown>  (copy)
    sqlite:   3.7.13-0      <unknown>  (copy)
    tk:       8.5.13-0      <unknown>  (copy)
    zlib:     1.2.7-0       <unknown>  (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3',
        'dateutil-2.1-py33_1 3'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown>  (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 3',
        'dateutil-1.5-py33_0 3'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 37
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))


    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                    "you need to have 'conda-build >= 1.7.1' installed"
                    " to use the --use-local option",
                    json=args.json,
                    error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                       latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):

                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
            error_message += '\n\nYou can search for this package on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
Esempio n. 38
0
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit(
            'no package names supplied,\n'
            '       try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError")

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    index = common.get_index_trap(channel_urls=channel_urls,
                                  use_cache=args.use_index_cache,
                                  prepend=not args.override_channels,
                                  json=args.json)
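    # Three removal modes: feature removal (--features), removing the whole
    # environment (--all), or removing the named specs.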
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option',
                json=args.json,
                error_type="CantRemoveRoot")

        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix)
                and common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)

            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck
          and not pscheck.check_processes(verbose=False)):
        common.error_and_exit(
            "Cannot continue removal while processes "
            "from packages are running without --force-pscheck.",
            json=True,
            error_type="ProcessesStillRunning")

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
Esempio n. 39
0
def test_display_actions_link_type():
    conda.config.show_channel_urls = False
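    # In this variant each LINK entry carries three fields,
    # 'dist pkgs_dir link_type', matching the linkarg format split by
    # inst.split_linkarg() in the install() examples above.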

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
                "numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
                "python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 2",
                "readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 2",
                "sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 2",
                "tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 2",
                "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 2",
            ]
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0    (soft-link)
    numpy:    1.7.1-py33_0  (soft-link)
    python:   3.3.2-0       (soft-link)
    readline: 6.2-0         (soft-link)
    sqlite:   3.7.13-0      (soft-link)
    tk:       8.5.13-0      (soft-link)
    zlib:     1.2.7-0       (soft-link)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
                "dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 2",
            ],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (soft-link)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2",
            ],
            "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (soft-link)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (soft-link)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
                "numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
                "python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1",
            ]
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0
    dateutil: 1.5-py33_0   \n\
    numpy:    1.7.1-py33_0 \n\
    python:   3.3.2-0      \n\
    readline: 6.2-0        \n\
    sqlite:   3.7.13-0     \n\
    tk:       8.5.13-0     \n\
    zlib:     1.2.7-0      \n\

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
                "dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 1",
            ],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1",
            ],
            "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0
    dateutil: 2.1-py33_1    --> 1.5-py33_0 \n\

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3",
            ]
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0    (copy)
    numpy:    1.7.1-py33_0  (copy)
    python:   3.3.2-0       (copy)
    readline: 6.2-0         (copy)
    sqlite:   3.7.13-0      (copy)
    tk:       8.5.13-0      (copy)
    zlib:     1.2.7-0       (copy)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3",
            ],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (copy)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
            ],
            "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (copy)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (copy)

"""
    )

    conda.config.show_channel_urls = True

    index["cython-0.19.1-py33_0.tar.bz2"]["channel"] = "my_channel"
    index["dateutil-1.5-py33_0.tar.bz2"]["channel"] = "my_channel"

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3",
                "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3",
            ]
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0    my_channel (copy)
    numpy:    1.7.1-py33_0  <unknown>  (copy)
    python:   3.3.2-0       <unknown>  (copy)
    readline: 6.2-0         <unknown>  (copy)
    sqlite:   3.7.13-0      <unknown>  (copy)
    tk:       8.5.13-0      <unknown>  (copy)
    zlib:     1.2.7-0       <unknown>  (copy)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3",
            ],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown>  (copy)

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": [
                "cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
                "dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3",
            ],
            "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown>  (copy)
    dateutil: 2.1-py33_1    <unknown>  --> 1.5-py33_0  my_channel (copy)

"""
    )
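
The LINK entries in these action dicts follow the pattern "name-version-build [pkgs_dir] [link_type]"; the trailing integer mirrors conda's link types (1 = hard-link, 2 = soft-link, 3 = copy), which is why the entries above ending in "3" are rendered with a "(copy)" suffix by display_actions(). The helper below is an illustrative parser written for this write-up, not conda's own code.

LINK_TYPES = {1: 'hard-link', 2: 'soft-link', 3: 'copy'}

def parse_link_entry(entry):
    # e.g. "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3" -> (dist, link type)
    parts = entry.split()
    dist = parts[0]
    link_type = int(parts[-1]) if len(parts) > 1 and parts[-1].isdigit() else 1
    return dist, LINK_TYPES[link_type]

print(parse_link_entry('zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3'))
# ('zlib-1.2.7-0', 'copy')
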
Esempio n. 40
0
def test_display_actions_show_channel_urls():
    conda.config.show_channel_urls = True
    actions = defaultdict(list, {"FETCH": ["sympy-0.7.2-py27_0", "numpy-1.7.1-py27_0"]})
    # The older test index doesn't have the size metadata
    index["sympy-0.7.2-py27_0.tar.bz2"]["size"] = 4374752
    index["numpy-1.7.1-py27_0.tar.bz2"]["size"] = 5994338

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB  <unknown>
    numpy-1.7.1                |           py27_0         5.7 MB  <unknown>
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""
    )

    actions = defaultdict(
        list,
        {
            "PREFIX": "/Users/aaronmeurer/anaconda/envs/test",
            "SYMLINK_CONDA": ["/Users/aaronmeurer/anaconda"],
            "LINK": [
                "python-3.3.2-0",
                "readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1",
                "zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1",
            ],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""
    )

    actions["UNLINK"] = actions["LINK"]
    actions["LINK"] = []

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be REMOVED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""
    )

    actions = defaultdict(list, {"LINK": ["cython-0.19.1-py33_0"], "UNLINK": ["cython-0.19-py33_0"]})

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>

"""
    )

    actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"]

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": ["cython-0.19.1-py33_0", "dateutil-1.5-py33_0", "numpy-1.7.1-py33_0"],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-2.1-py33_1", "pip-1.3.1-py33_1"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0  <unknown>

The following packages will be REMOVED:

    pip:      1.3.1-py33_1 <unknown>

The following packages will be UPDATED:

    cython:   0.19-py33_0  <unknown> --> 0.19.1-py33_0 <unknown>

The following packages will be DOWNGRADED:

    dateutil: 2.1-py33_1   <unknown> --> 1.5-py33_0    <unknown>

"""
    )

    actions = defaultdict(
        list,
        {
            "LINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"],
            "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"],
        },
    )

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>
    dateutil: 1.5-py33_0  <unknown> --> 2.1-py33_1    <unknown>

"""
    )

    actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"]

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>
    dateutil: 2.1-py33_1    <unknown> --> 1.5-py33_0  <unknown>

"""
    )

    actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"]

    index["cython-0.19.1-py33_0.tar.bz2"]["channel"] = "my_channel"
    index["dateutil-1.5-py33_0.tar.bz2"]["channel"] = "my_channel"

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown> \n\

"""
    )

    actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"]

    with captured() as c:
        display_actions(actions, index)

    assert (
        c.stdout
        == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown> \n\
    dateutil: 2.1-py33_1    <unknown>  --> 1.5-py33_0  my_channel

"""
    )
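
All of these tests rely on a small captured() helper to collect whatever display_actions() prints. Below is a minimal sketch of such a helper, assuming it only needs to swap sys.stdout for an in-memory buffer and expose the result as c.stdout; the real helper lives in conda's test utilities and may do more.

import sys
from contextlib import contextmanager
from io import StringIO

class CapturedIO(object):
    stdout = ''

@contextmanager
def captured():
    # Redirect sys.stdout for the duration of the block and expose the
    # collected text as .stdout once the block exits.
    cap, buf, old = CapturedIO(), StringIO(), sys.stdout
    sys.stdout = buf
    try:
        yield cap
    finally:
        sys.stdout = old
        cap.stdout = buf.getvalue()
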
Esempio n. 41
0
def build(m,
          post=None,
          include_recipe=True,
          keep_old_work=False,
          need_source_download=True,
          verbose=True,
          dirty=False,
          activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [
                name for name in os.listdir(source.WORK_DIR)
                if os.path.isdir(os.path.join(source.WORK_DIR, name))
            ]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s" %
                      (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub),
                                old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (e.g. git, hg, svn) as build
                # dependencies.
                m, need_source_download = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    verbose=verbose,
                    dirty=dirty)
                assert not need_source_download, "Source download failed.  Please investigate."

            if m.name() in [
                    i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
            ]:
                print("%s is installed as a build dependency. Removing." %
                      m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit(
                        "Error: Glob %s from always_include_files does not match any files"
                        % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if activate:
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]

                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(
                    indent(
                        """Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                        (tuple(f for f in files2 - files1 if config.meta_dir in
                               join(config.build_prefix, f)), )))
            post_build(m, sorted(files2 - files1))
            create_info_files(m,
                              sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # empty files report size 0; treat them as 100000 bytes so they sort toward the end
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s" %
                  (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print(
                        "Not restoring old source directory %s over new build's version"
                        % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
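
The order() key defined above decides how files are laid out in the .tar.bz2: info/* metadata first, then everything else by increasing size, so the small manifest and JSON files can be read without decompressing large binaries. A self-contained demonstration with made-up file sizes:

import os

sizes = {'info/index.json': 512, 'bin/tool': 2048, 'lib/libbig.so': 10000000}  # made-up sizes

def order(f):
    fsize = sizes[f] or 100000                       # empty files are pushed toward the end
    info_order = int(os.path.dirname(f) != 'info')   # 0 for info/*, 1 for everything else
    return info_order, fsize

print(sorted(sizes, key=order))
# ['info/index.json', 'bin/tool', 'lib/libbig.so']
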
Esempio n. 42
0
def test_display_actions():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())
    actions = defaultdict(
        list,
        {"FETCH": [Dist('sympy-0.7.2-py27_0'),
                   Dist("numpy-1.7.1-py27_0")]})
    # The older test index doesn't have the size metadata
    d = Dist.from_string('sympy-0.7.2-py27_0.tar.bz2')
    index[d] = IndexRecord.from_objects(index[d], size=4374752)
    d = Dist.from_string("numpy-1.7.1-py27_0.tar.bz2")
    index[d] = IndexRecord.from_objects(index[d], size=5994338)

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB
    numpy-1.7.1                |           py27_0         5.7 MB
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""

    actions = defaultdict(
        list, {
            'PREFIX':
            '/Users/aaronmeurer/anaconda/envs/test',
            'SYMLINK_CONDA': ['/Users/aaronmeurer/anaconda'],
            'LINK': [
                'python-3.3.2-0', 'readline-6.2-0 1', 'sqlite-3.7.13-0 1',
                'tk-8.5.13-0 1', 'zlib-1.2.7-0 1'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions['UNLINK'] = actions['LINK']
    actions['LINK'] = []

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions = defaultdict(list, {
        'LINK': ['cython-0.19.1-py33_0'],
        'UNLINK': ['cython-0.19-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 --> 0.19.1-py33_0

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython: 0.19.1-py33_0 --> 0.19-py33_0

"""

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0', 'dateutil-1.5-py33_0',
                'numpy-1.7.1-py33_0'
            ],
            'UNLINK':
            ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0

The following packages will be REMOVED:

    pip:      1.3.1-py33_1

The following packages will be UPDATED:

    cython:   0.19-py33_0  --> 0.19.1-py33_0

The following packages will be DOWNGRADED due to dependency conflicts:

    dateutil: 2.1-py33_1   --> 1.5-py33_0   \n\

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 43
0
def test_display_actions_link_type():
    conda.config.show_channel_urls = False

    actions = defaultdict(list, {'LINK': [
        'cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2',
        'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2',
        'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 2',
        'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 2',
        'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 2',
        'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 2',
        'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 2',
    ]})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0    (soft-link)
    numpy:    1.7.1-py33_0  (soft-link)
    python:   3.3.2-0       (soft-link)
    readline: 6.2-0         (soft-link)
    sqlite:   3.7.13-0      (soft-link)
    tk:       8.5.13-0      (soft-link)
    zlib:     1.2.7-0       (soft-link)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2',
        'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (soft-link)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (soft-link)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 2',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (soft-link)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (soft-link)

"""

    actions = defaultdict(list, {'LINK': [
        'cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1',
        'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1',
        'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 1',
        'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1',
        'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1',
        'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1',
        'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1',
    ]})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0
    dateutil: 1.5-py33_0   \n\
    numpy:    1.7.1-py33_0 \n\
    python:   3.3.2-0      \n\
    readline: 6.2-0        \n\
    sqlite:   3.7.13-0     \n\
    tk:       8.5.13-0     \n\
    zlib:     1.2.7-0      \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1',
        'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 1',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0
    dateutil: 2.1-py33_1    --> 1.5-py33_0 \n\

"""

    actions = defaultdict(list, {'LINK': [
        'cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3',
    ]})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0    (copy)
    numpy:    1.7.1-py33_0  (copy)
    python:   3.3.2-0       (copy)
    readline: 6.2-0         (copy)
    sqlite:   3.7.13-0      (copy)
    tk:       8.5.13-0      (copy)
    zlib:     1.2.7-0       (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (copy)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (copy)

"""

    conda.config.show_channel_urls = True

    index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel'
    index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel'

    actions = defaultdict(list, {'LINK': [
        'cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3',
        'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3',
    ]})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0    my_channel (copy)
    numpy:    1.7.1-py33_0  <unknown>  (copy)
    python:   3.3.2-0       <unknown>  (copy)
    readline: 6.2-0         <unknown>  (copy)
    sqlite:   3.7.13-0      <unknown>  (copy)
    tk:       8.5.13-0      <unknown>  (copy)
    zlib:     1.2.7-0       <unknown>  (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown>  (copy)

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3',
        'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK':  ['cython-0.19.1-py33_0',
            'dateutil-2.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 44
0
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    """

    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows

            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        assert not any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python

            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Esempio n. 45
0
def test_display_actions_link_type():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0 2', 'dateutil-1.5-py33_0 2',
                'numpy-1.7.1-py33_0 2', 'python-3.3.2-0 2', 'readline-6.2-0 2',
                'sqlite-3.7.13-0 2', 'tk-8.5.13-0 2', 'zlib-1.2.7-0 2'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (softlink)
    dateutil: 1.5-py33_0    (softlink)
    numpy:    1.7.1-py33_0  (softlink)
    python:   3.3.2-0       (softlink)
    readline: 6.2-0         (softlink)
    sqlite:   3.7.13-0      (softlink)
    tk:       8.5.13-0      (softlink)
    zlib:     1.2.7-0       (softlink)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0 2', 'dateutil-2.1-py33_1 2'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (softlink)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (softlink)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19-py33_0 2', 'dateutil-1.5-py33_0 2'],
            'UNLINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (softlink)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (softlink)

"""

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0 1', 'dateutil-1.5-py33_0 1',
                'numpy-1.7.1-py33_0 1', 'python-3.3.2-0 1', 'readline-6.2-0 1',
                'sqlite-3.7.13-0 1', 'tk-8.5.13-0 1', 'zlib-1.2.7-0 1'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0
    dateutil: 1.5-py33_0   \n\
    numpy:    1.7.1-py33_0 \n\
    python:   3.3.2-0      \n\
    readline: 6.2-0        \n\
    sqlite:   3.7.13-0     \n\
    tk:       8.5.13-0     \n\
    zlib:     1.2.7-0      \n\

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0 1', 'dateutil-2.1-py33_1 1'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19-py33_0 1', 'dateutil-1.5-py33_0 1'],
            'UNLINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0
    dateutil: 2.1-py33_1    --> 1.5-py33_0 \n\

"""

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3',
                'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3',
                'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0    (copy)
    numpy:    1.7.1-py33_0  (copy)
    python:   3.3.2-0       (copy)
    readline: 6.2-0         (copy)
    sqlite:   3.7.13-0      (copy)
    tk:       8.5.13-0      (copy)
    zlib:     1.2.7-0       (copy)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-2.1-py33_1 3'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0 (copy)
    dateutil: 1.5-py33_0  --> 2.1-py33_1    (copy)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19-py33_0 3', 'dateutil-1.5-py33_0 3'],
            'UNLINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 --> 0.19-py33_0 (copy)
    dateutil: 2.1-py33_1    --> 1.5-py33_0  (copy)

"""
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'True'
    reset_context(())

    d = Dist('cython-0.19.1-py33_0.tar.bz2')
    index[d] = IndexRecord.from_objects(index[d], channel='my_channel')

    d = Dist('dateutil-1.5-py33_0.tar.bz2')
    index[d] = IndexRecord.from_objects(index[d], channel='my_channel')

    actions = defaultdict(
        list, {
            'LINK': [
                'cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3',
                'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3',
                'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3'
            ]
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython:   0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0    my_channel (copy)
    numpy:    1.7.1-py33_0  <unknown>  (copy)
    python:   3.3.2-0       <unknown>  (copy)
    readline: 6.2-0         <unknown>  (copy)
    sqlite:   3.7.13-0      <unknown>  (copy)
    tk:       8.5.13-0      <unknown>  (copy)
    zlib:     1.2.7-0       <unknown>  (copy)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-2.1-py33_1 3'],
            'UNLINK': ['cython-0.19-py33_0', 'dateutil-1.5-py33_0']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel (copy)
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown>  (copy)

"""

    actions = defaultdict(
        list, {
            'LINK': ['cython-0.19-py33_0 3', 'dateutil-1.5-py33_0 3'],
            'UNLINK': ['cython-0.19.1-py33_0', 'dateutil-2.1-py33_1']
        })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 46
0
def test_display_actions():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())
    actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0', "numpy-1.7.1-py27_0"]})
    # The older test index doesn't have the size metadata
    index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752
    index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB
    numpy-1.7.1                |           py27_0         5.7 MB
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""

    actions = defaultdict(list, {'PREFIX':
    '/Users/aaronmeurer/anaconda/envs/test', 'SYMLINK_CONDA':
    ['/Users/aaronmeurer/anaconda'], 'LINK': ['python-3.3.2-0', 'readline-6.2-0 1', 'sqlite-3.7.13-0 1', 'tk-8.5.13-0 1', 'zlib-1.2.7-0 1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions['UNLINK'] = actions['LINK']
    actions['LINK'] = []

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    python:   3.3.2-0 \n\
    readline: 6.2-0   \n\
    sqlite:   3.7.13-0
    tk:       8.5.13-0
    zlib:     1.2.7-0 \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'], 'UNLINK':
    ['cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 --> 0.19.1-py33_0

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    cython: 0.19.1-py33_0 --> 0.19-py33_0

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0',
        'dateutil-1.5-py33_0', 'numpy-1.7.1-py33_0'], 'UNLINK':
        ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0

The following packages will be REMOVED:

    pip:      1.3.1-py33_1

The following packages will be UPDATED:

    cython:   0.19-py33_0  --> 0.19.1-py33_0

The following packages will be DOWNGRADED due to dependency conflicts:

    dateutil: 2.1-py33_1   --> 1.5-py33_0   \n\

"""

    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0',
        'dateutil-2.1-py33_1'], 'UNLINK':  ['cython-0.19-py33_0',
            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 --> 0.19.1-py33_0
    dateutil: 1.5-py33_0  --> 2.1-py33_1   \n\

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 47
0
def test_display_actions_features():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())

    actions = defaultdict(
        list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(
        list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {
        'UNLINK': ['numpy-1.7.1-py33_p0'],
        'LINK': ['numpy-1.7.0-py33_p0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl]

"""

    actions = defaultdict(list, {
        'LINK': ['numpy-1.7.1-py33_p0'],
        'UNLINK': ['numpy-1.7.0-py33_p0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {
        'LINK': ['numpy-1.7.1-py33_p0'],
        'UNLINK': ['numpy-1.7.1-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {
        'UNLINK': ['numpy-1.7.1-py33_p0'],
        'LINK': ['numpy-1.7.1-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0

"""
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'True'
    reset_context(())

    actions = defaultdict(
        list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(
        list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {
        'UNLINK': ['numpy-1.7.1-py33_p0'],
        'LINK': ['numpy-1.7.0-py33_p0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.0-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {
        'LINK': ['numpy-1.7.1-py33_p0'],
        'UNLINK': ['numpy-1.7.0-py33_p0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 <unknown> [mkl] --> 1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {
        'LINK': ['numpy-1.7.1-py33_p0'],
        'UNLINK': ['numpy-1.7.1-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 <unknown> --> 1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {
        'UNLINK': ['numpy-1.7.1-py33_p0'],
        'LINK': ['numpy-1.7.1-py33_0']
    })

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 48
0
def test_display_actions_features():
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'False'
    reset_context(())

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0

"""
    os.environ['CONDA_SHOW_CHANNEL_URLS'] = 'True'
    reset_context(())

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED due to dependency conflicts:

    numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.0-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 <unknown> [mkl] --> 1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 <unknown> --> 1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 49
0
def execute(args, parser):
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              '       try "conda remove -h" for more details',
                              args.json)

    prefix = get_prefix(args)
    if args.all and prefix == default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=args.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        index = linked_data(prefix)
        index = {dist + '.tar.bz2': info for dist, info in iteritems(index)}
    else:
        index = get_index_trap(channel_urls=channel_urls,
                               prepend=not args.override_channels,
                               use_local=args.use_local,
                               use_cache=args.use_index_cache,
                               json=args.json,
                               offline=args.offline,
                               prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError('cannot remove root environment,\n'
                                        '       add -n NAME or -p PREFIX option',
                                        args.json)
        actions = {inst.PREFIX: prefix}
        for fkey in sorted(iterkeys(index)):
            plan.add_unlink(actions, fkey[:-8])

    else:
        specs = specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and names_in_specs(root_no_rm, specs)):
            raise CondaEnvironmentError('cannot remove %s from root environment' %
                                        ', '.join(root_no_rm), args.json)
        actions = plan.remove_actions(prefix, specs, index=index,
                                      force=args.force, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)

            if args.json:
                stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        raise PackageNotFoundError('no packages found to remove from '
                                   'environment: %s' % prefix, args.json)

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % specs)
            except IOError as e:
                if e.errno == errno.EACCES:
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        stdout_json({
            'success': True,
            'actions': actions
        })
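
Both removal commands bail out early when plan.nothing_to_do(actions) reports an empty plan. The sketch below is only an assumption of what that check amounts to (the real implementation lives in conda.plan and inspects its own set of instruction keys): a plan is a no-op when none of its operation lists contain entries.

ACTION_KEYS = ('FETCH', 'EXTRACT', 'UNLINK', 'LINK', 'RM_EXTRACTED', 'RM_FETCHED')

def nothing_to_do(actions):
    # Hypothetical simplification: the plan is empty when every operation list is empty.
    return not any(actions.get(op) for op in ACTION_KEYS)

print(nothing_to_do({'PREFIX': '/tmp/env', 'LINK': []}))                       # True
print(nothing_to_do({'PREFIX': '/tmp/env', 'LINK': ['numpy-1.7.1-py33_0']}))   # False
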
Esempio n. 50
0
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit(
            'no package names supplied,\n'
            '       try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        common.error_and_exit(
            "cannot remove current environment. deactivate and run conda remove again"
        )
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] +
                                      list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json)
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option',
                json=args.json,
                error_type="CantRemoveRoot")

        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix)
                and common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix,
                                      specs,
                                      index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)

            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    elif (sys.platform == 'win32' and not args.force_pscheck
          and not pscheck.check_processes(verbose=False)):
        common.error_and_exit(
            "Cannot continue removal while processes "
            "from packages are running without --force-pscheck.",
            json=True,
            error_type="ProcessesStillRunning")

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
Esempio n. 51
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """

    newenv = command == 'create'
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix)
    config.set_pkgs_dirs(prefix)

    if command == 'update':
        if len(args.packages) == 0:
            sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name)
            if '=' in name:
                sys.exit("Invalid package name: '%s'" % (name))
            if name not in set(ci.name_dist(d) for d in linked):
                sys.exit("Error: package '%s' is not installed in %s" %
                         (name, prefix))

    if newenv and args.clone:
        if args.packages:
            sys.exit('Error: did not expect any arguments for --clone')
        clone(args.clone, prefix)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build import config as build_config
        except ImportError:
            sys.exit("Error: you need to have 'conda-build' installed"
                     " to use the --use-local option")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = get_index([url_path(build_config.croot)],
                          use_cache=args.use_cache)
    else:
        index = get_index(channel_urls=channel_urls, prepend=not
                          args.override_channels,
                          use_cache=args.use_cache)

    # Don't update packages that are already up-to-date
    if command == 'update':
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [dist.rsplit('-', 2)[2].rsplit('.tar.bz2', 1)[0]
                          for dist in linked
                          if dist.rsplit('-', 2)[0] == name]
            assert len(vers_inst) == 1, name
            assert len(build_inst) == 1, name
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            # This won't do the right thing for python 2
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import list_packages

            regex = '^(%s)$' % '|'.join(orig_packages)
            print('# All requested packages already installed.')
            list_packages(prefix, regex)
            return

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    # handle explicit installs of conda packages
    if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
        from conda.misc import install_local_packages
        install_local_packages(prefix, args.packages, verbose=not args.quiet)
        return

    if any(s.endswith('.tar.bz2') for s in args.packages):
        sys.exit("cannot mix specifications with conda package filenames")

    if args.force:
        args.no_deps = True

    if args.file:
        specs = common.specs_from_url(args.file)
    else:
        specs = common.specs_from_args(args.packages)

    common.check_specs(prefix, specs)

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                sys.exit("Error: could not create directory: %s" % prefix)
        else:
            sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)

    actions = plan.install_actions(prefix, index, specs,
                                   force=args.force, only_names=only_names)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import list_packages

        regex = '^(%s)$' % '|'.join(spec_names)
        print('# All requested packages already installed.')
        list_packages(prefix, regex)
        return

    print()
    print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if newenv:
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
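The up-to-date check in the example above leans on conda's dist naming convention, name-version-build. A minimal standalone sketch of that parsing (the dist string below is made up for illustration only):

def split_dist(dist):
    # Dist strings follow "name-version-build", so splitting on the last
    # two dashes recovers the three parts.
    return dist.rsplit('-', 2)

print(split_dist('numpy-1.7.1-py27_0'))  # ['numpy', '1.7.1', 'py27_0']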
Esempio n. 52
0
def execute(args, parser):
    import conda.plan as plan
    import conda.instructions as inst
    from conda.gateways.disk.delete import rm_rf
    from conda.core.linked_data import linked_data

    if not (args.all or args.package_names):
        raise CondaValueError('no package names supplied,\n'
                              '       try "conda remove -h" for more details')

    prefix = context.prefix_w_legacy_search
    if args.all and prefix == context.default_prefix:
        msg = "cannot remove current environment. deactivate and run conda remove again"
        raise CondaEnvironmentError(msg)
    check_write('remove', prefix, json=context.json)
    ensure_use_local(args)
    ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    if not args.features and args.all:
        index = linked_data(prefix)
        index = {dist: info for dist, info in iteritems(index)}
    else:
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels,
                          use_local=args.use_local,
                          use_cache=args.use_index_cache,
                          prefix=prefix)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            raise CondaEnvironmentError(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option')
        actions = {inst.PREFIX: prefix}
        for dist in sorted(iterkeys(index)):
            plan.add_unlink(actions, dist)

    else:
        specs = specs_from_args(args.package_names)
        if (context.conda_in_root and plan.is_root_prefix(prefix)
                and names_in_specs(ROOT_NO_RM, specs) and not args.force):
            raise CondaEnvironmentError(
                'cannot remove %s from root environment' %
                ', '.join(ROOT_NO_RM))
        actions = plan.remove_actions(prefix,
                                      specs,
                                      index=index,
                                      force=args.force,
                                      pinned=args.pinned)

    delete_trash()

    if plan.nothing_to_do(actions):
        if args.all:
            print("\nRemove all packages in environment %s:\n" % prefix,
                  file=sys.stderr)
            if not context.json:
                confirm_yn(args)
            rm_rf(prefix)

            if context.json:
                stdout_json({'success': True, 'actions': actions})
            return
        raise PackageNotFoundError(
            '', 'no packages found to remove from '
            'environment: %s' % prefix)

    if not context.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if context.json and args.dry_run:
        stdout_json({'success': True, 'dry_run': True, 'actions': actions})
        return

    if not context.json:
        confirm_yn(args)

    if context.json and not context.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not context.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not context.quiet)
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % ','.join(specs))
            except IOError as e:
                if e.errno == errno.EACCES:
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if context.json:
        stdout_json({'success': True, 'actions': actions})
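One detail worth noting in this variant: the history entry joins the removed specs with commas, while other variants in this listing pass the list straight to %s and so record its repr. A standalone sketch of the two resulting lines (the specs are made up):

specs = ['numpy', 'scipy >=0.13']
print('# remove specs: %s' % ','.join(specs))  # -> # remove specs: numpy,scipy >=0.13
print('# remove specs: %s' % specs)            # -> # remove specs: ['numpy', 'scipy >=0.13']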
Esempio n. 53
0
def execute(args, parser):
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit(
            'no package names supplied,\n'
            '       try "conda remove -h" for more details',
            json=args.json,
            error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        common.error_and_exit(
            "cannot remove current environment. deactivate and run conda remove again"
        )
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit(
                'cannot remove root environment,\n'
                '       add -n NAME or -p PREFIX option',
                json=args.json,
                error_type="CantRemoveRoot")

        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix)
                and common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix,
                                      specs,
                                      index=index,
                                      pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)

            if args.json:
                common.stdout_json({'success': True, 'actions': actions})
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
        if specs:
            try:
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# remove specs: %s\n' % specs)
            except IOError as e:
                if e.errno == errno.EACCES:
                    log.debug("Can't write the history file")
                else:
                    raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({'success': True, 'actions': actions})
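All of the remove variants above converge on the same plan structure: a plain dict keyed by instruction names, which plan.display_actions and plan.execute_actions then consume. A minimal sketch of that shape (the path and dists are illustrative, not taken from the examples):

actions = {
    'PREFIX': '/opt/envs/scratch',                       # target environment
    'UNLINK': ['numpy-1.7.1-py27_0', 'readline-6.2-0'],  # dists to remove
}
# plan.display_actions(actions, index) renders such a dict as the
# "Package plan for package removal" output exercised in the tests below.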
Esempio n. 54
0
def test_display_actions_show_channel_urls():
    conda.config.show_channel_urls = True
    actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0',
        "numpy-1.7.1-py27_0"]})
    # The older test index doesn't have the size metadata
    index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752
    index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be downloaded:

    package                    |            build
    ---------------------------|-----------------
    sympy-0.7.2                |           py27_0         4.2 MB  <unknown>
    numpy-1.7.1                |           py27_0         5.7 MB  <unknown>
    ------------------------------------------------------------
                                           Total:         9.9 MB

"""


    actions = defaultdict(list, {
        'PREFIX': '/Users/aaronmeurer/anaconda/envs/test',
        'SYMLINK_CONDA': ['/Users/aaronmeurer/anaconda'],
        'LINK': ['python-3.3.2-0',
                 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1',
                 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1',
                 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1',
                 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""

    actions['UNLINK'] = actions['LINK']
    actions['LINK'] = []

    with captured() as c:
        display_actions(actions, index)


    assert c.stdout == """
The following packages will be REMOVED:

    python:   3.3.2-0  <unknown>
    readline: 6.2-0    <unknown>
    sqlite:   3.7.13-0 <unknown>
    tk:       8.5.13-0 <unknown>
    zlib:     1.2.7-0  <unknown>

"""


    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'],
                                 'UNLINK': ['cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']


    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>

"""


    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0',
                                          'dateutil-1.5-py33_0',
                                          'numpy-1.7.1-py33_0'],
                                 'UNLINK': ['cython-0.19-py33_0',
                                            'dateutil-2.1-py33_1',
                                            'pip-1.3.1-py33_1']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    numpy:    1.7.1-py33_0  <unknown>

The following packages will be REMOVED:

    pip:      1.3.1-py33_1 <unknown>

The following packages will be UPDATED:

    cython:   0.19-py33_0  <unknown> --> 0.19.1-py33_0 <unknown>

The following packages will be DOWNGRADED:

    dateutil: 2.1-py33_1   <unknown> --> 1.5-py33_0    <unknown>

"""


    actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0',
                                          'dateutil-2.1-py33_1'],
                                 'UNLINK': ['cython-0.19-py33_0',
                                            'dateutil-1.5-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown>
    dateutil: 1.5-py33_0  <unknown> --> 2.1-py33_1    <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    cython:   0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown>
    dateutil: 2.1-py33_1    <unknown> --> 1.5-py33_0  <unknown>

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']

    index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel'
    index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel'

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    cython:   0.19-py33_0 <unknown>  --> 0.19.1-py33_0 my_channel
    dateutil: 1.5-py33_0  my_channel --> 2.1-py33_1    <unknown> \n\

"""

    actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK']


    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 55
0
            if args.json:
                common.stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return


    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
Esempio n. 56
0
def test_display_actions_features():
    conda.config.show_channel_urls = False

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0  \n\
    numpy:  1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0

"""

    conda.config.show_channel_urls = True

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following NEW packages will be INSTALLED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""


    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be REMOVED:

    cython: 0.19-py33_0   <unknown>
    numpy:  1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be DOWNGRADED:

    numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.0-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.0-py33_p0 <unknown> [mkl] --> 1.7.1-py33_p0 <unknown> [mkl]

"""


    actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    # NB: Packages whose version does not change are put in UPDATED
    assert c.stdout == """
The following packages will be UPDATED:

    numpy: 1.7.1-py33_0 <unknown> --> 1.7.1-py33_p0 <unknown> [mkl]

"""

    actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']})

    with captured() as c:
        display_actions(actions, index)

    assert c.stdout == """
Esempio n. 57
0
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if isupdate and not (args.file or args.all or args.packages):
        common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                              json=args.json,
                              error_type="ValueError")

    linked = ci.linked(prefix)
    lnames = {ci.name_dist(d) for d in linked}
    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=args.json, update=True)
            if name not in lnames:
                common.error_and_exit("Package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix, verbose=not args.quiet)
            return
    elif getattr(args, 'all', False):
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.explicit(args.packages, prefix, verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet,
              fetch_args={'use_cache': args.use_index_cache,
                          'unknown': args.unknown})
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_local=args.use_local,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline,
                                  prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    plan.add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]

            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]

            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if isinstall and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               always_copy=args.copy,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
    except NoPackagesFound as e:
        error_message = e.args[0]

        if isupdate and args.all:
            # Packages not found here are simply ones that were installed but
            # can no longer be found in the index. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message += "\n\nClose matches found; did you mean one of these?\n"
                error_message += "\n    %s: %s" % (pkg, ', '.join(close))
                nfound += 1
            error_message += '\n\nYou can search for packages on anaconda.org with'
            error_message += '\n\n    anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n(and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client'
                error_message += ' command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\nNote that you have pinned specs in %s:" % path
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except (Unsatisfiable, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise

        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
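The "did you mean" hint in the last example is built with a get_close_matches call whose signature matches difflib's helper of the same name (the import below is that assumption). A standalone sketch of the lookup, with a made-up candidate set; conda draws its candidates from the channel index:

from difflib import get_close_matches  # assumes conda's helper is difflib's

packages = {'numpy', 'numexpr', 'scipy', 'sympy'}       # made-up candidates
print(get_close_matches('nmpy', packages, cutoff=0.7))  # e.g. ['numpy']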