Ejemplo n.º 1
0
    def test_clone_offline_multichannel_with_untracked(self):
        """Clone an environment offline after adding the 'r' channel; the
        clone must carry over installed packages and the untracked condarc
        file."""
        with make_temp_env("python") as prefix:
            assert_package_is_installed(prefix, 'python')
            from conda.config import get_rc_urls
            assert 'r' not in get_rc_urls()

            # rpy2 is not reachable before the r channel is configured
            out, _ = run_command(Commands.SEARCH, prefix, "rpy2", "--json")
            payload = json_loads(out.replace("Fetching package metadata ...", "").strip())
            assert bool(payload) is False

            # register the r channel in this prefix's condarc
            run_command(Commands.CONFIG, prefix, "--add channels r")
            out, _ = run_command(Commands.CONFIG, prefix, "--get", "--json")
            payload = json_loads(out)
            assert payload['rc_path'] == join(prefix, 'condarc')
            assert payload['get']['channels']

            # with the channel in place, the search must now succeed
            out, _ = run_command(Commands.SEARCH, prefix, "rpy2", "--json")
            payload = json_loads(out.replace("Fetching package metadata ...", "").strip())
            assert len(payload['rpy2']) > 1

            run_command(Commands.INSTALL, prefix, "rpy2")
            assert_package_is_installed(prefix, 'rpy2')
            run_command(Commands.LIST, prefix)

            with enforce_offline():
                with make_temp_env("--clone", prefix, "--offline") as clone_prefix:
                    assert_package_is_installed(clone_prefix, 'python')
                    assert_package_is_installed(clone_prefix, 'rpy2')
                    assert isfile(join(clone_prefix, 'condarc'))  # untracked file
Ejemplo n.º 2
0
    def test_clone_offline_multichannel_with_untracked(self):
        """Offline clone of an env that uses the 'r' channel must preserve
        its packages and the untracked condarc file."""
        with make_temp_env("python") as prefix:
            assert_package_is_installed(prefix, 'python')
            from conda.config import get_rc_urls
            assert 'r' not in get_rc_urls()

            def search_rpy2():
                # run `conda search rpy2 --json` and parse the JSON payload
                text, _ = run_command(Commands.SEARCH, prefix, "rpy2",
                                      "--json")
                return json_loads(
                    text.replace("Fetching package metadata ...", "").strip())

            # before the channel is added, the search comes back empty
            assert bool(search_rpy2()) is False

            run_command(Commands.CONFIG, prefix, "--add channels r")

            # after adding the r channel, rpy2 becomes visible
            assert len(search_rpy2()['rpy2']) > 1

            run_command(Commands.INSTALL, prefix, "rpy2")
            assert_package_is_installed(prefix, 'rpy2')
            run_command(Commands.LIST, prefix)

            with enforce_offline():
                with make_temp_env("--clone", prefix,
                                   "--offline") as clone_prefix:
                    assert_package_is_installed(clone_prefix, 'python')
                    assert_package_is_installed(clone_prefix, 'rpy2')
                    assert isfile(join(clone_prefix,
                                       'condarc'))  # untracked file
Ejemplo n.º 3
0
def from_environment(name, prefix, no_builds=False):
    """Build an Environment description from an existing conda prefix.

    Args:
        name: name to record on the returned Environment.
        prefix: path of the conda environment to inspect.
        no_builds: when True, omit the build string from conda specs
            (``pkg=version`` instead of ``pkg=version=build``).

    Returns:
        Environment listing conda dependencies, an optional ``{'pip': [...]}``
        entry for pip-only packages, and the configured channel URLs.
    """
    installed = install.linked(prefix)
    conda_pkgs = copy(installed)
    # json=True hides the output; add_pip_installed mutates `installed`
    add_pip_installed(prefix, installed, json=True)

    # anything the pip scan added that conda has not linked is pip-only
    pip_pkgs = sorted(installed - conda_pkgs)

    # dist strings look like "name-version-build"; rsplit keeps dashes
    # that are part of the package name intact
    if no_builds:
        dependencies = ['='.join(a.rsplit('-', 2)[0:2]) for a in sorted(conda_pkgs)]
    else:
        dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
    if pip_pkgs:  # idiomatic truthiness test instead of len(...) > 0
        dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})

    channels = config.get_rc_urls()

    return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
Ejemplo n.º 4
0
def from_environment(name, prefix, no_builds=False):
    """Create an Environment spec from the packages installed in *prefix*.

    Args:
        name: name to record on the returned Environment.
        prefix: path of the conda environment to inspect.
        no_builds: when True, drop the build string from conda specs
            (``pkg=version`` instead of ``pkg=version=build``).

    Returns:
        Environment with conda dependencies, an optional ``{'pip': [...]}``
        entry for pip-installed packages, and the configured channel URLs.
    """
    installed = install.linked(prefix)
    conda_pkgs = copy(installed)
    # json=True hides the output, data is added to installed
    add_pip_installed(prefix, installed, json=True)

    # packages seen by the pip scan but not linked by conda are pip-only
    pip_pkgs = sorted(installed - conda_pkgs)

    # dist strings are "name-version-build"; rsplit preserves dashes in names
    if no_builds:
        dependencies = [
            '='.join(a.rsplit('-', 2)[0:2]) for a in sorted(conda_pkgs)
        ]
    else:
        dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
    # idiomatic truthiness test instead of len(...) > 0
    if pip_pkgs:
        dependencies.append(
            {'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})

    channels = config.get_rc_urls()

    return Environment(name=name,
                       dependencies=dependencies,
                       channels=channels,
                       prefix=prefix)
Ejemplo n.º 5
0
    def test_normalize_urls(self):
        """Exercise config.normalize_urls and config.prioritize_channels
        under several channel_alias / binstar-token configurations.

        NOTE: this test mutates module-level state (config.rc,
        config.binstar_client) and reloads condarc between scenarios.
        """
        current_platform = config.subdir
        assert config.DEFAULT_CHANNEL_ALIAS == 'https://conda.anaconda.org/'
        assert config.rc.get('channel_alias') == 'https://your.repo/'
        assert config.channel_prefix(False) == 'https://your.repo/'
        assert config.binstar_domain == 'https://mybinstar.com/'
        assert config.binstar_domain_tok == 'https://mybinstar.com/t/01234abcde/'
        assert config.get_rc_urls() == ["binstar_username", "http://some.custom/channel", "defaults"]
        # a representative mix of short names, full URLs, file URLs, and
        # tokenized URLs
        channel_urls = [
            'defaults', 'system',
            'https://conda.anaconda.org/username',
            'file:///Users/username/repo',
            'https://mybinstar.com/t/5768wxyz/test2',
            'https://mybinstar.com/test',
            'https://conda.anaconda.org/t/abcdefgh/username',
            'username'
        ]
        platform = 'osx-64'

        # Scenario 1: channel_alias set to your.repo
        normurls = config.normalize_urls(channel_urls, platform)
        assert normurls == [
           # defaults
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           # system (condarc)
           'https://your.repo/binstar_username/osx-64/',
           'https://your.repo/binstar_username/noarch/',
           'http://some.custom/channel/osx-64/',
           'http://some.custom/channel/noarch/',
           # defaults is repeated in condarc; that's OK
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           # conda.anaconda.org is not our default binstar client
           'https://conda.anaconda.org/username/osx-64/',
           'https://conda.anaconda.org/username/noarch/',
           'file:///Users/username/repo/osx-64/',
           'file:///Users/username/repo/noarch/',
           # mybinstar.com is not channel_alias, but we still add tokens
           'https://mybinstar.com/t/5768wxyz/test2/osx-64/',
           'https://mybinstar.com/t/5768wxyz/test2/noarch/',
           # token already supplied, do not change/remove it
           'https://mybinstar.com/t/01234abcde/test/osx-64/',
           'https://mybinstar.com/t/01234abcde/test/noarch/',
           # we do not remove tokens from conda.anaconda.org
           'https://conda.anaconda.org/t/abcdefgh/username/osx-64/',
           'https://conda.anaconda.org/t/abcdefgh/username/noarch/',
           # short channel; add channel_alias
           'https://your.repo/username/osx-64/',
           'https://your.repo/username/noarch/']

        priurls = config.prioritize_channels(normurls)
        assert dict(priurls) == {
           # defaults appears twice, keep higher priority
           'https://repo.continuum.io/pkgs/free/noarch/': ('defaults', 1),
           'https://repo.continuum.io/pkgs/free/osx-64/': ('defaults', 1),
           'https://repo.continuum.io/pkgs/pro/noarch/': ('defaults', 1),
           'https://repo.continuum.io/pkgs/pro/osx-64/': ('defaults', 1),
           'https://your.repo/binstar_username/noarch/': ('binstar_username', 2),
           'https://your.repo/binstar_username/osx-64/': ('binstar_username', 2),
           'http://some.custom/channel/noarch/': ('http://some.custom/channel', 3),
           'http://some.custom/channel/osx-64/': ('http://some.custom/channel', 3),
           'https://conda.anaconda.org/t/abcdefgh/username/noarch/': ('https://conda.anaconda.org/username', 4),
           'https://conda.anaconda.org/t/abcdefgh/username/osx-64/': ('https://conda.anaconda.org/username', 4),
           'file:///Users/username/repo/noarch/': ('file:///Users/username/repo', 5),
           'file:///Users/username/repo/osx-64/': ('file:///Users/username/repo', 5),
           # the tokenized version came first, but we still give it the same priority
           'https://conda.anaconda.org/username/noarch/': ('https://conda.anaconda.org/username', 4),
           'https://conda.anaconda.org/username/osx-64/': ('https://conda.anaconda.org/username', 4),
           'https://mybinstar.com/t/5768wxyz/test2/noarch/': ('https://mybinstar.com/test2', 6),
           'https://mybinstar.com/t/5768wxyz/test2/osx-64/': ('https://mybinstar.com/test2', 6),
           'https://mybinstar.com/t/01234abcde/test/noarch/': ('https://mybinstar.com/test', 7),
           'https://mybinstar.com/t/01234abcde/test/osx-64/': ('https://mybinstar.com/test', 7),
           'https://your.repo/username/noarch/': ('username', 8),
           'https://your.repo/username/osx-64/': ('username', 8)
        }

        # Scenario 2: Delete the channel alias so now the short channels
        # point to binstar
        del config.rc['channel_alias']
        config.rc['offline'] = False
        config.load_condarc()
        config.binstar_client = BinstarTester()
        normurls = config.normalize_urls(channel_urls, platform)
        # all your.repo references should be changed to mybinstar.com
        assert normurls == [
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           'https://mybinstar.com/t/01234abcde/binstar_username/osx-64/',
           'https://mybinstar.com/t/01234abcde/binstar_username/noarch/',
           'http://some.custom/channel/osx-64/',
           'http://some.custom/channel/noarch/',
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           'https://conda.anaconda.org/username/osx-64/',
           'https://conda.anaconda.org/username/noarch/',
           'file:///Users/username/repo/osx-64/',
           'file:///Users/username/repo/noarch/',
           'https://mybinstar.com/t/5768wxyz/test2/osx-64/',
           'https://mybinstar.com/t/5768wxyz/test2/noarch/',
           'https://mybinstar.com/t/01234abcde/test/osx-64/',
           'https://mybinstar.com/t/01234abcde/test/noarch/',
           'https://conda.anaconda.org/t/abcdefgh/username/osx-64/',
           'https://conda.anaconda.org/t/abcdefgh/username/noarch/',
           'https://mybinstar.com/t/01234abcde/username/osx-64/',
           'https://mybinstar.com/t/01234abcde/username/noarch/'
        ]

        # Scenario 3: Delete the anaconda token
        config.load_condarc()
        config.binstar_client = BinstarTester(token=None)
        normurls = config.normalize_urls(channel_urls, platform)
        # tokens should not be added (but supplied tokens are kept)
        assert normurls == [
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           'https://mybinstar.com/binstar_username/osx-64/',
           'https://mybinstar.com/binstar_username/noarch/',
           'http://some.custom/channel/osx-64/',
           'http://some.custom/channel/noarch/',
           'https://repo.continuum.io/pkgs/free/osx-64/',
           'https://repo.continuum.io/pkgs/free/noarch/',
           'https://repo.continuum.io/pkgs/pro/osx-64/',
           'https://repo.continuum.io/pkgs/pro/noarch/',
           'https://conda.anaconda.org/username/osx-64/',
           'https://conda.anaconda.org/username/noarch/',
           'file:///Users/username/repo/osx-64/',
           'file:///Users/username/repo/noarch/',
           'https://mybinstar.com/t/5768wxyz/test2/osx-64/',
           'https://mybinstar.com/t/5768wxyz/test2/noarch/',
           'https://mybinstar.com/test/osx-64/',
           'https://mybinstar.com/test/noarch/',
           'https://conda.anaconda.org/t/abcdefgh/username/osx-64/',
           'https://conda.anaconda.org/t/abcdefgh/username/noarch/',
           'https://mybinstar.com/username/osx-64/',
           'https://mybinstar.com/username/noarch/'
        ]

        # Scenario 4: Turn off add_anaconda_token
        config.rc['add_binstar_token'] = False
        config.load_condarc()
        config.binstar_client = BinstarTester()
        normurls2 = config.normalize_urls(channel_urls, platform)
        # tokens should not be added (but supplied tokens are kept), so the
        # result matches the token-less run above
        assert normurls == normurls2

        # Scenario 5: Disable binstar client altogether
        config.load_condarc()
        config.binstar_client = ()
        normurls = config.normalize_urls(channel_urls, platform)
        # should drop back to conda.anaconda.org
        assert normurls == [
          'https://repo.continuum.io/pkgs/free/osx-64/',
          'https://repo.continuum.io/pkgs/free/noarch/',
          'https://repo.continuum.io/pkgs/pro/osx-64/',
          'https://repo.continuum.io/pkgs/pro/noarch/',
          'https://conda.anaconda.org/binstar_username/osx-64/',
          'https://conda.anaconda.org/binstar_username/noarch/',
          'http://some.custom/channel/osx-64/',
          'http://some.custom/channel/noarch/',
          'https://repo.continuum.io/pkgs/free/osx-64/',
          'https://repo.continuum.io/pkgs/free/noarch/',
          'https://repo.continuum.io/pkgs/pro/osx-64/',
          'https://repo.continuum.io/pkgs/pro/noarch/',
          'https://conda.anaconda.org/username/osx-64/',
          'https://conda.anaconda.org/username/noarch/',
          'file:///Users/username/repo/osx-64/',
          'file:///Users/username/repo/noarch/',
          'https://mybinstar.com/t/5768wxyz/test2/osx-64/',
          'https://mybinstar.com/t/5768wxyz/test2/noarch/',
          'https://mybinstar.com/test/osx-64/',
          'https://mybinstar.com/test/noarch/',
          'https://conda.anaconda.org/t/abcdefgh/username/osx-64/',
          'https://conda.anaconda.org/t/abcdefgh/username/noarch/',
          'https://conda.anaconda.org/username/osx-64/',
          'https://conda.anaconda.org/username/noarch/'
        ]
Ejemplo n.º 6
0
def execute(args, parser):
    """Entry point for the build command: render, build, test, and optionally
    upload each recipe in ``args.recipe``, recursing into dependency recipes
    when a build fails on a missing package that has a local recipe directory.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda.config as cc

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet or args.debug

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # NOTE(review): bare except is deliberate best-effort here --
            # failure to empty the trash must never abort the build.
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        # ensure local build dirs exist with a fresh index so already-built
        # packages can be detected further down
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        arg_channels = ['local']
        if args.channel:
            arg_channels.extend(args.channel)
        index = build.get_build_index(clear_cache=True,
                                      arg_channels=arg_channels)

    already_built = set()       # package filenames built in this invocation
    to_build_recursive = []     # dependency names queued for recursive builds
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # packed recipe: unpack into a temp dir, cleaned up at the end
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download, need_reparse_in_env = render_recipe(recipe_dir,
                                                                     no_download_source=False,
                                                                     verbose=False,
                                                                     dirty=args.dirty)

        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                    "configuration." % m.dist())
            continue
        if args.skip_existing:
            # all channels the package might already live in
            urls = cc.get_rc_urls() + cc.get_local_urls() + ['local', ]
            if args.channel:
                urls.extend(args.channel)

            # will be empty if none found, and evaluate to False
            package_exists = [url for url in urls if url + '::' + m.pkg_fn() in index]
            if (package_exists or m.pkg_fn() in index or m.pkg_fn() in already_built):
                print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False, debug=args.debug)
        elif args.source:
            if need_source_download:
                source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            need_reparse_in_env=need_reparse_in_env,
                            dirty=args.dirty, activate=args.activate,
                            debug=args.debug)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith('  - '):
                        continue
                    # NOTE(review): lstrip('  - ') strips a *character set*
                    # (spaces/dashes), not the literal prefix; fine as long as
                    # package names never start with '-' or ' '.
                    pkg = line.lstrip('  - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        sys.stderr.write("Warning: package conflict - you may have unresolved "
                                         "dependencies. Try to conda install each of your "
                                         "dependencies to figure out which has unresolved "
                                         "dependencies.")
                        continue
                    # look for a sibling recipe directory named "<pkg>-<version>"
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                # already attempted this dependency once;
                                # bail out to avoid an infinite rebuild loop
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                    " recipe directory, so building " +
                                    "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                # requeue the failed recipe behind its missing dependencies
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m, activate=args.activate, debug=args.debug)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
Ejemplo n.º 7
0
def execute(args, parser):
    """Entry point for the build command: render, build, test, and optionally
    upload each recipe in ``args.recipe``, recursing into dependency recipes
    when a build fails on a missing package that has a local recipe directory.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda.config as cc

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet or args.debug

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # NOTE(review): bare except is deliberate best-effort here --
            # failure to empty the trash must never abort the build.
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        # ensure local build dirs exist with a fresh index so already-built
        # packages can be detected further down
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        arg_channels = ['local']
        if args.channel:
            arg_channels.extend(args.channel)
        index = build.get_build_index(clear_cache=True,
                                      arg_channels=arg_channels)

    already_built = set()       # package filenames built in this invocation
    to_build_recursive = []     # dependency names queued for recursive builds
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # packed recipe: unpack into a temp dir, cleaned up at the end
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download, need_reparse_in_env = render_recipe(recipe_dir,
                                                                     no_download_source=False,
                                                                     verbose=False,
                                                                     dirty=args.dirty)

        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                    "configuration." % m.dist())
            continue
        if args.skip_existing:
            # all channels the package might already live in
            urls = cc.get_rc_urls() + cc.get_local_urls() + ['local', ]
            if args.channel:
                urls.extend(args.channel)

            # will be empty if none found, and evaluate to False
            package_exists = [url for url in urls if url + '::' + m.pkg_fn() in index]
            if (package_exists or m.pkg_fn() in index or m.pkg_fn() in already_built):
                print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False, debug=args.debug)
        elif args.source:
            if need_source_download:
                source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            need_reparse_in_env=need_reparse_in_env,
                            dirty=args.dirty, activate=args.activate,
                            debug=args.debug)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith('  - '):
                        continue
                    # NOTE(review): lstrip('  - ') strips a *character set*
                    # (spaces/dashes), not the literal prefix; fine as long as
                    # package names never start with '-' or ' '.
                    pkg = line.lstrip('  - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        sys.stderr.write("Warning: package conflict - you may have unresolved "
                                         "dependencies. Try to conda install each of your "
                                         "dependencies to figure out which has unresolved "
                                         "dependencies.")
                        continue
                    # look for a sibling recipe directory named "<pkg>-<version>"
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                # already attempted this dependency once;
                                # bail out to avoid an infinite rebuild loop
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                    " recipe directory, so building " +
                                    "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                # requeue the failed recipe behind its missing dependencies
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m, activate=args.activate, debug=args.debug)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())