Example #1
def test_build_output_folder(testing_workdir, testing_metadata, capfd):
    api.output_yaml(testing_metadata, 'meta.yaml')
    with TemporaryDirectory() as tmp:
        out = os.path.join(tmp, 'out')
        args = [
            testing_workdir, '--no-build-id', '--croot', tmp, '--no-activate',
            '--no-anaconda-upload', '--output-folder', out
        ]
        output = main_build.execute(args)[0]
        assert os.path.isfile(
            os.path.join(out, testing_metadata.config.host_subdir,
                         os.path.basename(output)))
Example #2
def test_purge_all(testing_workdir, testing_metadata):
    """
    purge-all clears out build folders as well as built packages in the platform subdirectories (osx-64 and the like)
    """
    api.output_yaml(testing_metadata, 'meta.yaml')
    with TemporaryDirectory() as tmpdir:
        testing_metadata.config.croot = tmpdir
        outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True)
        args = ['purge-all', '--croot', tmpdir]
        main_build.execute(args)
        assert not get_build_folders(testing_metadata.config.croot)
        assert not any(os.path.isfile(fn) for fn in outputs)
Example #3
def test_render_without_channel_fails():
    # do not make the extra channel available, so the required package should not be found
    with TemporaryDirectory() as tmpdir:
        rendered_filename = os.path.join(tmpdir, 'out.yaml')
        args = ['--override-channels', '-c', 'conda', os.path.join(metadata_dir, "_recipe_requiring_external_channel"), '--file', rendered_filename]
        main_render.execute(args)
        with open(rendered_filename) as f:
            rendered_meta = yaml.safe_load(f)
        required_package_string = [pkg for pkg in
                                   rendered_meta.get('requirements', {}).get('build', [])
                                   if 'conda_build_test_requirement' in pkg][0]
        assert required_package_string == 'conda_build_test_requirement', \
            "Expected to get only the base package name because it should not be found, but got: {}".format(required_package_string)
Example #4
def test_build_output_folder(testing_workdir, test_metadata, capfd):
    api.output_yaml(test_metadata, 'meta.yaml')
    with TemporaryDirectory() as tmp:
        out = os.path.join(tmp, 'out')
        args = [
            testing_workdir, '--no-build-id', '--croot', tmp, '--no-activate',
            '--no-anaconda-upload', '--output-folder', out
        ]
        main_build.execute(args)
        test_metadata.config.output_folder = out
        output, error = capfd.readouterr()
        assert "anaconda upload {}".format(out) in output
Example #5
def test_build_source(testing_workdir):
    with TemporaryDirectory() as tmp:
        args = [
            os.path.join(metadata_dir, '_pyyaml_find_header'),
            '--source',
            '--no-build-id',
            '--croot',
            tmp,
            '--no-activate',
            '--no-anaconda-upload',
        ]
        main_build.execute(args)
        assert os.path.isfile(os.path.join(tmp, 'work', 'setup.py'))
Example #6
def test_render_add_channel():
    """This recipe requires the conda_build_test_requirement package, which is
    only on the conda_build_test channel. This verifies that the -c argument
    works for rendering."""
    with TemporaryDirectory() as tmpdir:
        rendered_filename = os.path.join(tmpdir, 'out.yaml')
        args = ['-c', 'conda_build_test', os.path.join(metadata_dir, "_recipe_requiring_external_channel"), '--file', rendered_filename]
        main_render.execute(args)
        with open(rendered_filename) as f:
            rendered_meta = yaml.safe_load(f)
        required_package_string = [pkg for pkg in rendered_meta['requirements']['build'] if 'conda_build_test_requirement' in pkg][0]
        required_package_details = required_package_string.split(' ')
        assert len(required_package_details) > 1, "Expected version number on successful rendering, but got only {}".format(required_package_details)
        assert required_package_details[1] == '1.0', "Expected version number 1.0 on successful rendering, but got {}".format(required_package_details[1])
Example #7
def make_hardlink_copy(path, prefix):
    """Hardlinks create invalid packages.  Copy files to break the link.
    Symlinks are OK, and unaffected here."""
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(prefix, path))
    fn = os.path.basename(path)
    if os.lstat(path).st_nlink > 1:
        with TemporaryDirectory() as dest:
            # copy file to new name
            utils.copy_into(path, dest)
            # remove old file
            utils.rm_rf(path)
            # rename the copy back to the original filename. It is essential to
            # use copying (as opposed to os.rename) so that crossing volume
            # boundaries works.
            utils.copy_into(os.path.join(dest, fn), path)
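The os.lstat(path).st_nlink check is what detects a hardlinked file: a regular file's link count exceeds 1 exactly when another directory entry shares its inode. A standalone sketch of the detection and the copy-to-break-the-link trick (a POSIX-style filesystem is assumed; paths are hypothetical):

import os
import shutil
import tempfile

tmp = tempfile.mkdtemp()
original = os.path.join(tmp, 'a.txt')
open(original, 'w').close()
os.link(original, os.path.join(tmp, 'b.txt'))   # second name, same inode
assert os.lstat(original).st_nlink == 2

# break the link: copy aside, remove the original, copy back
aside = os.path.join(tmp, 'a.copy')
shutil.copy2(original, aside)
os.remove(original)
shutil.copy2(aside, original)
assert os.lstat(original).st_nlink == 1         # independent file again
shutil.rmtree(tmp)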
Example #8
def test_keep_old_work(config, build_id):
    config.keep_old_work = True
    with TemporaryDirectory() as temp_dir:
        config.croot = temp_dir
        config.build_id = build_id
        work_path = os.path.join(temp_dir, build_id, "work")
        os.makedirs(work_path)
        assert len(os.listdir(config.work_dir)) == 0
        with open(os.path.join(work_path, 'a_touched_file.magic'), 'w') as _:
            # Touch a random file so the "work_dir" is not empty
            pass
        assert len(os.listdir(config.work_dir)) > 0
        config.compute_build_id("a_new_name", reset=True)
        assert config.work_dir != work_path
        assert not os.path.exists(work_path)
        assert len(os.listdir(config.work_dir)) > 0
Example #9
def bundle_conda(metadata, initial_files, env, files_selector=None):

    files = post_process_files(metadata, initial_files)

    # first filter is so that info_files does not pick up ignored files
    files = utils.filter_files(files, prefix=metadata.config.host_prefix)
    if files_selector:
        files = select_files(files, files_selector.get("include"),
                             files_selector.get("exclude"))

    console.print(f"\n[yellow]Adding files for {metadata.name()}[/yellow]\n")
    if files:
        for f in sorted(files):
            console.print(f"- {f}")
    else:
        console.print(
            f"[red]ATTENTION: No files added in target [bold]{metadata.name()}[/bold][/red]"
        )
    console.print("\n")

    # this is also copying things like run_test.sh into info/recipe
    utils.rm_rf(os.path.join(metadata.config.info_dir, "test"))

    output = {}

    with tmp_chdir(metadata.config.host_prefix):
        output["checksums"] = create_info_files(
            metadata, files, prefix=metadata.config.host_prefix)

    # here we add the info files into the prefix, so we want to re-collect the files list
    prefix_files = set(utils.prefix_files(metadata.config.host_prefix))
    files = utils.filter_files(prefix_files - initial_files,
                               prefix=metadata.config.host_prefix)
    if files_selector:
        include_files = files_selector.get("include")
        if include_files:
            include_files = include_files + ["info/*"]  # avoid mutating the selector's own list
        files = select_files(files, include_files,
                             files_selector.get("exclude"))

    basename = metadata.dist()
    tmp_archives = []
    final_outputs = []
    ext = ".tar.bz2"
    if (output.get("type") == "conda_v2"
            or metadata.config.conda_pkg_format == "2"):
        ext = ".conda"

    with TemporaryDirectory() as tmp:
        conda_package_handling.api.create(metadata.config.host_prefix,
                                          files,
                                          basename + ext,
                                          out_folder=tmp)
        tmp_archives = [os.path.join(tmp, basename + ext)]

        # we're done building, perform some checks
        for tmp_path in tmp_archives:
            #     if tmp_path.endswith('.tar.bz2'):
            #         tarcheck.check_all(tmp_path, metadata.config)
            output_filename = os.path.basename(tmp_path)

            #     # we do the import here because we want to respect logger level context
            #     try:
            #         from conda_verify.verify import Verify
            #     except ImportError:
            #         Verify = None
            #         log.warn("Importing conda-verify failed.  Please be sure to test your packages.  "
            #             "conda install conda-verify to make this message go away.")
            #     if getattr(metadata.config, "verify", False) and Verify:
            #         verifier = Verify()
            #         checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) +
            #                             metadata.ignore_verify_codes())
            #         try:
            #             verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore,
            #                                     exit_on_error=metadata.config.exit_on_verify_error)
            #         except KeyError as e:
            #             log.warn("Package doesn't have necessary files.  It might be too old to inspect."
            #                      "Legacy noarch packages are known to fail.  Full message was {}".format(e))
            try:
                crossed_subdir = metadata.config.target_subdir
            except AttributeError:
                crossed_subdir = metadata.config.host_subdir
            subdir = ("noarch" if (metadata.noarch or metadata.noarch_python)
                      else crossed_subdir)
            if metadata.config.output_folder:
                output_folder = os.path.join(metadata.config.output_folder,
                                             subdir)
            else:
                output_folder = os.path.join(
                    os.path.dirname(metadata.config.bldpkgs_dir), subdir)
            final_output = os.path.join(output_folder, output_filename)
            if os.path.isfile(final_output):
                utils.rm_rf(final_output)

            # disable locking here; it's just a temp folder getting locked,
            # and locking it proved to be a major bottleneck.
            utils.copy_into(tmp_path,
                            final_output,
                            metadata.config.timeout,
                            locking=False)
            final_outputs.append(final_output)

    update_index(os.path.dirname(output_folder),
                 verbose=metadata.config.debug,
                 threads=1)

    # clean out host prefix so that this output's files don't interfere with other outputs
    # We have a backup of how things were before any output scripts ran.  That's
    # restored elsewhere.
    if metadata.config.keep_old_work:
        prefix = metadata.config.host_prefix
        dest = os.path.join(
            os.path.dirname(prefix),
            "_".join(("_h_env_moved", metadata.dist(),
                      metadata.config.host_subdir)),
        )
        console.print("Renaming host env directory, ", prefix, " to ", dest)
        if os.path.exists(dest):
            utils.rm_rf(dest)
        shutil.move(prefix, dest)
    else:
        utils.rm_rf(metadata.config.host_prefix)

    return final_outputs
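The archive itself is produced by conda-package-handling, invoked above with the host prefix, the prefix-relative file list, the archive filename, and a destination folder. A minimal sketch of the same call shape with a throwaway prefix (assumes conda-package-handling is installed; the paths and package name are made up):

import os
import tempfile
import conda_package_handling.api as cph

prefix = tempfile.mkdtemp()                   # stand-in for the host prefix
os.makedirs(os.path.join(prefix, "bin"))
with open(os.path.join(prefix, "bin", "tool"), "w") as f:
    f.write("#!/bin/sh\necho hi\n")

out_folder = tempfile.mkdtemp()
# prefix, files relative to it, archive filename, destination folder
cph.create(prefix, ["bin/tool"], "mypkg-1.0-0.tar.bz2", out_folder=out_folder)
assert os.path.isfile(os.path.join(out_folder, "mypkg-1.0-0.tar.bz2"))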
Example #10
def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None, version=None,
                git_tag=None, cran_url=None, recursive=False, archive=True,
                version_compare=False, update_policy='', r_interp='r-base', use_binaries_ver=None,
                use_noarch_generic=False, use_rtools_win=False, config=None,
                variant_config_files=None):

    output_dir = realpath(output_dir)
    config = get_or_merge_config(config, variant_config_files=variant_config_files)

    if not cran_url:
        with TemporaryDirectory() as t:
            _variant = get_package_variants(t, config)[0]
        cran_url = ensure_list(_variant.get('cran_mirror', DEFAULT_VARIANTS['cran_mirror']))[0]

    if len(in_packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if update_policy == 'error' and not in_packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}
    package_list = []

    cran_url = cran_url.rstrip('/')
    cran_metadata = get_cran_metadata(cran_url, output_dir)

    # r_recipes_in_output_dir = []
    # recipes = listdir(output_dir)
    # for recipe in recipes:
    #     if not recipe.startswith('r-') or not isdir(recipe):
    #         continue
    #     r_recipes_in_output_dir.append(recipe)

    for package in in_packages:
        inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package)
        if inputs_dict:
            package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}})

    for package_name, package_dict in package_dicts.items():
        package_list.append(package_name)

    while package_list:
        inputs = package_dicts[package_list.pop()]['inputs']
        location = inputs['location']
        pkg_name = inputs['pkg-name']
        is_github_url = location and 'github.com' in location
        is_tarfile = location and isfile(location) and tarfile.is_tarfile(location)
        url = inputs['location']

        dir_path = inputs['new-location']
        print("Making/refreshing recipe for {}".format(pkg_name))

        # Bodges GitHub packages into cran_metadata
        if is_github_url or is_tarfile:
            rm_rf(config.work_dir)
            if is_github_url:
                m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config)
                source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
                new_git_tag = git_tag if git_tag else get_latest_git_tag(config)
                p = subprocess.Popen(['git', 'checkout', new_git_tag], stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, cwd=config.work_dir)
                stdout, stderr = p.communicate()
                stdout = stdout.decode('utf-8')
                stderr = stderr.decode('utf-8')
                if p.returncode:
                    sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                             (new_git_tag, stderr.strip()))
                if stdout:
                    print(stdout, file=sys.stdout)
                if stderr:
                    print(stderr, file=sys.stderr)
            else:
                m = metadata.MetaData.fromdict({'source': {'url': location}}, config=config)
                source.unpack(m.get_section('source'), m.config.work_dir, m.config.src_cache,
                              output_dir, m.config.work_dir)
            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)"
                                 % (location, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d
        else:
            package = pkg_name

        if pkg_name not in cran_metadata:
            sys.exit("Package %s not found" % pkg_name)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url and not is_tarfile:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        cran_package = cran_metadata[package.lower()]

        package_dicts[package.lower()].update(
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'patches': '',
                'build_number': 0,
                'build_depends': '',
                'host_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })
        d = package_dicts[package.lower()]
        d['binary1'] = ''
        d['binary2'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot contain '-'; conda's verlib treats '_' as '.'.
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        patches = []
        script_env = []
        extra_recipe_maintainers = []
        build_number = 0
        if update_policy.startswith('merge') and inputs['old-metadata']:
            m = inputs['old-metadata']
            patches = make_array(m, 'source/patches')
            script_env = make_array(m, 'build/script_env')
            extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer)
            if m.version() == d['conda_version']:
                build_number = int(m.get_value('build/number', 0))
                build_number += 1 if update_policy == 'merge-incr-build-num' else 0
        if add_maintainer:
            new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT,
                                                               add_maintainer=add_maintainer)
            if new_maintainer not in extra_recipe_maintainers:
                if not len(extra_recipe_maintainers):
                    # We hit this case when there is no existing recipe.
                    extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True)
                extra_recipe_maintainers.append(new_maintainer)
        if len(extra_recipe_maintainers):
            # slice-assign so the tail is actually sorted; a bare [1:].sort()
            # would only sort a throwaway copy
            extra_recipe_maintainers[1:] = sorted(extra_recipe_maintainers[1:])
            extra_recipe_maintainers.insert(0, "extra:\n  ")
        d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers)
        d['patches'] = ''.join(patches)
        d['script_env'] = ''.join(script_env)
        d['build_number'] = build_number

        cached_path = None
        cran_layout = {'source': {'selector': '{others}',
                                  'dir': 'src/contrib/',
                                  'ext': '.tar.gz',
                                  # If we had platform filters we would change this to:
                                  # build_for_linux or is_github_url or is_tarfile
                                  'use_this': True},
                       'win-64': {'selector': 'win64',
                                  'dir': 'bin/windows/contrib/{}/'.format(use_binaries_ver),
                                  'ext': '.zip',
                                  'use_this': bool(use_binaries_ver)},
                       'osx-64': {'selector': 'osx',
                                  'dir': 'bin/macosx/el-capitan/contrib/{}/'.format(
                                      use_binaries_ver),
                                  'ext': '.tgz',
                                  'use_this': bool(use_binaries_ver)}}
        available = {}
        for archive_type, archive_details in iteritems(cran_layout):
            contrib_url = ''
            if archive_details['use_this']:
                if is_tarfile:
                    filename = basename(location)
                    contrib_url = relpath(location, dir_path)
                    contrib_url_rendered = package_url = contrib_url
                    sha256 = hashlib.sha256()
                    cached_path = location
                elif not is_github_url:
                    filename_rendered = '{}_{}{}'.format(
                        package, d['cran_version'], archive_details['ext'])
                    filename = '{}_{{{{ version }}}}'.format(package) + archive_details['ext']
                    contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir'])
                    contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir'])
                    package_url = contrib_url_rendered + filename_rendered
                    sha256 = hashlib.sha256()
                    print("Downloading {} from {}".format(archive_type, package_url))
                    # We may need to inspect the file later to determine which compilers are needed.
                    cached_path, _ = source.download_to_cache(
                        config.src_cache, '',
                        {'url': package_url, 'fn': archive_type + '-' + filename_rendered})
                available_details = {}
                available_details['selector'] = archive_details['selector']
                if cached_path:
                    with open(cached_path, 'rb') as f:
                        sha256.update(f.read())
                    available_details['filename'] = filename
                    available_details['contrib_url'] = contrib_url
                    available_details['contrib_url_rendered'] = contrib_url_rendered
                    available_details['cranurl'] = package_url
                    available_details['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())
                    available_details['cached_path'] = cached_path
                # This is rubbish; d[] should be renamed global[] and should be
                #      merged into source and binaryN.
                if archive_type == 'source':
                    if is_github_url:
                        available_details['url_key'] = ''
                        available_details['fn_key'] = ''
                        available_details['git_url_key'] = 'git_url:'
                        available_details['git_tag_key'] = 'git_tag:'
                        hash_msg = '# You can add a hash for the file here, (md5, sha1 or sha256)'
                        available_details['hash_entry'] = hash_msg
                        available_details['filename'] = ''
                        available_details['cranurl'] = ''
                        available_details['git_url'] = url
                        available_details['git_tag'] = new_git_tag
                        available_details['archive_keys'] = ''
                    else:
                        available_details['url_key'] = 'url:'
                        available_details['fn_key'] = 'fn:'
                        available_details['git_url_key'] = ''
                        available_details['git_tag_key'] = ''
                        available_details['cranurl'] = ' ' + contrib_url + filename
                        available_details['git_url'] = ''
                        available_details['git_tag'] = ''
                available_details['patches'] = d['patches']
                available[archive_type] = available_details

        # Figure out the selectors according to what is available.
        _all = ['linux', 'win32', 'win64', 'osx']
        from_source = _all[:]
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type != 'source':
                sel = archive_details['selector']
                from_source.remove(sel)
                binary_id += 1
            else:
                for k, v in iteritems(archive_details):
                    d[k] = v
        if from_source == _all:
            sel_src = ""
            sel_src_and_win = '  # [win]'
            sel_src_not_win = '  # [not win]'
        else:
            sel_src = '  # [' + ' or '.join(from_source) + ']'
            sel_src_and_win = '  # [' + ' or '.join(fs for fs in from_source if
                                                    fs.startswith('win')) + ']'
            sel_src_not_win = '  # [' + ' or '.join(fs for fs in from_source if not
                                                    fs.startswith('win')) + ']'

        d['sel_src'] = sel_src
        d['sel_src_and_win'] = sel_src_and_win
        d['sel_src_not_win'] = sel_src_not_win

        if 'source' in available:
            available_details = available['source']
            available_details['sel'] = sel_src
            filename = available_details['filename']
            if 'contrib_url' in available_details:
                contrib_url = available_details['contrib_url']
                if archive:
                    if is_tarfile:
                        available_details['cranurl'] = (INDENT + contrib_url)
                    else:
                        available_details['cranurl'] = (INDENT + contrib_url +
                            filename + sel_src + INDENT + contrib_url +
                            'Archive/{}/'.format(package) + filename + sel_src)
                else:
                    available_details['cranurl'] = ' ' + contrib_url + filename + sel_src
            if not is_github_url:
                available_details['archive_keys'] = '{fn_key} {filename} {sel}\n' \
                                                    '  {url_key}{sel}' \
                                                    '    {cranurl}\n' \
                                                    '  {hash_entry}{sel}'.format(
                    **available_details)

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in
            cran_package['orig_lines'] if l])

        # Render the source and binaryN keys
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type == 'source':
                d['source'] = SOURCE_META.format(**archive_details)
            else:
                archive_details['sel'] = '  # [' + archive_details['selector'] + ']'
                d['binary' + str(binary_id)] = BINARY_META.format(**archive_details)
                binary_id += 1

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            if is_github_url:
                d['homeurl'] = ' {}'.format(location)
            else:
                d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes':
            d['noarch_generic'] = ''
        else:
            d['noarch_generic'] = 'noarch: generic'

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends',
            '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports',
            '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo",
            '').split(',') if s.strip()]

        dep_dict = {}

        seen = set()
        for s in list(chain(imports, depends, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                    (package, s))
            name = match.group('name')
            if name in seen:
                continue
            seen.add(name)
            archs = match.group('archs')
            relop = match.group('relop') or ''
            ver = match.group('version') or ''
            ver = ver.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or ver

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=ver)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        need_git = is_github_url
        if cran_package.get("NeedsCompilation", 'no') == 'yes':
            with tarfile.open(available['source']['cached_path']) as tf:
                need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf])
                # Fortran builds use CC to perform the link (they do not call the linker directly).
                need_c = True if need_f else \
                    any([f.name.lower().endswith('.c') for f in tf])
                need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++'))
                                         for f in tf])
                need_autotools = any([f.name.lower().endswith('/configure') for f in tf])
                need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \
                    any([f.name.lower().endswith(('/makefile', '/makevars'))
                        for f in tf])
        else:
            need_c = need_cxx = need_f = need_autotools = need_make = False

        if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict:
            need_cxx = True

        if need_cxx:
            need_c = True

        for dep_type in ['build', 'host', 'run']:

            deps = []
            # Put non-R dependencies first.
            if dep_type == 'build':
                if need_c:
                    deps.append("{indent}{{{{ compiler('c') }}}}      {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_cxx:
                    deps.append("{indent}{{{{ compiler('cxx') }}}}    {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_f:
                    deps.append("{indent}{{{{ compiler('fortran') }}}}{sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if use_rtools_win:
                    need_c = need_cxx = need_f = need_autotools = need_make = False
                    deps.append("{indent}{{{{native}}}}rtools      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{native}}}}extsoft     {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}toolchain      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_autotools or need_make or need_git:
                    deps.append("{indent}{{{{posix}}}}filesystem      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_git:
                    deps.append("{indent}{{{{posix}}}}git".format(indent=INDENT))
                if need_autotools:
                    deps.append("{indent}{{{{posix}}}}sed             {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}grep            {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}autoconf        {sel}".format(
                        indent=INDENT, sel=sel_src))
                    deps.append("{indent}{{{{posix}}}}automake-wrapper{sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}automake        {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}pkg-config".format(indent=INDENT))
                if need_make:
                    deps.append("{indent}{{{{posix}}}}make            {sel}".format(
                        indent=INDENT, sel=sel_src))
            elif dep_type == 'run':
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}gcc-libs       {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))

            if dep_type == 'host' or dep_type == 'run':
                for name in sorted(dep_dict):
                    if name in R_BASE_PACKAGE_NAMES:
                        continue
                    if name == 'R':
                        # Put R first
                        # Regardless of build or run, and whether this is a
                        # recommended package or not, it can only depend on
                        # r_interp since anything else can and will cause
                        # cycles in the dependency graph. The cran metadata
                        # lists all dependencies anyway, even those packages
                        # that are in the recommended group.
                        # We don't include any R version restrictions because
                        # conda-build always pins r-base and mro-base version.
                        deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_interp))
                    else:
                        conda_name = 'r-' + name.lower()

                        if dep_dict[name]:
                            deps.append('{indent}{name} {version}'.format(name=conda_name,
                                version=dep_dict[name], indent=INDENT))
                        else:
                            deps.append('{indent}{name}'.format(name=conda_name,
                                indent=INDENT))
                        if recursive:
                            lower_name = name.lower()
                            if lower_name not in package_dicts:
                                inputs_dict = package_to_inputs_dict(output_dir, output_suffix,
                                                                     git_tag, lower_name)
                                assert lower_name == inputs_dict['pkg-name'], \
                                    "name %s != inputs_dict['pkg-name'] %s" % (
                                        lower_name, inputs_dict['pkg-name'])
                                assert lower_name not in package_list
                                package_dicts.update({lower_name: {'inputs': inputs_dict}})
                                package_list.append(lower_name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        dir_path = d['inputs']['new-location']
        if exists(dir_path) and not version_compare:
            if update_policy == 'error':
                raise RuntimeError("directory already exists "
                                   "(and --update-policy is 'error'): %s" % dir_path)
            elif update_policy == 'overwrite':
                rm_rf(dir_path)
        elif update_policy == 'skip-up-to-date' and up_to_date(cran_metadata,
                                                               d['inputs']['old-metadata']):
            continue
        elif update_policy == 'skip-existing' and d['inputs']['old-metadata']:
            continue

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore')
             .decode() for k, v in iteritems(d)}
        try:
            makedirs(dir_path)
        except OSError:
            pass  # directory may already exist
        print("Writing recipe for %s" % package.lower())
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(clear_whitespace(CRAN_META.format(**d)))
        if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite':
            with open(join(dir_path, 'build.sh'), 'w') as f:
                if from_source == _all:
                    f.write(CRAN_BUILD_SH_SOURCE.format(**d))
                elif from_source == []:
                    f.write(CRAN_BUILD_SH_BINARY.format(**d))
                else:
                    tpbt = [target_platform_bash_test_by_sel[t] for t in from_source]
                    d['source_pf_bash'] = ' || '.join(['[[ $target_platform ' + s + ' ]]'
                                                  for s in tpbt])
                    f.write(CRAN_BUILD_SH_MIXED.format(**d))

        if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite':
            with open(join(dir_path, 'bld.bat'), 'w') as f:
                if len([fs for fs in from_source if fs.startswith('win')]) == 2:
                    f.write(CRAN_BLD_BAT_SOURCE.format(**d))
                else:
                    f.write(CRAN_BLD_BAT_MIXED.format(**d))
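The dependency loop in skeletonize consumes the named groups 'name', 'archs', 'relop' and 'version' from VERSION_DEPENDENCY_REGEX, whose definition is not shown in this excerpt. The pattern below is a hypothetical stand-in with the same group names, sufficient for typical CRAN Depends/Imports entries such as "R (>= 3.5.0)":

import re

# Assumption: the real VERSION_DEPENDENCY_REGEX differs, but it exposes
# these same named groups, which is all the parsing loop relies on.
DEP_RE = re.compile(
    r'^\s*(?P<name>[A-Za-z][A-Za-z0-9.]*)'    # package name, e.g. 'Rcpp'
    r'\s*(?:\(\s*(?P<relop>[<>=!]=?)\s*'      # optional constraint '(>= 1.0)'
    r'(?P<version>[0-9][0-9._-]*)\s*\))?'
    r'\s*(?P<archs>\[[^\]]+\])?\s*$'          # optional arch qualifier
)

for raw in ['R (>= 3.5.0)', 'Rcpp', 'methods']:
    m = DEP_RE.match(raw)
    print(m.group('name'), m.group('relop') or '', m.group('version') or '')
# prints: 'R >= 3.5.0', then 'Rcpp', then 'methods'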