def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for osx-64.

    Computes the build matrix under the osx-64 subdir, keeps only the
    non-skipped cases, and either renders the Travis template or removes
    the file (setting ``forge_config["travis"]["enabled"]`` accordingly).
    """
    pkg_meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(pkg_meta)):
        pkg_meta.parse_again()
        full_matrix = compute_build_matrix(pkg_meta, forge_config.get('matrix'))

        kept_cases = []
        for build_case in full_matrix:
            case_pkgs, case_vars = split_case(build_case)
            with enable_vars(case_vars):
                if ResolvedDistribution(pkg_meta, case_pkgs).skip():
                    continue
                kept_cases.append(case_vars + sorted(case_pkgs))
        matrix = sorted(kept_cases, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if matrix:
        # At least one case survives: enable Travis and render the template.
        forge_config["travis"]["enabled"] = True
        env_matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, env_matrix)
        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
    else:
        # Nothing to build on this platform; drop the CI file entirely.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
Example #2
0
def render_README(jinja_env, forge_config, forge_dir):
    """Render the feedstock ``README.md`` from recipe metadata."""
    # Rendering is kept cheap here: no finalize, no environment check —
    # only the metadata objects matter for the README.
    rendered = conda_build.api.render(
        os.path.join(forge_dir, "recipe"),
        exclusive_config_file=forge_config["exclusive_config_file"],
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        trim_skip=False,
    )

    first_meta = rendered[0][0]
    extra_section = first_meta.meta["extra"]
    if "parent_recipe" in extra_section:
        # Multi-output recipes report the parent recipe's name.
        package_name = extra_section["parent_recipe"]["name"]
    else:
        package_name = first_meta.name()

    maintainers = set()
    for entry in rendered:
        maintainers.update(entry[0].meta["extra"].get("recipe-maintainers", []))

    forge_config["noarch_python"] = all(entry[0].noarch for entry in rendered)
    forge_config["package"] = first_meta
    forge_config["package_name"] = package_name
    # Sorted, de-duplicated output names.
    forge_config["outputs"] = sorted({entry[0].name() for entry in rendered})
    forge_config["maintainers"] = sorted(maintainers)

    target_fname = os.path.join(forge_dir, "README.md")
    template = jinja_env.get_template("README.md.tmpl")
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) ``.travis.yml`` for the osx-64 build matrix.

    Sets ``forge_config["travis"]["enabled"]`` based on whether any
    matrix case survives the skip checks.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        # Keep only the matrix cases whose resolved distribution is not
        # skipped for this platform.
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Example #4
0
def dump_subspace_config_files(metas, root_path, output_name):
    """With conda-build 3, it handles the build matrix.  We take what it spits out, and write a
    config.yaml file for each matrix entry that it spits out.  References to a specific file
    replace all of the old environment variables that specified a matrix entry."""

    # Anything considered "top-level" becomes its own CI job, so figure
    # out how the complete set of used variables splits up.
    variant_configs, top_level_loop_vars = _collapse_subpackage_variants(metas)

    # Dump sets/tuples as plain YAML lists instead of python object tags.
    for special_type in (set, tuple):
        yaml.add_representer(
            special_type, yaml.representer.SafeRepresenter.represent_list)

    ci_support_dir = os.path.join(root_path, '.ci_support')
    subdir = metas[0].config.subdir

    entries = []
    for variant in variant_configs:
        name = '{}_{}'.format(
            output_name, package_key(variant, top_level_loop_vars, subdir))
        if not os.path.isdir(ci_support_dir):
            os.makedirs(ci_support_dir)

        with write_file(os.path.join(ci_support_dir, name) + '.yaml') as f:
            yaml.dump(variant, f, default_flow_style=False)

        # target_platform falls back to the output name when the variant
        # does not pin one explicitly.
        entries.append((name, variant.get("target_platform", [output_name])[0]))
    return sorted(entries)
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render (or remove) ``appveyor.yml`` across the win-32/win-64 matrix."""
    pkg_meta = forge_config['package']
    combined = []
    for subdir_name, arch_label in (('win-32', 'x86'), ('win-64', 'x64')):
        with fudge_subdir(subdir_name, build_config=meta_config(pkg_meta)):
            pkg_meta.parse_again()
            platform_matrix = compute_build_matrix(
                pkg_meta, forge_config.get('matrix'))

            survivors = []
            for build_case in platform_matrix:
                case_pkgs, case_vars = split_case(build_case)
                with enable_vars(case_vars):
                    if ResolvedDistribution(pkg_meta, case_pkgs).skip():
                        continue
                    survivors.append(case_vars + sorted(case_pkgs))
            if survivors:
                # Tag every surviving case with its AppVeyor architecture.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch_label)
                combined.extend(
                    [arch_env] + list(build_case) for build_case in survivors)

    matrix = sorted(combined, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if matrix:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
    else:
        # No buildable cases at all, so drop appveyor.yml if present.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render (or remove) ``appveyor.yml`` for the win-32/win-64 matrix.

    Besides computing the build matrix, this injects a
    ``CONDA_INSTALL_LOCN`` entry per case pointing at the AppVeyor-provided
    Miniconda install matching the case's Python version and architecture.
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            matrix = compute_build_matrix(meta, forge_config.get('matrix'))

            # Drop cases whose resolved distribution is skipped on this
            # platform.
            cases_not_skipped = []
            for case in matrix:
                pkgs, vars = split_case(case)
                with enable_vars(vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        cases_not_skipped.append(vars + sorted(pkgs))
            if cases_not_skipped:
                # Tag every surviving case with its AppVeyor architecture.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(case)
                                   for case in cases_not_skipped)

    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location.
        matrix, old_matrix = [], matrix
        for case in old_matrix:
            case = odict(case)

            # Set `root`'s `python` version.
            # NOTE(review): the "27" branch below appends the empty string,
            # i.e. it deliberately keeps the default Miniconda path.
            case["CONDA_INSTALL_LOCN"] = "C:\\\\Miniconda"
            if case.get("CONDA_PY") == "27":
                case["CONDA_INSTALL_LOCN"] += ""
            elif case.get("CONDA_PY") == "34":
                case["CONDA_INSTALL_LOCN"] += "3"
            elif case.get("CONDA_PY") == "35":
                case["CONDA_INSTALL_LOCN"] += "35"

            # Set architecture.
            # (The "x86" branch likewise keeps the 32-bit default path.)
            if case.get("TARGET_ARCH") == "x86":
                case["CONDA_INSTALL_LOCN"] += ""
            if case.get("TARGET_ARCH") == "x64":
                case["CONDA_INSTALL_LOCN"] += "-x64"

            matrix.append(list(case.items()))
        del old_matrix

        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Example #7
0
def _render_template_exe_files(forge_config, target_dir, jinja_env, template_files):
    """Render each ``*.tmpl`` template into *target_dir* and mark it executable."""
    suffix_len = len('.tmpl')
    for tmpl_name in template_files:
        rendered = jinja_env.get_template(tmpl_name).render(**forge_config)
        out_path = os.path.join(target_dir, tmpl_name[:-suffix_len])
        with write_file(out_path) as fh:
            fh.write(rendered)
        # Shell scripts produced from templates must be executable.
        set_exe_file(out_path, True)
def render_README(jinja_env, forge_config, forge_dir):
    """Render the feedstock ``README.md``.

    Collects the package name, outputs, maintainers and CI variant names
    from the rendered recipe metadata, optionally resolves the Azure
    build id over the network, then writes the README from its template.
    """
    # we only care about the first metadata object for sake of readme
    metas = conda_build.api.render(
        os.path.join(forge_dir, "recipe"),
        exclusive_config_file=forge_config["exclusive_config_file"],
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        trim_skip=False,
    )

    if "parent_recipe" in metas[0][0].meta["extra"]:
        # Multi-output recipes are labelled with the parent recipe's name.
        package_name = metas[0][0].meta["extra"]["parent_recipe"]["name"]
    else:
        package_name = metas[0][0].name()

    # Every *.yaml file in .ci_support corresponds to one build variant.
    ci_support_path = os.path.join(forge_dir, ".ci_support")
    variants = []
    if os.path.exists(ci_support_path):
        for filename in os.listdir(ci_support_path):
            if filename.endswith('.yaml'):
                variant_name, _ = os.path.splitext(filename)
                variants.append(variant_name)

    template = jinja_env.get_template("README.md.tmpl")
    target_fname = os.path.join(forge_dir, "README.md")
    forge_config["noarch_python"] = all(meta[0].noarch for meta in metas)
    forge_config["package"] = metas[0][0]
    forge_config["package_name"] = package_name
    forge_config["variants"] = sorted(variants)
    forge_config["outputs"] = sorted(
        list(OrderedDict((meta[0].name(), None) for meta in metas)))
    forge_config["maintainers"] = sorted(
        set(
            chain.from_iterable(
                meta[0].meta["extra"].get("recipe-maintainers", [])
                for meta in metas)))

    if forge_config['azure'].get('build_id') is None:
        # Try to retrieve the build_id from the interwebs.  This is
        # best-effort: any network/parsing problem leaves build_id unset
        # rather than failing the render.
        try:
            import requests
            resp = requests.get(
                "https://dev.azure.com/{org}/{project_name}/_apis/build/definitions?name={repo}"
                .format(org=forge_config["azure"]["user_or_org"],
                        project_name=forge_config["azure"]["project_name"],
                        repo=forge_config["github"]["repo_name"]),
                timeout=30)
            build_def = resp.json()["value"][0]
            forge_config['azure']['build_id'] = build_def['id']
        except (ImportError, IndexError, KeyError, ValueError, IOError):
            # Missing requests, unexpected payload shape, JSON decode
            # failure, or network error: leave build_id unresolved.
            pass

    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
Example #9
0
def main(forge_file_directory, no_check_uptodate, commit,
         exclusive_config_file):
    """Re-render a feedstock directory.

    Parameters
    ----------
    forge_file_directory : path of the feedstock to render.
    no_check_uptodate : when true, out-of-date smithy/pinning checks only
        warn instead of raising.
    commit : passed through to ``commit_changes``.
    exclusive_config_file : optional path (relative to the feedstock) of a
        conda-build config file; when omitted the conda-forge pinning file
        is located automatically.
    """
    # Warnings are fatal unless the up-to-date check is disabled.
    error_on_warn = not no_check_uptodate
    index = conda_build.conda_interface.get_index(channel_urls=['conda-forge'])
    r = conda_build.conda_interface.Resolve(index)

    # Check that conda-smithy is up-to-date
    check_version_uptodate(r, "conda-smithy", __version__, error_on_warn)

    forge_dir = os.path.abspath(forge_file_directory)

    if exclusive_config_file is not None:
        exclusive_config_file = os.path.join(forge_dir, exclusive_config_file)
        if not os.path.exists(exclusive_config_file):
            raise RuntimeError("Given exclusive-config-file not found.")
        cf_pinning_ver = None
    else:
        exclusive_config_file, cf_pinning_ver = get_cfp_file_path(
            r, error_on_warn)

    config = _load_forge_config(forge_dir, exclusive_config_file)

    # `enabled` is derived state, not user-settable: strip it and warn.
    for each_ci in ["travis", "circle", "appveyor"]:
        if config[each_ci].pop("enabled", None):
            warnings.warn(
                "It is not allowed to set the `enabled` parameter for `%s`."
                " All CIs are enabled by default. To disable a CI, please"
                " add `skip: true` to the `build` section of `meta.yaml`"
                " and an appropriate selector so as to disable the build." \
                % each_ci
            )

    tmplt_dir = os.path.join(conda_forge_content, 'templates')
    # Load templates from the feedstock in preference to the smithy's templates.
    env = Environment(extensions=['jinja2.ext.do'],
                      loader=FileSystemLoader(
                          [os.path.join(forge_dir, 'templates'), tmplt_dir]))

    copy_feedstock_content(forge_dir)

    render_circle(env, config, forge_dir)
    render_travis(env, config, forge_dir)
    render_appveyor(env, config, forge_dir)
    render_azure(env, config, forge_dir)
    render_README(env, config, forge_dir)

    if os.path.isdir(os.path.join(forge_dir, '.ci_support')):
        with write_file(os.path.join(forge_dir, '.ci_support', 'README')) as f:
            f.write(
                "This file is automatically generated by conda-smithy.  To change "
                "any matrix elements, you should change conda-smithy's input "
                "conda_build_config.yaml and re-render the recipe, rather than editing "
                "these files directly.")

    commit_changes(forge_file_directory, commit, __version__, cf_pinning_ver,
                   conda_build_version)
Example #10
0
def render_README(jinja_env, forge_config, forge_dir):
    """Render the feedstock ``README.md`` from its template.

    Sets ``forge_config['noarch_python']`` from the package metadata's
    ``noarch`` attribute before rendering.
    """
    meta = forge_config['package']
    template = jinja_env.get_template('README.md.tmpl')
    target_fname = os.path.join(forge_dir, 'README.md')
    # bool() collapses the original if/else: any truthy noarch value
    # means a noarch-python feedstock.
    forge_config['noarch_python'] = bool(meta.noarch)
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
Example #11
0
def render_README(jinja_env, forge_config, forge_dir):
    """Render the feedstock ``README.md`` from the recipe's metadata."""
    rendered = conda_build.api.render(
        os.path.join(forge_dir, 'recipe'),
        exclusive_config_file=forge_config['exclusive_config_file'],
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        trim_skip=False,
    )
    first_meta = rendered[0][0]
    forge_config['noarch_python'] = all(m[0].noarch for m in rendered)
    forge_config['package'] = first_meta
    forge_config['package_name'] = (
        first_meta.meta['extra']['parent_recipe']['name'])
    # Sorted, de-duplicated output names.
    forge_config['outputs'] = sorted(
        list(OrderedDict((m[0].name(), None) for m in rendered)))
    out_path = os.path.join(forge_dir, 'README.md')
    with write_file(out_path) as fh:
        fh.write(jinja_env.get_template('README.md.tmpl').render(**forge_config))
def render_README(jinja_env, forge_config, forge_dir):
    """Render ``README.md`` using only the first rendered metadata object."""
    first_meta = conda_build.api.render(
        os.path.join(forge_dir, 'recipe'),
        exclusive_config_file=forge_config['exclusive_config_file'],
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        trim_skip=False)[0][0]
    forge_config['noarch_python'] = first_meta.noarch
    forge_config['package'] = first_meta
    readme_path = os.path.join(forge_dir, 'README.md')
    readme_template = jinja_env.get_template('README.md.tmpl')
    with write_file(readme_path) as fh:
        fh.write(readme_template.render(**forge_config))
def dump_subspace_config_files(metas, root_path, platform, arch, upload,
                               forge_config):
    """With conda-build 3, it handles the build matrix.  We take what it spits out, and write a
    config.yaml file for each matrix entry that it spits out.  References to a specific file
    replace all of the old environment variables that specified a matrix entry."""

    # identify how to break up the complete set of used variables.  Anything considered
    #     "top-level" should be broken up into a separate CI job.
    configs, top_level_loop_vars = _collapse_subpackage_variants(metas)

    # get rid of the special object notation in the yaml file for objects that we dump
    yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list)
    yaml.add_representer(tuple,
                         yaml.representer.SafeRepresenter.represent_list)
    yaml.add_representer(OrderedDict, _yaml_represent_ordereddict)

    platform_arch = "{}-{}".format(platform, arch)
    # 64-bit is the default, so its filenames omit the arch suffix.
    # (The unused `output_name` local from the original was removed.)
    if arch == "64":
        filename_arch = platform
    else:
        filename_arch = f"{platform}_{arch}"

    result = []
    for config in configs:
        config_name = "{}_{}".format(
            filename_arch,
            package_key(config, top_level_loop_vars, metas[0].config.subdir),
        )
        out_folder = os.path.join(root_path, ".ci_support")
        out_path = os.path.join(out_folder, config_name) + ".yaml"
        if not os.path.isdir(out_folder):
            os.makedirs(out_folder)

        config = finalize_config(config, platform, forge_config)

        with write_file(out_path) as f:
            yaml.dump(config, f, default_flow_style=False)

        # target_platform falls back to "<platform>-<arch>" when the
        # variant does not pin one explicitly.
        target_platform = config.get("target_platform", [platform_arch])[0]
        result.append((config_name, target_platform, upload, config))
    return sorted(result)
def render_circle(jinja_env, forge_config, forge_dir):
    """Render ``circle.yml`` for the linux-64 build matrix."""
    pkg_meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(pkg_meta)):
        pkg_meta.parse_again()
        raw_matrix = compute_build_matrix(pkg_meta, forge_config.get('matrix'))

        survivors = []
        for build_case in raw_matrix:
            case_pkgs, case_vars = split_case(build_case)
            with enable_vars(case_vars):
                if ResolvedDistribution(pkg_meta, case_pkgs).skip():
                    continue
                survivors.append(case_vars + sorted(case_pkgs))
        matrix = sorted(survivors, key=sort_without_target_arch)

    # Unlike travis/appveyor, circle.yml is always written, even when the
    # matrix is empty.
    out_path = os.path.join(forge_dir, 'circle.yml')
    forge_config = update_matrix(
        forge_config, prepare_matrix_for_env_vars(matrix))
    with write_file(out_path) as fh:
        fh.write(jinja_env.get_template('circle.yml.tmpl').render(**forge_config))
Example #15
0
def render_circle(jinja_env, forge_config, forge_dir):
    """Render ``circle.yml`` for the linux-64 build matrix.

    Unlike the travis/appveyor renderers, this always writes the file,
    even when the matrix is empty.
    """
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        # Keep only the cases whose resolved distribution is not skipped.
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'circle.yml')
    matrix = prepare_matrix_for_env_vars(matrix)
    forge_config = update_matrix(forge_config, matrix)
    template = jinja_env.get_template('circle.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
    def test_write_file(self):
        """write_file should create files (and parent dirs) whose content
        reads back identically, both on disk and — when a git repo is
        present — from the staged blob."""
        for tmp_dir, repo, pathfunc in parameterize():
            for filename in ["test.txt", "dir1/dir2/test.txt"]:
                filename = os.path.join(tmp_dir, filename)

                write_text = "text"

                with fio.write_file(pathfunc(filename)) as fh:
                    fh.write(write_text)
                if repo is not None:
                    # Stage the new file so it shows up in the index blobs.
                    repo.index.add([filename])

                read_text = ""
                with io.open(filename, "r", encoding="utf-8") as fh:
                    read_text = fh.read()

                self.assertEqual(write_text, read_text)

                if repo is not None:
                    # The staged blob must round-trip to the same text.
                    blob = next(repo.index.iter_blobs(BlobFilter(filename)))[1]
                    read_text = blob.data_stream[3].read().decode("utf-8")

                    self.assertEqual(write_text, read_text)
    def test_write_file(self):
        """write_file must create parent directories and produce content
        that reads back identically — on disk, and from the staged git
        blob when a repo is in play."""
        for tmp_dir, repo, pathfunc in parameterize():
            for rel_name in ["test.txt", "dir1/dir2/test.txt"]:
                full_path = os.path.join(tmp_dir, rel_name)
                expected = "text"

                with fio.write_file(pathfunc(full_path)) as fh:
                    fh.write(expected)
                if repo is not None:
                    repo.index.add([full_path])

                with io.open(full_path, "r", encoding="utf-8") as fh:
                    actual = fh.read()

                self.assertEqual(expected, actual)

                if repo is not None:
                    # The staged blob must round-trip to the same text.
                    blob = next(repo.index.iter_blobs(BlobFilter(full_path)))[1]
                    staged = blob.data_stream[3].read().decode("utf-8")

                    self.assertEqual(expected, staged)
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render (or remove) the Circle CI docker-build helper scripts.

    Computes the linux-64 build matrix; when empty, disables circle and
    removes the generated shell scripts, otherwise renders
    ``ci_support/run_docker_build.sh`` (honouring an optional
    ``recipe/yum_requirements.txt``) and marks the scripts executable.
    """
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        # Keep only the cases whose resolved distribution is not skipped.
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                # Ignore blank lines and '#' comments.
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            # An empty requirements file is almost certainly a mistake.
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup = textwrap.dedent("""\
                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))
            forge_config['build_setup'] = build_setup

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            mode = get_mode_file(each_target_fname)
            set_mode_file(
                each_target_fname,
                mode | stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
            )
Example #19
0
def _render_ci_provider(
    provider_name,
    jinja_env,
    forge_config,
    forge_dir,
    platforms,
    archs,
    fast_finish_text,
    platform_target_path,
    platform_template_file,
    platform_specific_setup,
    keep_noarchs=None,
    extra_platform_files={},
    upload_packages=[],
):
    """Shared driver that renders one CI provider's configuration file.

    Renders the recipe per platform/arch, prunes stale ``.ci_support``
    configs, toggles ``forge_config[provider_name]["enabled"]`` based on
    whether any platform has a non-skipped build, and finally writes
    ``platform_target_path`` from ``platform_template_file``.

    NOTE(review): ``extra_platform_files`` and ``upload_packages`` use
    mutable default arguments; they appear to be only read here, but
    confirm no caller relies on that.
    """
    if keep_noarchs is None:
        keep_noarchs = [False] * len(platforms)

    metas_list_of_lists = []
    enable_platform = [False] * len(platforms)
    for i, (platform, arch,
            keep_noarch) in enumerate(zip(platforms, archs, keep_noarchs)):
        metas = conda_build.api.render(
            os.path.join(forge_dir, "recipe"),
            exclusive_config_file=forge_config["exclusive_config_file"],
            platform=platform,
            arch=arch,
            permit_undefined_jinja=True,
            finalize=False,
            bypass_env_check=True,
            channel_urls=forge_config.get("channels", {}).get("sources", []),
        )
        # render returns some download & reparsing info that we don't care about
        metas = [m for m, _, _ in metas]

        if not keep_noarch:
            to_delete = []
            for idx, meta in enumerate(metas):
                if meta.noarch:
                    # do not build noarch, including noarch: python, packages on Travis CI.
                    to_delete.append(idx)
            # Delete from the end so earlier indices stay valid.
            for idx in reversed(to_delete):
                del metas[idx]

        # The platform is enabled as soon as one output is not skipped.
        for meta in metas:
            if not meta.skip():
                enable_platform[i] = True
        metas_list_of_lists.append(metas)

    if os.path.isdir(os.path.join(forge_dir, ".ci_support")):
        # Remove stale variant configs for this provider and its platforms
        # before regenerating them.
        configs = glob.glob(
            os.path.join(forge_dir, ".ci_support",
                         "{}_*".format(provider_name)))
        for config in configs:
            remove_file(config)

        for platform in platforms:
            configs = glob.glob(
                os.path.join(forge_dir, ".ci_support",
                             "{}_*".format(platform)))
            for config in configs:
                remove_file(config)

    if not any(enable_platform):
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config[provider_name]["enabled"] = False

        target_fnames = [platform_target_path]
        if extra_platform_files:
            for val in extra_platform_files.values():
                target_fnames.extend(val)
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config[provider_name]["enabled"] = True
        # Human-readable platform labels for the rendered config.
        fancy_name = {
            "linux": "Linux",
            "osx": "OSX",
            "win": "Windows",
            "linux_aarch64": "aarch64",
        }
        fancy_platforms = []
        unfancy_platforms = set()

        configs = []
        for metas, platform, arch, enable, upload in zip(
                metas_list_of_lists,
                platforms,
                archs,
                enable_platform,
                upload_packages,
        ):
            if enable:
                # NOTE(review): this call passes five arguments; the
                # six-argument dump_subspace_config_files elsewhere in this
                # file also expects forge_config — confirm which variant is
                # actually in scope here.
                configs.extend(
                    dump_subspace_config_files(metas, forge_dir, platform,
                                               arch, upload))

                plat_arch = (platform if arch == "64" else "{}_{}".format(
                    platform, arch))
                forge_config[plat_arch]["enabled"] = True

                fancy_platforms.append(fancy_name[platform])
                unfancy_platforms.add(plat_arch)
            elif platform in extra_platform_files:
                # Disabled platform: drop its extra generated files.
                for each_target_fname in extra_platform_files[platform]:
                    remove_file(each_target_fname)

        # Also drop extra files for platforms this provider doesn't cover.
        for key in extra_platform_files.keys():
            if key != "common" and key not in platforms:
                for each_target_fname in extra_platform_files[key]:
                    remove_file(each_target_fname)

        forge_config[provider_name]["platforms"] = ",".join(fancy_platforms)
        forge_config[provider_name]["all_platforms"] = list(unfancy_platforms)

        forge_config["configs"] = configs

        forge_config["fast_finish"] = _get_fast_finish_script(
            provider_name,
            forge_dir=forge_dir,
            forge_config=forge_config,
            fast_finish_text=fast_finish_text,
        )

        # If the recipe supplies its own upload_or_check_non_existence.py upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, "recipe",
                                    "upload_or_check_non_existence.py")
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                forge_config[
                    "upload_script"] = "/home/conda/recipe_root/upload_or_check_non_existence.py"
            elif provider_name == "travis":
                forge_config[
                    "upload_script"] = "{}/upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"])
            else:
                # Windows-style path for the remaining providers.
                forge_config[
                    "upload_script"] = "{}\\upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"])
        else:
            forge_config["upload_script"] = "upload_or_check_non_existence"

        # hook for extending with whatever platform specific junk we need.
        #     Function passed in as argument
        for platform, enable in zip(platforms, enable_platform):
            if enable:
                platform_specific_setup(
                    jinja_env=jinja_env,
                    forge_dir=forge_dir,
                    forge_config=forge_config,
                    platform=platform,
                )

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == "circle":
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    # TODO: azure-pipelines might need the same as circle
    return forge_config
Example #20
0
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render ``ci_support/run_docker_build.sh`` for linux-64 (CircleCI docker) builds.

    Computes the linux-64 build matrix for the feedstock's package; when every
    case is skipped, disables CircleCI in *forge_config* and removes the
    generated scripts.  Otherwise renders the docker build script, honouring a
    recipe-local ``run_conda_forge_build_setup_linux`` override, a recipe-local
    upload script, and ``yum_requirements.txt``.
    """
    meta = forge_config['package']
    # Evaluate the recipe as if on linux-64 so platform selectors/skips apply.
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell snippet injected into the rendered script.
        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_linux')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source /recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup

            """)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            # An empty (or fully commented-out) requirements file is an error.
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "/recipe_root/upload_or_check_non_existence.py"
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            set_exe_file(each_target_fname, True)
Beispiel #21
0
def render_README(jinja_env, forge_config, forge_dir):
    """Write the feedstock's README.md by rendering README.md.tmpl."""
    readme_template = jinja_env.get_template('README.md.tmpl')
    readme_path = os.path.join(forge_dir, 'README.md')
    with write_file(readme_path) as fh:
        fh.write(readme_template.render(**forge_config))
Beispiel #22
0
def _circle_specific_setup(jinja_env, forge_config, forge_dir, platform):
    """CircleCI-specific rendering for one platform ('linux', otherwise osx).

    Prepares the ``build_setup`` shell snippet (recipe-local override of the
    global run_conda_forge_build_setup, plus yum requirements on linux),
    renders the platform run script and the fast-finish script into
    ``.circleci/``, and marks the generated scripts executable.
    """
    # If the recipe supplies its own run_conda_forge_build_setup script_linux,
    # we use it instead of the global one.
    if platform == 'linux':
        cfbs_fpath = os.path.join(forge_dir, 'recipe', 'run_conda_forge_build_setup_linux')
    else:
        cfbs_fpath = os.path.join(forge_dir, 'recipe', 'run_conda_forge_build_setup_osx')

    build_setup = ""
    if os.path.exists(cfbs_fpath):
        if platform == 'linux':
            # Linux builds run inside docker, hence the /home/conda mount path.
            build_setup += textwrap.dedent("""\
                # Overriding global run_conda_forge_build_setup_linux with local copy.
                source /home/conda/recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                # Overriding global run_conda_forge_build_setup_osx with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
    else:
        build_setup += textwrap.dedent("""\
            source run_conda_forge_build_setup

        """)

    if platform == 'linux':
        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            # An empty (or fully commented-out) requirements file is an error.
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                /usr/bin/sudo -n yum install -y {}


            """.format(' '.join(requirements)))

    forge_config['build_setup'] = build_setup

    # Pick the run-script name matching the platform.
    if platform == 'linux':
        run_file_name = 'run_docker_build'
    else:
        run_file_name = 'run_osx_build'

    # TODO: Conda has a convenience for accessing nested yaml content.
    template = jinja_env.get_template('{}.tmpl'.format(run_file_name))
    target_fname = os.path.join(forge_dir, '.circleci', '{}.sh'.format(run_file_name))
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))

    template_name = 'fast_finish_ci_pr_build.sh.tmpl'
    template = jinja_env.get_template(template_name)
    target_fname = os.path.join(forge_dir, '.circleci', 'fast_finish_ci_pr_build.sh')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))

    # Fix permissions.
    target_fnames = [
        os.path.join(forge_dir, '.circleci', 'checkout_merge_commit.sh'),
        os.path.join(forge_dir, '.circleci', 'fast_finish_ci_pr_build.sh'),
        os.path.join(forge_dir, '.circleci', '{}.sh'.format(run_file_name)),
    ]
    for each_target_fname in target_fnames:
        set_exe_file(each_target_fname, True)
Beispiel #23
0
        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}\\upload_or_check_non_existence".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))


def update_matrix(forge_config, new_matrix):
    """Return a shallow copy of *forge_config* whose ``'matrix'`` entry is
    replaced by *new_matrix*.

    The input config is left untouched.
    """
    updated = forge_config.copy()
    updated['matrix'] = new_matrix
    return updated


def prepare_matrix_for_env_vars(matrix):
    """
def render_circle(jinja_env, forge_config, forge_dir):
    """Render CircleCI support files for linux-64 builds.

    Computes the linux-64 build matrix; when nothing would be built, disables
    circle in *forge_config* and removes the generated ``ci_support`` scripts.
    Otherwise renders the docker-build and fast-finish scripts (honouring
    recipe-local overrides and ``yum_requirements.txt``) and marks them
    executable.  ``circle.yml`` itself is always (re)written at the end.
    """
    meta = forge_config['package']
    # Evaluate the recipe as if on linux-64 so platform selectors/skips apply.
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(
            meta,
            forge_config.get('matrix'),
            forge_config.get('channels', {}).get('sources', tuple())
        )
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
            os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell pipeline that aborts superseded PR builds early.
        fast_finish = textwrap.dedent("""\
            {get_fast_finish_script} | \\
                 python - -v --ci "circle" "${{CIRCLE_PROJECT_USERNAME}}/${{CIRCLE_PROJECT_REPONAME}}" "${{CIRCLE_BUILD_NUM}}" "${{CIRCLE_PR_NUMBER}}"
        """)
        get_fast_finish_script = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script += "cat {recipe_dir}/ff_ci_pr_build.py".format(recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script
        )

        fast_finish = fast_finish.strip()

        forge_config['fast_finish'] = fast_finish

        # Shell snippet injected into the rendered docker build script.
        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_linux')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source /recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup

            """)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            # An empty (or fully commented-out) requirements file is an error.
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                /usr/bin/sudo -n yum install -y {}


            """.format(' '.join(requirements)))

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "/recipe_root/upload_or_check_non_existence.py"
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')
        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        template_name = 'fast_finish_ci_pr_build.sh.tmpl'
        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
            os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
        ]
        for each_target_fname in target_fnames:
            set_exe_file(each_target_fname, True)

    # circle.yml is written unconditionally, even when builds are disabled.
    target_fname = os.path.join(forge_dir, 'circle.yml')
    template = jinja_env.get_template('circle.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render ``appveyor.yml`` for Windows (win-32 / win-64) builds.

    Computes the build matrix across both Windows architectures; when nothing
    survives skip-filtering, disables AppVeyor in *forge_config* and removes
    ``appveyor.yml``.  Otherwise augments each matrix case with the Miniconda
    install location, prepares the fast-finish and build-setup snippets
    (honouring recipe-local overrides), and writes the rendered template.
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        # Evaluate the recipe per-platform so selectors/skips apply.
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            matrix = compute_build_matrix(
                meta,
                forge_config.get('matrix'),
                forge_config.get('channels', {}).get('sources', tuple())
            )

            cases_not_skipped = []
            for case in matrix:
                pkgs, vars = split_case(case)
                with enable_vars(vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        cases_not_skipped.append(vars + sorted(pkgs))
            if cases_not_skipped:
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(case)
                                   for case in cases_not_skipped)
    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location.
        matrix, old_matrix = [], matrix
        for case in old_matrix:
            case = odict(case)

            # Use Python 2.7 as a fallback when no Python version is set.
            case["CONDA_PY"] = case.get("CONDA_PY", "27")

            # Set `root`'s `python` version.
            case["CONDA_INSTALL_LOCN"] = "C:\\\\Miniconda"
            if case.get("CONDA_PY") == "27":
                case["CONDA_INSTALL_LOCN"] += ""
            elif case.get("CONDA_PY") == "35":
                case["CONDA_INSTALL_LOCN"] += "35"
            elif case.get("CONDA_PY") == "36":
                case["CONDA_INSTALL_LOCN"] += "36"

            # Set architecture.
            if case.get("TARGET_ARCH") == "x86":
                case["CONDA_INSTALL_LOCN"] += ""
            if case.get("TARGET_ARCH") == "x64":
                case["CONDA_INSTALL_LOCN"] += "-x64"

            matrix.append(list(case.items()))
        del old_matrix

        forge_config = update_matrix(forge_config, matrix)

        get_fast_finish_script = ""
        fast_finish_script = ""
        fast_finish = textwrap.dedent("""\
            {get_fast_finish_script}
            {fast_finish_script} -v --ci "appveyor" "%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%" "%APPVEYOR_BUILD_NUMBER%" "%APPVEYOR_PULL_REQUEST_NUMBER%"
        """)

        # If the recipe supplies its own fast-finish script, run that copy;
        # otherwise download the canonical one at build time and delete it
        # afterwards.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            fast_finish_script += "{recipe_dir}\\ff_ci_pr_build".format(recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += '''powershell -Command "(New-Object Net.WebClient).DownloadFile('https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py', 'ff_ci_pr_build.py')"'''
            fast_finish_script += "ff_ci_pr_build"
            fast_finish += "del {fast_finish_script}.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script,
            fast_finish_script=fast_finish_script,
        )

        fast_finish = fast_finish.strip()
        # Indent continuation lines so they nest inside the YAML entry.
        fast_finish = fast_finish.replace("\n", "\n        ")

        forge_config['fast_finish'] = fast_finish

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        # BUGFIX: this previously tested for 'run_conda_forge_build_setup_osx'
        # while the command below runs the *_win script, so a recipe-local
        # Windows override could never take effect.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_win')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                {recipe_dir}\\run_conda_forge_build_setup_win
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\

                run_conda_forge_build_setup
            """)

        build_setup = build_setup.rstrip()
        # Each line becomes a `- cmd:` entry in the YAML install section.
        build_setup = build_setup.replace("\n", "\n    - cmd: ")
        build_setup = build_setup.lstrip()

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            # NOTE(review): unlike the circle/travis variants this path omits
            # the ".py" suffix -- confirm that is intended on Windows.
            forge_config['upload_script'] = (
                "{recipe_dir}\\upload_or_check_non_existence".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
def render_README(jinja_env, forge_config, forge_dir):
    """Render README.md.tmpl into ``<forge_dir>/README.md``."""
    tmpl = jinja_env.get_template('README.md.tmpl')
    out_path = os.path.join(forge_dir, 'README.md')
    with write_file(out_path) as fh:
        fh.write(tmpl.render(**forge_config))
def _render_ci_provider(provider_name,
                        jinja_env,
                        forge_config,
                        forge_dir,
                        platform,
                        arch,
                        fast_finish_text,
                        platform_target_path,
                        platform_template_file,
                        platform_specific_setup,
                        keep_noarch=False,
                        extra_platform_files=None):
    """Render one CI provider's configuration for a single platform/arch.

    Renders the recipe metadata via conda-build, optionally drops noarch
    outputs, clears stale ``.ci_support`` configs for this provider, then
    either disables the provider (removing its files) when every meta is
    skipped, or writes the rendered provider template after invoking the
    platform-specific setup hook.  Returns the (mutated) *forge_config*.
    """
    metas = conda_build.api.render(
        os.path.join(forge_dir, 'recipe'),
        exclusive_config_file=forge_config['exclusive_config_file'],
        platform=platform,
        arch=arch,
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        channel_urls=forge_config.get('channels', {}).get('sources', []))
    # render returns some download & reparsing info that we don't care about
    metas = [m for m, _, _ in metas]

    if not keep_noarch:
        to_delete = []
        for idx, meta in enumerate(metas):
            if meta.noarch:
                # do not build noarch, including noarch: python, packages on Travis CI.
                to_delete.append(idx)
        # Delete back-to-front so earlier indices stay valid.
        for idx in reversed(to_delete):
            del metas[idx]

    # Remove stale provider configs before (possibly) regenerating them.
    if os.path.isdir(os.path.join(forge_dir, '.ci_support')):
        configs = glob.glob(
            os.path.join(forge_dir, '.ci_support',
                         '{}_*'.format(provider_name)))
        for config in configs:
            remove_file(config)

    if not metas or all(m.skip() for m in metas):
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config[provider_name]["enabled"] = False

        extra_platform_files = [] if not extra_platform_files else extra_platform_files
        target_fnames = [platform_target_path] + extra_platform_files
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config[provider_name]["enabled"] = True

        forge_config['configs'] = dump_subspace_config_files(
            metas, forge_dir, provider_name)

        forge_config['fast_finish'] = _get_fast_finish_script(
            provider_name,
            forge_dir=forge_dir,
            forge_config=forge_config,
            fast_finish_text=fast_finish_text)

        # If the recipe supplies its own upload_or_check_non_existence.py upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                forge_config['upload_script'] = (
                    "/home/conda/recipe_root/upload_or_check_non_existence.py")
            elif provider_name == "travis":
                forge_config['upload_script'] = (
                    "{}/upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"]))
            else:
                forge_config['upload_script'] = (
                    "{}\\upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"]))
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # hook for extending with whatever platform specific junk we need.
        #     Function passed in as argument
        platform_specific_setup(jinja_env=jinja_env,
                                forge_dir=forge_dir,
                                forge_config=forge_config)

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == 'circle':
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    return forge_config
Beispiel #28
0
def _render_ci_provider(provider_name, jinja_env, forge_config, forge_dir, platforms, archs,
                        fast_finish_text, platform_target_path, platform_template_file,
                        platform_specific_setup, keep_noarchs=None, extra_platform_files=None):
    """Render one CI provider's files across several platforms.

    Args:
        provider_name: key of the provider in *forge_config* (e.g. 'circle').
        jinja_env: Jinja2 environment for template lookup.
        forge_config: feedstock configuration dict (mutated in place).
        forge_dir: root directory of the feedstock.
        platforms, archs, keep_noarchs: parallel lists describing each target;
            ``keep_noarchs`` defaults to all-False (noarch metas are dropped).
        fast_finish_text: passed through to ``_get_fast_finish_script``.
        platform_target_path: provider config file to write (or remove).
        platform_template_file: template name for that file.
        platform_specific_setup: hook invoked once per enabled platform.
        extra_platform_files: optional mapping of platform name -> list of
            files to remove when that platform is disabled ('common' entries
            are never removed by the stale-key sweep).

    Returns:
        The (mutated) *forge_config*.
    """
    if keep_noarchs is None:
        keep_noarchs = [False]*len(platforms)
    if extra_platform_files is None:
        # Fixed: previously a mutable default argument ({}); use the None
        # sentinel to avoid the shared-mutable-default pitfall.
        extra_platform_files = {}

    metas_list_of_lists = []
    enable_platform = [False]*len(platforms)
    for i, (platform, arch, keep_noarch) in enumerate(zip(platforms, archs, keep_noarchs)):
        metas = conda_build.api.render(os.path.join(forge_dir, 'recipe'),
                                   exclusive_config_file=forge_config['exclusive_config_file'],
                                   platform=platform, arch=arch,
                                   permit_undefined_jinja=True, finalize=False,
                                   bypass_env_check=True,
                                   channel_urls=forge_config.get('channels', {}).get('sources', []))
        # render returns some download & reparsing info that we don't care about
        metas = [m for m, _, _ in metas]

        if not keep_noarch:
            to_delete = []
            for idx, meta in enumerate(metas):
                if meta.noarch:
                    # do not build noarch, including noarch: python, packages on Travis CI.
                    to_delete.append(idx)
            # Delete back-to-front so earlier indices stay valid.
            for idx in reversed(to_delete):
                del metas[idx]

        # A platform is enabled as soon as any of its metas is not skipped.
        for meta in metas:
            if not meta.skip():
                enable_platform[i] = True
        metas_list_of_lists.append(metas)

    # Clear stale .ci_support configs for this provider and its platforms.
    if os.path.isdir(os.path.join(forge_dir, '.ci_support')):
        configs = glob.glob(os.path.join(forge_dir, '.ci_support',
                                         '{}_*'.format(provider_name)))
        for config in configs:
            remove_file(config)

        for platform in platforms:
            configs = glob.glob(os.path.join(forge_dir, '.ci_support',
                                             '{}_*'.format(platform)))
            for config in configs:
                remove_file(config)

    if not any(enable_platform):
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config[provider_name]["enabled"] = False

        target_fnames = [platform_target_path]
        if extra_platform_files:
            for val in extra_platform_files.values():
                target_fnames.extend(val)
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config[provider_name]["enabled"] = True
        fancy_name = {'linux': 'Linux', 'osx': 'OSX', 'win': 'Windows'}
        fancy_platforms = []

        configs = []
        for metas, platform, enable in zip(metas_list_of_lists, platforms, enable_platform):
            if enable:
                configs.extend(dump_subspace_config_files(metas, forge_dir, platform))
                forge_config[platform]["enabled"] = True
                fancy_platforms.append(fancy_name[platform])
            elif platform in extra_platform_files:
                # Platform disabled: clear out its provider-specific files.
                for each_target_fname in extra_platform_files[platform]:
                    remove_file(each_target_fname)

        # Files keyed by anything that is neither 'common' nor a known
        # platform are stale and removed as well.
        for key in extra_platform_files.keys():
            if key != 'common' and key not in platforms:
                for each_target_fname in extra_platform_files[key]:
                    remove_file(each_target_fname)

        forge_config[provider_name]["platforms"] = ','.join(fancy_platforms)

        forge_config['configs'] = configs

        forge_config['fast_finish'] = _get_fast_finish_script(provider_name,
                                                              forge_dir=forge_dir,
                                                              forge_config=forge_config,
                                                              fast_finish_text=fast_finish_text)

        # If the recipe supplies its own upload_or_check_non_existence.py upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                forge_config['upload_script'] = (
                    "/home/conda/recipe_root/upload_or_check_non_existence.py"
                )
            elif provider_name == "travis":
                forge_config['upload_script'] = (
                    "{}/upload_or_check_non_existence.py".format(forge_config["recipe_dir"])
                )
            else:
                forge_config['upload_script'] = (
                    "{}\\upload_or_check_non_existence.py".format(forge_config["recipe_dir"])
                )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # hook for extending with whatever platform specific junk we need.
        #     Function passed in as argument
        for platform, enable in zip(platforms, enable_platform):
            if enable:
                platform_specific_setup(jinja_env=jinja_env, forge_dir=forge_dir,
                                        forge_config=forge_config, platform=platform)

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == 'circle':
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    return forge_config
Beispiel #29
0
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for osx-64 builds.

    Computes the osx-64 build matrix, drops skipped cases, and then either
    removes ``.travis.yml`` (nothing to build) or renders it from the
    ``travis.yml.tmpl`` template with the local/global build-setup and
    upload scripts resolved into ``forge_config``.

    Parameters
    ----------
    jinja_env : jinja2.Environment used to load ``travis.yml.tmpl``.
    forge_config : dict of feedstock configuration; mutated in place
        (``travis.enabled``, ``build_setup``, ``upload_script``) and also
        returned-by-reference via ``update_matrix``.
    forge_dir : root directory of the feedstock checkout.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        cases_not_skipped = []
        for case in matrix:
            # Renamed from ``vars`` to avoid shadowing the builtin.
            pkgs, case_vars = split_case(case)
            with enable_vars(case_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(case_vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        # Re-indent so continuation lines align under the YAML list item.
        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own upload_or_check_non_existence.py
        # upload script, we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for osx-64 builds.

    Computes the osx-64 build matrix (also passing the configured channel
    sources), drops skipped cases, and then either removes ``.travis.yml``
    (nothing to build) or renders it from ``travis.yml.tmpl`` with the
    fast-finish, build-setup, and upload scripts resolved into
    ``forge_config``.

    Parameters
    ----------
    jinja_env : jinja2.Environment used to load ``travis.yml.tmpl``.
    forge_config : dict of feedstock configuration; mutated in place
        (``travis.enabled``, ``fast_finish``, ``build_setup``,
        ``upload_script``).
    forge_dir : root directory of the feedstock checkout.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(
            meta,
            forge_config.get('matrix'),
            forge_config.get('channels', {}).get('sources', tuple())
        )

        cases_not_skipped = []
        for case in matrix:
            # Renamed from ``vars`` to avoid shadowing the builtin.
            pkgs, case_vars = split_case(case)
            with enable_vars(case_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(case_vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell snippet that aborts the build early when a newer commit has
        # superseded this one.  The doubled braces survive .format() below.
        fast_finish = textwrap.dedent("""\
            ({get_fast_finish_script} | \\
                python - -v --ci "travis" "${{TRAVIS_REPO_SLUG}}" "${{TRAVIS_BUILD_NUMBER}}" "${{TRAVIS_PULL_REQUEST}}") || exit 1
        """)
        get_fast_finish_script = ""

        # If the recipe supplies its own fast-finish (ff_ci_pr_build.py)
        # script, we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script += "cat {recipe_dir}/ff_ci_pr_build.py".format(recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script
        )

        # Re-indent so continuation lines align under the YAML list item.
        fast_finish = fast_finish.strip()
        fast_finish = fast_finish.replace("\n", "\n      ")

        forge_config['fast_finish'] = fast_finish

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own upload_or_check_non_existence.py
        # upload script, we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Beispiel #31
0
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for osx-64 builds.

    Skips matrix computation entirely for noarch recipes (they are not
    built on Travis CI), otherwise computes the osx-64 build matrix with
    the configured channel sources, drops skipped cases, and either removes
    ``.travis.yml`` (nothing to build) or renders it from
    ``travis.yml.tmpl`` with the fast-finish, build-setup, and upload
    scripts resolved into ``forge_config``.

    Parameters
    ----------
    jinja_env : jinja2.Environment used to load ``travis.yml.tmpl``.
    forge_config : dict of feedstock configuration; mutated in place
        (``travis.enabled``, ``fast_finish``, ``build_setup``,
        ``upload_script``).
    forge_dir : root directory of the feedstock checkout.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        if meta.noarch:
            # do not build noarch, including noarch: python, packages on Travis CI.
            matrix = []
        else:
            matrix = compute_build_matrix(
                meta, forge_config.get('matrix'),
                forge_config.get('channels', {}).get('sources', tuple()))

        cases_not_skipped = []
        for case in matrix:
            # Renamed from ``vars`` to avoid shadowing the builtin.
            pkgs, case_vars = split_case(case)
            with enable_vars(case_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(case_vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell snippet that aborts the build early when a newer commit has
        # superseded this one.  The doubled braces survive .format() below.
        fast_finish = textwrap.dedent("""\
            ({get_fast_finish_script} | \\
                python - -v --ci "travis" "${{TRAVIS_REPO_SLUG}}" "${{TRAVIS_BUILD_NUMBER}}" "${{TRAVIS_PULL_REQUEST}}") || exit 1
        """)
        get_fast_finish_script = ""

        # If the recipe supplies its own fast-finish (ff_ci_pr_build.py)
        # script, we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe', 'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script += "cat {recipe_dir}/ff_ci_pr_build.py".format(
                recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script)

        # Re-indent so continuation lines align under the YAML list item.
        fast_finish = fast_finish.strip()
        fast_finish = fast_finish.replace("\n", "\n      ")

        forge_config['fast_finish'] = fast_finish

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own upload_or_check_non_existence.py
        # upload script, we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]))
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))