def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for OSX builds.

    Computes the osx-64 build matrix for the package in ``forge_config``.
    When at least one case survives skip-filtering, the Travis template is
    rendered into ``forge_dir``; otherwise Travis is disabled and any
    stale ``.travis.yml`` is deleted.

    Note: mutates ``forge_config`` (the ``travis.enabled`` flag).
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        raw_matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        kept_cases = []
        for build_case in raw_matrix:
            pkgs, env_vars = split_case(build_case)
            # The skip condition must be evaluated with this case's
            # environment variables enabled.
            with enable_vars(env_vars):
                if ResolvedDistribution(meta, pkgs).skip():
                    continue
                kept_cases.append(env_vars + sorted(pkgs))
        matrix = sorted(kept_cases, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if matrix:
        forge_config["travis"]["enabled"] = True
        forge_config = update_matrix(forge_config,
                                     prepare_matrix_for_env_vars(matrix))
        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
    else:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    def test_remove_file(self):
        """Verify ``fio.remove_file``: it must delete the file, prune any
        parent directories it emptied, and drop the file from the git
        index when a repository is involved.

        ``parameterize()`` yields (tmp_dir, repo, pathfunc) fixtures;
        ``repo`` is None for the plain-filesystem variant.
        """
        for tmp_dir, repo, pathfunc in parameterize():
            for filename in ["test.txt", "dir1/dir2/test.txt"]:
                dirname = os.path.dirname(filename)
                # NOTE(review): directories are created from the *relative*
                # dirname while the file below lives under tmp_dir — this
                # only lines up if the current working directory is tmp_dir;
                # confirm that parameterize() chdirs there.
                if dirname and not os.path.exists(dirname):
                    os.makedirs(dirname)

                filename = os.path.join(tmp_dir, filename)

                # Create an empty file for remove_file to operate on.
                with io.open(filename, "w", encoding="utf-8") as fh:
                    fh.write("")
                if repo is not None:
                    repo.index.add([filename])

                # Sanity-check the fixture: file, parent directories and
                # (when applicable) the index entry all exist beforehand.
                self.assertTrue(os.path.exists(filename))
                if dirname:
                    self.assertTrue(os.path.exists(dirname))
                    self.assertTrue(os.path.exists(os.path.dirname(dirname)))
                if repo is not None:
                    self.assertTrue(
                        list(repo.index.iter_blobs(BlobFilter(filename)))
                    )

                fio.remove_file(pathfunc(filename))

                # File, emptied parent directories, and the git index entry
                # must all be gone afterwards.
                self.assertFalse(os.path.exists(filename))
                if dirname:
                    self.assertFalse(os.path.exists(dirname))
                    self.assertFalse(os.path.exists(os.path.dirname(dirname)))
                if repo is not None:
                    self.assertFalse(
                        list(repo.index.iter_blobs(BlobFilter(filename)))
                    )
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for OSX builds.

    Computes the osx-64 build matrix; if any case survives skip
    filtering the Travis template is rendered into ``forge_dir``,
    otherwise Travis is disabled and a stale ``.travis.yml`` is removed.
    Mutates ``forge_config`` (the ``travis.enabled`` flag).
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            # Skip evaluation needs this case's env vars to be active.
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``appveyor.yml`` for Windows.

    Builds the win-32 and win-64 matrices, tags each surviving case with
    a TARGET_ARCH environment variable, and either renders the AppVeyor
    template into ``forge_dir`` or — when nothing is buildable — disables
    AppVeyor and deletes any stale ``appveyor.yml``.

    Note: mutates ``forge_config`` (the ``appveyor.enabled`` flag).
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            platform_matrix = compute_build_matrix(meta, forge_config.get('matrix'))

            surviving = []
            for build_case in platform_matrix:
                pkgs, env_vars = split_case(build_case)
                # Skip evaluation requires this case's variables enabled.
                with enable_vars(env_vars):
                    if ResolvedDistribution(meta, pkgs).skip():
                        continue
                    surviving.append(env_vars + sorted(pkgs))
            if surviving:
                # Tag every case of this platform with its architecture.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(one_case)
                                   for one_case in surviving)

    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if matrix:
        forge_config["appveyor"]["enabled"] = True
        forge_config = update_matrix(forge_config,
                                     prepare_matrix_for_env_vars(matrix))
        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
    else:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    def test_remove_file(self):
        """Verify ``fio.remove_file``: it must delete the file, prune any
        parent directories it emptied, and drop the file from the git
        index when a repository is involved.

        Same scenario as the earlier variant, but the fixture file is
        written with an explicit ``newline="\\n"`` for cross-platform
        determinism.
        """
        for tmp_dir, repo, pathfunc in parameterize():
            for filename in ["test.txt", "dir1/dir2/test.txt"]:
                dirname = os.path.dirname(filename)
                # NOTE(review): directories are created from the *relative*
                # dirname while the file below lives under tmp_dir — this
                # only lines up if the current working directory is tmp_dir;
                # confirm that parameterize() chdirs there.
                if dirname and not os.path.exists(dirname):
                    os.makedirs(dirname)

                filename = os.path.join(tmp_dir, filename)

                # Create an empty file for remove_file to operate on.
                with io.open(
                    filename, "w", encoding="utf-8", newline="\n"
                ) as fh:
                    fh.write("")
                if repo is not None:
                    repo.index.add([filename])

                # Sanity-check the fixture: file, parent directories and
                # (when applicable) the index entry all exist beforehand.
                self.assertTrue(os.path.exists(filename))
                if dirname:
                    self.assertTrue(os.path.exists(dirname))
                    self.assertTrue(os.path.exists(os.path.dirname(dirname)))
                if repo is not None:
                    self.assertTrue(
                        list(repo.index.iter_blobs(BlobFilter(filename)))
                    )

                fio.remove_file(pathfunc(filename))

                # File, emptied parent directories, and the git index entry
                # must all be gone afterwards.
                self.assertFalse(os.path.exists(filename))
                if dirname:
                    self.assertFalse(os.path.exists(dirname))
                    self.assertFalse(os.path.exists(os.path.dirname(dirname)))
                if repo is not None:
                    self.assertFalse(
                        list(repo.index.iter_blobs(BlobFilter(filename)))
                    )
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render (or remove) ``appveyor.yml`` for Windows builds.

    Builds the win-32/win-64 matrices, then points each surviving case
    at the AppVeyor-provided Miniconda installation matching its Python
    version (CONDA_PY) and target architecture via CONDA_INSTALL_LOCN.

    Note: mutates ``forge_config`` (the ``appveyor.enabled`` flag).
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            platform_matrix = compute_build_matrix(meta, forge_config.get('matrix'))

            surviving = []
            for build_case in platform_matrix:
                pkgs, env_vars = split_case(build_case)
                # Skip evaluation requires this case's variables enabled.
                with enable_vars(env_vars):
                    if ResolvedDistribution(meta, pkgs).skip():
                        continue
                    surviving.append(env_vars + sorted(pkgs))
            if surviving:
                # Tag every case of this platform with its architecture.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(one_case)
                                   for one_case in surviving)

    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location: suffix tables selecting the
        # pre-installed flavour for the case's Python version and target
        # architecture; unrecognised values contribute no suffix.
        py_suffix = {"27": "", "34": "3", "35": "35"}
        arch_suffix = {"x86": "", "x64": "-x64"}

        relocated = []
        for build_case in matrix:
            build_case = odict(build_case)
            build_case["CONDA_INSTALL_LOCN"] = (
                "C:\\\\Miniconda"
                + py_suffix.get(build_case.get("CONDA_PY"), "")
                + arch_suffix.get(build_case.get("TARGET_ARCH"), "")
            )
            relocated.append(list(build_case.items()))
        matrix = relocated

        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
# --- Example 7 (snippet separator from the scraping source) ---
def render_appveyor(jinja_env, forge_config, forge_dir):
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            if meta.noarch:
                # do not build noarch, include noarch: python packages on AppVeyor.
                matrix = []
            else:
                matrix = compute_build_matrix(
                    meta, forge_config.get('matrix'),
                    forge_config.get('channels', {}).get('sources', tuple()))

            cases_not_skipped = []
            for case in matrix:
                pkgs, vars = split_case(case)
                with enable_vars(vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        cases_not_skipped.append(vars + sorted(pkgs))
            if cases_not_skipped:
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(case)
                                   for case in cases_not_skipped)
    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location.
        matrix, old_matrix = [], matrix
        for case in old_matrix:
            case = odict(case)

            # Use Python 2.7 as a fallback when no Python version is set.
            case["CONDA_PY"] = case.get("CONDA_PY", "27")

            # Set `root`'s `python` version.
            case["CONDA_INSTALL_LOCN"] = "C:\\\\Miniconda"
            if case.get("CONDA_PY") == "27":
                case["CONDA_INSTALL_LOCN"] += ""
            elif case.get("CONDA_PY") == "35":
                case["CONDA_INSTALL_LOCN"] += "35"
# --- Example 8 (snippet separator from the scraping source) ---
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render ``ci_support/run_docker_build.sh`` for linux-64 builds.

    When no matrix case survives skip filtering, CircleCI is disabled and
    the docker-build helper scripts are removed; otherwise the script is
    rendered from the configured template and marked executable.

    Raises ValueError when ``recipe/yum_requirements.txt`` exists but is
    effectively empty.  Mutates ``forge_config`` (``circle.enabled``,
    ``build_setup``, ``upload_script``).
    """
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            # Skip evaluation needs this case's env vars to be active.
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell snippet spliced into the generated script via the template.
        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_linux')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source /recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup

            """)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                # Keep non-blank lines that are not '#' comments.
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "/recipe_root/upload_or_check_non_existence.py"
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            set_exe_file(each_target_fname, True)
# --- Example 9 (snippet separator from the scraping source) ---
def _load_forge_config(forge_dir, exclusive_config_file):
    """Build the forge configuration dict for *forge_dir*.

    Starts from built-in defaults, removes files left behind by older
    conda-smithy versions, overlays ``conda-forge.yml`` (if present),
    exports the compiler-stack / version-range environment variables,
    and fills in the package, github and exclusive-config entries.

    Raises ValueError when a legacy conda-smithy 2.x ``matrix`` key is
    found without a ``recipe/conda_build_config.yaml`` to replace it.
    """
    config = {
        'docker': {
            'executable': 'docker',
            'image': 'condaforge/linux-anvil',
            'command': 'bash'
        },
        'templates': {},
        'travis': {},
        'circle': {},
        'appveyor': {},
        'azure': {
            # disallow publication of azure artifacts for now.
            'upload_packages': False,
            # Force building all supported providers.
            'force': True,
        },
        'provider': {
            'linux': 'circle',
            'osx': 'travis',
            'win': 'appveyor'
        },
        'win': {
            'enabled': False
        },
        'osx': {
            'enabled': False
        },
        'linux': {
            'enabled': False
        },
        # Compiler stack environment variable
        'compiler_stack': 'comp4',
        # Stack variables,  These can be used to impose global defaults for how far we build out
        'min_py_ver': '27',
        'max_py_ver': '36',
        'min_r_ver': '34',
        'max_r_ver': '34',
        'channels': {
            'sources': ['conda-forge', 'defaults'],
            'targets': [['conda-forge', 'main']]
        },
        'github': {
            'user_or_org': 'conda-forge',
            'repo_name': '',
            'branch_name': 'master'
        },
        'recipe_dir': 'recipe'
    }

    # An older conda-smithy used to have some files which should no longer exist,
    # remove those now.
    old_files = [
        'disabled_appveyor.yml',
        os.path.join('ci_support', 'upload_or_check_non_existence.py'),
        'circle.yml',
        'appveyor.yml',
        os.path.join('ci_support', 'checkout_merge_commit.sh'),
        os.path.join('ci_support', 'fast_finish_ci_pr_build.sh'),
        os.path.join('ci_support', 'run_docker_build.sh'),
        'LICENSE',
    ]
    for old_file in old_files:
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn('No conda-forge.yml found. Assuming default options.')
    else:
        # NOTE(review): yaml.load_all without an explicit Loader can execute
        # arbitrary tags; conda-forge.yml is repo-controlled, but consider
        # yaml.safe_load_all here.
        with open(forge_yml, "r") as fh:
            file_config = list(yaml.load_all(fh))[0] or {}

        # check for conda-smithy 2.x matrix which we can't auto-migrate
        # to conda_build_config
        if file_config.get('matrix') and not os.path.exists(
                os.path.join(forge_dir, 'recipe', 'conda_build_config.yaml')):
            # FIXME: update docs URL
            raise ValueError(
                'Cannot rerender with matrix in conda-forge.yml.'
                ' Please migrate matrix to conda_build_config.yaml and try again.'
                ' See https://github.com/conda-forge/conda-smithy/wiki/Release-Notes-3.0.0.rc1'
                ' for more info.')

        # The config is just the union of the defaults, and the overridden
        # values.
        for key, value in file_config.items():
            # Deal with dicts within dicts.
            if isinstance(value, dict):
                # Merge one level deep so defaults inside the sub-dict survive.
                config_item = config.setdefault(key, value)
                config_item.update(value)
            else:
                config[key] = value

    # Set the environment variable for the compiler stack
    os.environ['CF_COMPILER_STACK'] = config['compiler_stack']
    # Set valid range for the supported platforms
    # NOTE(review): os.environ requires str values — a numeric override in
    # conda-forge.yml (e.g. min_py_ver: 27) would raise here; confirm.
    os.environ['CF_MIN_PY_VER'] = config['min_py_ver']
    os.environ['CF_MAX_PY_VER'] = config['max_py_ver']
    os.environ['CF_MIN_R_VER'] = config['min_r_ver']
    os.environ['CF_MAX_R_VER'] = config['max_r_ver']

    config['package'] = os.path.basename(forge_dir)
    if not config['github']['repo_name']:
        # Derive "<name>-feedstock" from the directory name when not set.
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config['github']['repo_name'] = feedstock_name
    config['exclusive_config_file'] = exclusive_config_file
    return config
# --- Example 10 (snippet separator from the scraping source) ---
def render_travis(jinja_env, forge_config, forge_dir):
    """Render (or remove) the feedstock's ``.travis.yml`` for OSX builds.

    Computes the osx-64 build matrix; if any case survives skip
    filtering, assembles the build-setup and upload shell snippets
    (preferring recipe-local overrides) and renders the Travis template,
    otherwise disables Travis and removes a stale ``.travis.yml``.

    Mutates ``forge_config`` (``travis.enabled``, ``build_setup``,
    ``upload_script``).
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            # Skip evaluation needs this case's env vars to be active.
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        # Re-indent continuation lines with six spaces — presumably to match
        # the YAML nesting level in travis.yml.tmpl; confirm against the
        # template before changing.
        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
def main(forge_file_directory):
    """Regenerate CI configuration for the feedstock in *forge_file_directory*.

    Loads the default config, overlays ``conda-forge.yml``, removes files
    left behind by older conda-smithy versions, then renders the Circle,
    Travis and AppVeyor configs plus the README from the templates
    (feedstock-local templates take precedence over the bundled ones).
    """
    if hasattr(conda_build, 'api'):
        build_config = conda_build.api.Config()
    else:
        build_config = conda_build.config.config

    # conda-build has some really fruity behaviour where it needs CONDA_NPY
    # and CONDA_PY in order to even read a meta. Because we compute version
    # matricies anyway the actual number makes absolutely no difference.
    build_config.CONDA_NPY = '99.9'
    build_config.CONDA_PY = 10

    recipe_dir = 'recipe'
    config = {'docker': {'executable': 'docker',
                         'image': 'condaforge/linux-anvil',
                         'command': 'bash'},
              'templates': {'run_docker_build': 'run_docker_build_matrix.tmpl'},
              'travis': {},
              'circle': {},
              'appveyor': {},
              'channels': {'sources': ['conda-forge', 'defaults'],
                           'targets': [['conda-forge', 'main']]},
              'github': {'user_or_org': 'conda-forge', 'repo_name': ''},
              'recipe_dir': recipe_dir}
    forge_dir = os.path.abspath(forge_file_directory)

    # An older conda-smithy used to have some files which should no longer exist,
    # remove those now.
    old_files = [
        'disabled_appveyor.yml',
        os.path.join('ci_support', 'upload_or_check_non_existence.py'),
    ]
    for old_file in old_files:
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn('No conda-forge.yml found. Assuming default options.')
    else:
        # NOTE(review): yaml.load_all without an explicit Loader can execute
        # arbitrary tags; consider yaml.safe_load_all.
        with open(forge_yml, "r") as fh:
            file_config = list(yaml.load_all(fh))[0] or {}
        # The config is just the union of the defaults, and the overridden
        # values.
        for key, value in file_config.items():
            # Deal with dicts within dicts.
            if isinstance(value, dict):
                config_item = config.setdefault(key, value)
                config_item.update(value)
            else:
                # Bug fix: non-dict overrides previously went through
                # setdefault, which silently discarded them whenever the
                # key already had a default. Assign directly instead.
                config[key] = value

    config['package'] = meta_of_feedstock(forge_file_directory, config=build_config)
    if not config['github']['repo_name']:
        # Derive "<name>-feedstock" from the directory name when not set.
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config['github']['repo_name'] = feedstock_name

    # "enabled" is derived from the build matrix; forbid setting it by hand.
    for each_ci in ["travis", "circle", "appveyor"]:
        if config[each_ci].pop("enabled", None):
            warnings.warn(
                "It is not allowed to set the `enabled` parameter for `%s`."
                " All CIs are enabled by default. To disable a CI, please"
                " add `skip: true` to the `build` section of `meta.yaml`"
                " and an appropriate selector so as to disable the build." \
                % each_ci
            )

    tmplt_dir = os.path.join(conda_forge_content, 'templates')
    # Load templates from the feedstock in preference to the smithy's templates.
    env = Environment(loader=FileSystemLoader([os.path.join(forge_dir, 'templates'),
                                               tmplt_dir]))

    copy_feedstock_content(forge_dir)

    render_circle(env, config, forge_dir)
    render_travis(env, config, forge_dir)
    render_appveyor(env, config, forge_dir)
    render_README(env, config, forge_dir)
def render_circle(jinja_env, forge_config, forge_dir):
    """Render the CircleCI configuration for linux-64 builds.

    Computes the linux-64 build matrix; when cases survive skip
    filtering, assembles the fast-finish, build-setup and upload shell
    snippets (preferring recipe-local overrides), renders the docker
    build and fast-finish scripts and makes them executable; otherwise
    disables Circle and removes those scripts.  ``circle.yml`` itself is
    always (re)rendered at the end.

    Raises ValueError when ``recipe/yum_requirements.txt`` exists but is
    effectively empty.  Mutates ``forge_config`` (``circle.enabled``,
    ``fast_finish``, ``build_setup``, ``upload_script``).
    """
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(
            meta,
            forge_config.get('matrix'),
            forge_config.get('channels', {}).get('sources', tuple())
        )
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            # Skip evaluation needs this case's env vars to be active.
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
            os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell pipeline that cancels redundant CI runs for a PR.
        fast_finish = textwrap.dedent("""\
            {get_fast_finish_script} | \\
                 python - -v --ci "circle" "${{CIRCLE_PROJECT_USERNAME}}/${{CIRCLE_PROJECT_REPONAME}}" "${{CIRCLE_BUILD_NUM}}" "${{CIRCLE_PR_NUMBER}}"
        """)
        get_fast_finish_script = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script += "cat {recipe_dir}/ff_ci_pr_build.py".format(recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script
        )

        fast_finish = fast_finish.strip()

        forge_config['fast_finish'] = fast_finish

        # Shell snippet spliced into the generated script via the template.
        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_linux')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source /recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup

            """)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                # Keep non-blank lines that are not '#' comments.
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                /usr/bin/sudo -n yum install -y {}


            """.format(' '.join(requirements)))

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "/recipe_root/upload_or_check_non_existence.py"
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')
        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        template_name = 'fast_finish_ci_pr_build.sh.tmpl'
        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
            os.path.join(forge_dir, 'ci_support', 'fast_finish_ci_pr_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
        ]
        for each_target_fname in target_fnames:
            set_exe_file(each_target_fname, True)

    # circle.yml itself is rendered unconditionally (enabled flag and all).
    target_fname = os.path.join(forge_dir, 'circle.yml')
    template = jinja_env.get_template('circle.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render ``ci_support/run_docker_build.sh`` for the linux-64 builds.

    Computes the build matrix for the linux-64 subdir. If every case is
    skipped, the Circle provider is disabled and the generated CI scripts
    are removed; otherwise the matrix, optional yum build setup and the
    upload script are stored on ``forge_config``, the docker-build and
    fast-finish scripts are rendered, and the CI scripts are marked
    executable.

    :param jinja_env: Jinja2 environment used to look up templates.
    :param forge_config: mutable feedstock configuration dict; updated
        in place (``circle.enabled``, matrix, ``build_setup``, ...).
    :param forge_dir: path of the feedstock directory being rendered.
    """
    meta = forge_config['package']
    # Temporarily pretend we are on linux-64 so the recipe is re-parsed
    # with the right platform selectors before computing the matrix.
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            # Each case's variables must be active while evaluating skip().
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False

        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            # Keep non-empty, non-comment lines only.
            with open(yum_requirements_fpath) as fh:
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            # This text is embedded verbatim in the generated shell script.
            build_setup = textwrap.dedent("""\
                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))
            forge_config['build_setup'] = build_setup

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions: the generated scripts must be executable for CI.
        target_fnames = [
            os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
            os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
        ]
        for each_target_fname in target_fnames:
            mode = get_mode_file(each_target_fname)
            set_mode_file(
                each_target_fname,
                mode | stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
            )
Exemple #14
0
def _render_ci_provider(provider_name, jinja_env, forge_config, forge_dir, platforms, archs,
                        fast_finish_text, platform_target_path, platform_template_file,
                        platform_specific_setup, keep_noarchs=None, extra_platform_files=None):
    """Render one CI provider's files across several platforms.

    Renders the recipe for every (platform, arch) pair, decides which
    platforms actually have something to build, regenerates the
    ``.ci_support`` config files, and writes or removes the provider's
    top-level CI file accordingly.

    :param provider_name: key of the provider in ``forge_config``
        (e.g. ``"circle"``, ``"travis"``, ``"appveyor"``).
    :param jinja_env: Jinja2 environment for template lookup.
    :param forge_config: mutable feedstock configuration; ``enabled``,
        ``platforms``, ``configs``, ``fast_finish`` and ``upload_script``
        entries are updated in place.
    :param forge_dir: path of the feedstock directory.
    :param platforms: platform names to consider (parallel to ``archs``).
    :param archs: architectures, one per platform.
    :param fast_finish_text: text used to build the fast-finish script.
    :param platform_target_path: CI file to write (or remove).
    :param platform_template_file: template name for the CI file.
    :param platform_specific_setup: callback invoked per enabled platform
        to extend ``forge_config`` with platform-specific data.
    :param keep_noarchs: per-platform flags; when false, noarch outputs
        are dropped for that platform. Defaults to all-false.
    :param extra_platform_files: optional mapping of platform name to
        extra generated files removed when that platform is disabled.
    :return: the (mutated) ``forge_config``.
    """
    if keep_noarchs is None:
        keep_noarchs = [False]*len(platforms)
    # None default instead of a shared mutable {} literal, so repeated
    # calls can never observe each other's state.
    if extra_platform_files is None:
        extra_platform_files = {}

    metas_list_of_lists = []
    enable_platform = [False]*len(platforms)
    for i, (platform, arch, keep_noarch) in enumerate(zip(platforms, archs, keep_noarchs)):
        metas = conda_build.api.render(os.path.join(forge_dir, 'recipe'),
                                   exclusive_config_file=forge_config['exclusive_config_file'],
                                   platform=platform, arch=arch,
                                   permit_undefined_jinja=True, finalize=False,
                                   bypass_env_check=True,
                                   channel_urls=forge_config.get('channels', {}).get('sources', []))
        # render returns some download & reparsing info that we don't care about
        metas = [m for m, _, _ in metas]

        if not keep_noarch:
            to_delete = []
            for idx, meta in enumerate(metas):
                if meta.noarch:
                    # do not build noarch, including noarch: python, packages on Travis CI.
                    to_delete.append(idx)
            # Delete from the end so earlier indices stay valid.
            for idx in reversed(to_delete):
                del metas[idx]

        # A platform is enabled if at least one of its metas is not skipped.
        for meta in metas:
            if not meta.skip():
                enable_platform[i] = True
        metas_list_of_lists.append(metas)

    # Clear out previously generated .ci_support configs so removed matrix
    # entries do not linger.
    if os.path.isdir(os.path.join(forge_dir, '.ci_support')):
        configs = glob.glob(os.path.join(forge_dir, '.ci_support',
                                         '{}_*'.format(provider_name)))
        for config in configs:
            remove_file(config)

        for platform in platforms:
            configs = glob.glob(os.path.join(forge_dir, '.ci_support',
                                             '{}_*'.format(platform)))
            for config in configs:
                remove_file(config)

    if not any(enable_platform):
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the provider's generated files if present.
        forge_config[provider_name]["enabled"] = False

        target_fnames = [platform_target_path]
        if extra_platform_files:
            for val in extra_platform_files.values():
                target_fnames.extend(val)
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config[provider_name]["enabled"] = True
        fancy_name = {'linux': 'Linux', 'osx': 'OSX', 'win': 'Windows'}
        fancy_platforms = []

        configs = []
        for metas, platform, enable in zip(metas_list_of_lists, platforms, enable_platform):
            if enable:
                configs.extend(dump_subspace_config_files(metas, forge_dir, platform))
                forge_config[platform]["enabled"] = True
                fancy_platforms.append(fancy_name[platform])
            elif platform in extra_platform_files:
                # Platform is disabled: drop its extra generated files.
                for each_target_fname in extra_platform_files[platform]:
                    remove_file(each_target_fname)

        # Extra files keyed by platforms we are not even considering here
        # are stale too (but 'common' files are always kept).
        for key in extra_platform_files.keys():
            if key != 'common' and key not in platforms:
                for each_target_fname in extra_platform_files[key]:
                    remove_file(each_target_fname)

        forge_config[provider_name]["platforms"] = ','.join(fancy_platforms)

        forge_config['configs'] = configs

        forge_config['fast_finish'] = _get_fast_finish_script(provider_name,
                                                              forge_dir=forge_dir,
                                                              forge_config=forge_config,
                                                              fast_finish_text=fast_finish_text)

        # If the recipe supplies its own upload_or_check_non_existence.py upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                forge_config['upload_script'] = (
                    "/home/conda/recipe_root/upload_or_check_non_existence.py"
                )
            elif provider_name == "travis":
                forge_config['upload_script'] = (
                    "{}/upload_or_check_non_existence.py".format(forge_config["recipe_dir"])
                )
            else:
                # Remaining providers run on Windows: backslash separator.
                forge_config['upload_script'] = (
                    "{}\\upload_or_check_non_existence.py".format(forge_config["recipe_dir"])
                )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # hook for extending with whatever platform specific junk we need.
        #     Function passed in as argument
        for platform, enable in zip(platforms, enable_platform):
            if enable:
                platform_specific_setup(jinja_env=jinja_env, forge_dir=forge_dir,
                                        forge_config=forge_config, platform=platform)

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == 'circle':
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    return forge_config
Exemple #15
0
def _render_ci_provider(
    provider_name,
    jinja_env,
    forge_config,
    forge_dir,
    platforms,
    archs,
    fast_finish_text,
    platform_target_path,
    platform_template_file,
    platform_specific_setup,
    keep_noarchs=None,
    extra_platform_files=None,
    upload_packages=None,
):
    """Render one CI provider's files across several platform/arch pairs.

    Renders the recipe for every (platform, arch) pair, decides which
    are enabled, regenerates ``.ci_support`` config files (tagging each
    with its upload flag), and writes or removes the provider's CI file.

    :param provider_name: key of the provider in ``forge_config``.
    :param jinja_env: Jinja2 environment for template lookup.
    :param forge_config: mutable feedstock configuration; updated in
        place (``enabled``, ``platforms``, ``all_platforms``, ``configs``,
        ``fast_finish``, ``upload_script``).
    :param forge_dir: path of the feedstock directory.
    :param platforms: platform names, parallel to ``archs``.
    :param archs: architecture strings, one per platform.
    :param fast_finish_text: text used to build the fast-finish script.
    :param platform_target_path: CI file to write (or remove).
    :param platform_template_file: template name for the CI file.
    :param platform_specific_setup: callback invoked per enabled platform.
    :param keep_noarchs: per-platform flags; when false, noarch outputs
        are dropped for that platform. Defaults to all-false.
    :param extra_platform_files: optional mapping of platform name to
        extra generated files removed when that platform is disabled.
    :param upload_packages: per-platform upload flags, parallel to
        ``platforms``.
    :return: the (mutated) ``forge_config``.
    """
    if keep_noarchs is None:
        keep_noarchs = [False] * len(platforms)
    # None defaults instead of shared mutable {} / [] literals, so one
    # call can never leak state into the next.
    if extra_platform_files is None:
        extra_platform_files = {}
    if upload_packages is None:
        upload_packages = []
    # NOTE(review): upload_packages is zipped against platforms below; if it
    # is shorter (e.g. left empty), zip truncates and no configs are dumped.
    # Callers appear to always pass one flag per platform -- confirm.

    metas_list_of_lists = []
    enable_platform = [False] * len(platforms)
    for i, (platform, arch,
            keep_noarch) in enumerate(zip(platforms, archs, keep_noarchs)):
        metas = conda_build.api.render(
            os.path.join(forge_dir, "recipe"),
            exclusive_config_file=forge_config["exclusive_config_file"],
            platform=platform,
            arch=arch,
            permit_undefined_jinja=True,
            finalize=False,
            bypass_env_check=True,
            channel_urls=forge_config.get("channels", {}).get("sources", []),
        )
        # render returns some download & reparsing info that we don't care about
        metas = [m for m, _, _ in metas]

        if not keep_noarch:
            to_delete = []
            for idx, meta in enumerate(metas):
                if meta.noarch:
                    # do not build noarch, including noarch: python, packages on Travis CI.
                    to_delete.append(idx)
            # Delete from the end so earlier indices stay valid.
            for idx in reversed(to_delete):
                del metas[idx]

        # A platform is enabled if at least one meta is not skipped.
        for meta in metas:
            if not meta.skip():
                enable_platform[i] = True
        metas_list_of_lists.append(metas)

    # Clear out previously generated .ci_support configs so removed matrix
    # entries do not linger.
    if os.path.isdir(os.path.join(forge_dir, ".ci_support")):
        configs = glob.glob(
            os.path.join(forge_dir, ".ci_support",
                         "{}_*".format(provider_name)))
        for config in configs:
            remove_file(config)

        for platform in platforms:
            configs = glob.glob(
                os.path.join(forge_dir, ".ci_support",
                             "{}_*".format(platform)))
            for config in configs:
                remove_file(config)

    if not any(enable_platform):
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the provider's generated files if present.
        forge_config[provider_name]["enabled"] = False

        target_fnames = [platform_target_path]
        if extra_platform_files:
            for val in extra_platform_files.values():
                target_fnames.extend(val)
        for each_target_fname in target_fnames:
            remove_file(each_target_fname)
    else:
        forge_config[provider_name]["enabled"] = True
        fancy_name = {
            "linux": "Linux",
            "osx": "OSX",
            "win": "Windows",
            "linux_aarch64": "aarch64",
        }
        fancy_platforms = []
        unfancy_platforms = set()

        configs = []
        for metas, platform, arch, enable, upload in zip(
                metas_list_of_lists,
                platforms,
                archs,
                enable_platform,
                upload_packages,
        ):
            if enable:
                configs.extend(
                    dump_subspace_config_files(metas, forge_dir, platform,
                                               arch, upload))

                # "64" is the implicit default arch; other arches get an
                # explicit "<platform>_<arch>" key.
                plat_arch = (platform if arch == "64" else "{}_{}".format(
                    platform, arch))
                forge_config[plat_arch]["enabled"] = True

                fancy_platforms.append(fancy_name[platform])
                unfancy_platforms.add(plat_arch)
            elif platform in extra_platform_files:
                # Platform is disabled: drop its extra generated files.
                for each_target_fname in extra_platform_files[platform]:
                    remove_file(each_target_fname)

        # Extra files keyed by platforms we are not considering here are
        # stale too (but 'common' files are always kept).
        for key in extra_platform_files.keys():
            if key != "common" and key not in platforms:
                for each_target_fname in extra_platform_files[key]:
                    remove_file(each_target_fname)

        forge_config[provider_name]["platforms"] = ",".join(fancy_platforms)
        forge_config[provider_name]["all_platforms"] = list(unfancy_platforms)

        forge_config["configs"] = configs

        forge_config["fast_finish"] = _get_fast_finish_script(
            provider_name,
            forge_dir=forge_dir,
            forge_config=forge_config,
            fast_finish_text=fast_finish_text,
        )

        # If the recipe supplies its own upload_or_check_non_existence.py upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, "recipe",
                                    "upload_or_check_non_existence.py")
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                forge_config[
                    "upload_script"] = "/home/conda/recipe_root/upload_or_check_non_existence.py"
            elif provider_name == "travis":
                forge_config[
                    "upload_script"] = "{}/upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"])
            else:
                # Remaining providers run on Windows: backslash separator.
                forge_config[
                    "upload_script"] = "{}\\upload_or_check_non_existence.py".format(
                        forge_config["recipe_dir"])
        else:
            forge_config["upload_script"] = "upload_or_check_non_existence"

        # hook for extending with whatever platform specific junk we need.
        #     Function passed in as argument
        for platform, enable in zip(platforms, enable_platform):
            if enable:
                platform_specific_setup(
                    jinja_env=jinja_env,
                    forge_dir=forge_dir,
                    forge_config=forge_config,
                    platform=platform,
                )

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == "circle":
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    # TODO: azure-pipelines might need the same as circle
    return forge_config
Exemple #16
0
def main(forge_file_directory):
    """Re-render all CI files for the feedstock in *forge_file_directory*.

    Loads ``conda-forge.yml`` (if present) on top of the default
    configuration, removes files left over from older conda-smithy
    versions, copies the static feedstock content, and renders the
    docker-build, Circle, Travis, AppVeyor and README files.

    :param forge_file_directory: path of the feedstock directory.
    """
    if hasattr(conda_build, 'api'):
        build_config = conda_build.api.Config()
    else:
        build_config = conda_build.config.config

    # conda-build has some really fruity behaviour where it needs CONDA_NPY
    # and CONDA_PY in order to even read a meta. Because we compute version
    # matricies anyway the actual number makes absolutely no difference.
    build_config.CONDA_NPY = '99.9'
    build_config.CONDA_PY = 10

    recipe_dir = 'recipe'
    # Default configuration; conda-forge.yml entries are overlaid below.
    config = {'docker': {'image': 'condaforge/linux-anvil', 'command': 'bash'},
              'templates': {'run_docker_build': 'run_docker_build_matrix.tmpl'},
              'travis': {},
              'circle': {},
              'appveyor': {},
              'channels': {'sources': ['conda-forge', 'defaults'],
                           'targets': [['conda-forge', 'main']]},
              'github': {'user_or_org': 'conda-forge', 'repo_name': ''},
              'recipe_dir': recipe_dir}
    forge_dir = os.path.abspath(forge_file_directory)

    # An older conda-smithy used to have some files which should no longer exist,
    # remove those now.
    old_files = [
        'disabled_appveyor.yml',
        os.path.join('ci_support', 'upload_or_check_non_existence.py'),
    ]
    for old_file in old_files:
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn('No conda-forge.yml found. Assuming default options.')
    else:
        with open(forge_yml, "r") as fh:
            # safe_load_all: conda-forge.yml is plain data and must not be
            # able to construct arbitrary Python objects (yaml.load_all
            # without an explicit Loader is unsafe and deprecated).
            file_config = list(yaml.safe_load_all(fh))[0] or {}
        # The config is just the union of the defaults, and the overriden
        # values.
        for key, value in file_config.items():
            if isinstance(value, dict):
                # Deal with dicts within dicts: merge into the default dict.
                config_item = config.setdefault(key, value)
                config_item.update(value)
            else:
                # Plain values from the file always win over the defaults;
                # setdefault alone would silently keep an existing default.
                config[key] = value
    config['package'] = meta_of_feedstock(forge_file_directory, config=build_config)
    if not config['github']['repo_name']:
        # Derive the repo name from the directory, ensuring the
        # conventional "-feedstock" suffix.
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config['github']['repo_name'] = feedstock_name

    for each_ci in ["travis", "circle", "appveyor"]:
        if config[each_ci].pop("enabled", None):
            warnings.warn(
                "It is not allowed to set the `enabled` parameter for `%s`."
                " All CIs are enabled by default. To disable a CI, please"
                " add `skip: true` to the `build` section of `meta.yaml`"
                " and an appropriate selector so as to disable the build." \
                % each_ci
            )

    tmplt_dir = os.path.join(conda_forge_content, 'templates')
    # Load templates from the feedstock in preference to the smithy's templates.
    env = Environment(loader=FileSystemLoader([os.path.join(forge_dir, 'templates'),
                                               tmplt_dir]))

    copy_feedstock_content(forge_dir)

    render_run_docker_build(env, config, forge_dir)
    render_circle(env, config, forge_dir)
    render_travis(env, config, forge_dir)
    render_appveyor(env, config, forge_dir)
    render_README(env, config, forge_dir)
Exemple #17
0
def render_travis(jinja_env, forge_config, forge_dir):
    """Render ``.travis.yml`` for the feedstock's osx-64 builds.

    Computes the osx-64 build matrix (noarch recipes build nothing on
    Travis). If the matrix is empty the provider is disabled and the
    ``.travis.yml`` removed; otherwise the matrix, fast-finish script,
    build-setup snippet and upload script are stored on ``forge_config``
    and the template is rendered.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        if meta.noarch:
            # do not build noarch, including noarch: python, packages on Travis CI.
            matrix = []
        else:
            matrix = compute_build_matrix(
                meta, forge_config.get('matrix'),
                forge_config.get('channels', {}).get('sources', tuple()))

        kept_cases = []
        for candidate in matrix:
            pkgs, case_vars = split_case(candidate)
            # The case's variables must be active while evaluating skip().
            with enable_vars(case_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    kept_cases.append(case_vars + sorted(pkgs))
        matrix = sorted(kept_cases, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        forge_config = update_matrix(forge_config,
                                     prepare_matrix_for_env_vars(matrix))

        fast_finish = textwrap.dedent("""\
            ({get_fast_finish_script} | \\
                python - -v --ci "travis" "${{TRAVIS_REPO_SLUG}}" "${{TRAVIS_BUILD_NUMBER}}" "${{TRAVIS_PULL_REQUEST}}") || exit 1
        """)

        # A recipe-local ff_ci_pr_build.py takes precedence over the one
        # fetched from the conda-forge-build-setup feedstock.
        cfbs_fpath = os.path.join(forge_dir, 'recipe', 'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script = "cat {recipe_dir}/ff_ci_pr_build.py".format(
                recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script = "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script)
        # Re-indent the snippet for embedding in the YAML template.
        forge_config['fast_finish'] = fast_finish.strip().replace(
            "\n", "\n      ")

        # A recipe-local setup script likewise overrides the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup = textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup = textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        forge_config['build_setup'] = build_setup.strip().replace(
            "\n", "\n      ")

        # Same override rule applies to the upload script.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]))
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Exemple #18
0
def _load_forge_config(forge_dir, exclusive_config_file):
    """Build the feedstock configuration dict used for rendering.

    Starts from built-in defaults, overlays ``conda-forge.yml`` (if
    present), removes files generated by older conda-smithy versions,
    exports the compiler/python/R stack environment variables, and
    derives the GitHub repo name from *forge_dir*.

    :param forge_dir: path of the feedstock directory.
    :param exclusive_config_file: conda-build exclusive config file path,
        stored verbatim under ``exclusive_config_file`` in the result.
    :return: the merged configuration dict.
    :raises ValueError: if ``conda-forge.yml`` still contains a 2.x-style
        ``matrix`` without a ``conda_build_config.yaml`` to replace it.
    """
    # Built-in defaults; conda-forge.yml entries are overlaid below.
    config = {
        "docker": {
            "executable": "docker",
            "image": "condaforge/linux-anvil-comp7",
            "command": "bash",
            "interactive": True,
        },
        "templates": {},
        "travis": {},
        "circle": {},
        "appveyor": {},
        "azure": {
            # disallow publication of azure artifacts for now.
            "upload_packages": False,
            # Force building all supported providers.
            "force": True,
        },
        "provider": {
            "linux": "circle",
            "osx": "travis",
            "win": "appveyor",
            # Following platforms are disabled by default
            "linux_aarch64": None,
            "linux_ppc64le": None,
        },
        "win": {
            "enabled": False
        },
        "osx": {
            "enabled": False
        },
        "linux": {
            "enabled": False
        },
        "linux_aarch64": {
            "enabled": False
        },
        "linux_ppc64le": {
            "enabled": False
        },
        # Configurable idle timeout.  Used for packages that don't have chatty enough builds
        # Applicable only to circleci and travis
        "idle_timeout_minutes": None,
        # Compiler stack environment variable
        "compiler_stack": "comp7",
        # Stack variables,  These can be used to impose global defaults for how far we build out
        "min_py_ver": "27",
        "max_py_ver": "37",
        "min_r_ver": "34",
        "max_r_ver": "34",
        "channels": {
            "sources": ["conda-forge", "defaults"],
            "targets": [["conda-forge", "main"]],
        },
        "github": {
            "user_or_org": "conda-forge",
            "repo_name": "",
            "branch_name": "master",
        },
        "recipe_dir": "recipe",
    }

    # An older conda-smithy used to have some files which should no longer exist,
    # remove those now.
    old_files = [
        "disabled_appveyor.yml",
        os.path.join("ci_support", "upload_or_check_non_existence.py"),
        "circle.yml",
        "appveyor.yml",
        os.path.join("ci_support", "checkout_merge_commit.sh"),
        os.path.join("ci_support", "fast_finish_ci_pr_build.sh"),
        os.path.join("ci_support", "run_docker_build.sh"),
        "LICENSE",
    ]
    for old_file in old_files:
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn("No conda-forge.yml found. Assuming default options.")
    else:
        with open(forge_yml, "r") as fh:
            # safe_load_all: the yml is plain data, never Python objects.
            file_config = list(yaml.safe_load_all(fh))[0] or {}

        # check for conda-smithy 2.x matrix which we can't auto-migrate
        # to conda_build_config
        if file_config.get("matrix") and not os.path.exists(
                os.path.join(forge_dir, "recipe", "conda_build_config.yaml")):
            # FIXME: update docs URL
            raise ValueError(
                "Cannot rerender with matrix in conda-forge.yml."
                " Please migrate matrix to conda_build_config.yaml and try again."
                " See https://github.com/conda-forge/conda-smithy/wiki/Release-Notes-3.0.0.rc1"
                " for more info.")

        # The config is just the union of the defaults, and the overridden
        # values.
        for key, value in file_config.items():
            # Deal with dicts within dicts: merge into the default dict
            # (setdefault inserts the file's dict when no default exists).
            if isinstance(value, dict):
                config_item = config.setdefault(key, value)
                config_item.update(value)
            else:
                config[key] = value

    # Log the fully merged configuration for debuggability.
    log = yaml.safe_dump(config)
    print("## CONFIGURATION USED\n")
    print(log)
    print("## END CONFIGURATION\n")

    # "default" is a user-facing alias for azure on these platforms.
    for platform in ["linux_aarch64", "linux_ppc64le"]:
        if config["provider"][platform] == "default":
            config["provider"][platform] = "azure"

    # Set the environment variable for the compiler stack
    os.environ["CF_COMPILER_STACK"] = config["compiler_stack"]
    # Set valid ranges for the supported platforms
    os.environ["CF_MIN_PY_VER"] = config["min_py_ver"]
    os.environ["CF_MAX_PY_VER"] = config["max_py_ver"]
    os.environ["CF_MIN_R_VER"] = config["min_r_ver"]
    os.environ["CF_MAX_R_VER"] = config["max_r_ver"]

    config["package"] = os.path.basename(forge_dir)
    if not config["github"]["repo_name"]:
        # Derive the repo name, ensuring the conventional suffix.
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config["github"]["repo_name"] = feedstock_name
    config["exclusive_config_file"] = exclusive_config_file
    return config
def _render_ci_provider(provider_name,
                        jinja_env,
                        forge_config,
                        forge_dir,
                        platform,
                        arch,
                        fast_finish_text,
                        platform_target_path,
                        platform_template_file,
                        platform_specific_setup,
                        keep_noarch=False,
                        extra_platform_files=None):
    """Render a single CI provider's files for one platform/arch pair.

    The recipe is rendered for ``platform``/``arch``. If every resulting
    meta is skipped, the provider is disabled and its generated files
    removed; otherwise the subspace configs, fast-finish script and
    upload script are stored on ``forge_config`` and the provider's CI
    file is written from ``platform_template_file``.
    """
    rendered = conda_build.api.render(
        os.path.join(forge_dir, 'recipe'),
        exclusive_config_file=forge_config['exclusive_config_file'],
        platform=platform,
        arch=arch,
        permit_undefined_jinja=True,
        finalize=False,
        bypass_env_check=True,
        channel_urls=forge_config.get('channels', {}).get('sources', []))
    # Each entry is (meta, download, reparse); only the meta matters here.
    metas = [entry[0] for entry in rendered]

    if not keep_noarch:
        # Drop noarch outputs (including noarch: python) entirely.
        metas = [meta for meta in metas if not meta.noarch]

    ci_support = os.path.join(forge_dir, '.ci_support')
    if os.path.isdir(ci_support):
        # Remove stale configs from a previous render of this provider.
        for stale in glob.glob(
                os.path.join(ci_support, '{}_*'.format(provider_name))):
            remove_file(stale)

    if not metas or all(m.skip() for m in metas):
        # Nothing to build at all: disable the provider and delete every
        # file it would otherwise own.
        forge_config[provider_name]["enabled"] = False
        for fname in [platform_target_path] + list(extra_platform_files or []):
            remove_file(fname)
    else:
        forge_config[provider_name]["enabled"] = True

        forge_config['configs'] = dump_subspace_config_files(
            metas, forge_dir, provider_name)

        forge_config['fast_finish'] = _get_fast_finish_script(
            provider_name,
            forge_dir=forge_dir,
            forge_config=forge_config,
            fast_finish_text=fast_finish_text)

        # Prefer a recipe-local upload script over the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            if provider_name == "circle":
                script = "/home/conda/recipe_root/upload_or_check_non_existence.py"
            elif provider_name == "travis":
                script = "{}/upload_or_check_non_existence.py".format(
                    forge_config["recipe_dir"])
            else:
                script = "{}\\upload_or_check_non_existence.py".format(
                    forge_config["recipe_dir"])
            forge_config['upload_script'] = script
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # Hook for platform-specific extensions to forge_config.
        platform_specific_setup(jinja_env=jinja_env,
                                forge_dir=forge_dir,
                                forge_config=forge_config)

        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))

    # circleci needs a placeholder file of sorts - always write the output, even if no metas
    if provider_name == 'circle':
        template = jinja_env.get_template(platform_template_file)
        with write_file(platform_target_path) as fh:
            fh.write(template.render(**forge_config))
    return forge_config
def render_travis(jinja_env, forge_config, forge_dir):
    """Render ``.travis.yml`` for this feedstock's osx-64 builds.

    Computes the non-skipped build matrix under the ``osx-64`` subdir.  When
    every case is skipped, Travis is marked disabled and any existing
    ``.travis.yml`` is removed; otherwise the matrix plus the fast-finish,
    build-setup and upload shell snippets are rendered through the
    ``travis.yml.tmpl`` template into ``<forge_dir>/.travis.yml``.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(
            meta,
            forge_config.get('matrix'),
            forge_config.get('channels', {}).get('sources', tuple())
        )

        retained_cases = []
        for case in matrix:
            pkgs, env_vars = split_case(case)
            with enable_vars(env_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    retained_cases.append(env_vars + sorted(pkgs))
        matrix = sorted(retained_cases, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # Every case was skipped (even the bare one), so disable the
        # provider and make sure no stale .travis.yml survives.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
        return

    forge_config["travis"]["enabled"] = True
    forge_config = update_matrix(forge_config,
                                 prepare_matrix_for_env_vars(matrix))

    # Fast-finish snippet: stream the helper script into python so a
    # doomed PR build can be cancelled early.  A recipe-local copy of
    # ff_ci_pr_build.py takes precedence over the downloaded one.
    recipe_ff = os.path.join(forge_dir, 'recipe', 'ff_ci_pr_build.py')
    if os.path.exists(recipe_ff):
        get_fast_finish_script = (
            "cat {recipe_dir}/ff_ci_pr_build.py"
            .format(recipe_dir=forge_config["recipe_dir"]))
    else:
        get_fast_finish_script = (
            "curl https://raw.githubusercontent.com/conda-forge/"
            "conda-forge-build-setup-feedstock/master/recipe/"
            "ff_ci_pr_build.py")

    fast_finish = textwrap.dedent("""\
        ({get_fast_finish_script} | \\
            python - -v --ci "travis" "${{TRAVIS_REPO_SLUG}}" "${{TRAVIS_BUILD_NUMBER}}" "${{TRAVIS_PULL_REQUEST}}") || exit 1
    """).format(get_fast_finish_script=get_fast_finish_script)
    # Re-indent continuation lines to sit under the travis.yml list item.
    forge_config['fast_finish'] = (
        fast_finish.strip().replace("\n", "\n      "))

    # Build-setup snippet: a recipe-local setup script overrides the
    # globally installed conda-forge-build-setup.
    recipe_setup = os.path.join(forge_dir, 'recipe',
                                'run_conda_forge_build_setup_osx')
    if os.path.exists(recipe_setup):
        build_setup = textwrap.dedent("""\
            # Overriding global conda-forge-build-setup with local copy.
            source {recipe_dir}/run_conda_forge_build_setup_osx
        """.format(recipe_dir=forge_config["recipe_dir"]))
    else:
        build_setup = textwrap.dedent("""\
            source run_conda_forge_build_setup
        """)
    forge_config['build_setup'] = (
        build_setup.strip().replace("\n", "\n      "))

    # Upload snippet: a recipe-local upload_or_check_non_existence.py wins
    # over the globally installed entry point.
    recipe_upload = os.path.join(forge_dir, 'recipe',
                                 'upload_or_check_non_existence.py')
    if os.path.exists(recipe_upload):
        forge_config['upload_script'] = (
            "{recipe_dir}/upload_or_check_non_existence.py".format(
                recipe_dir=forge_config["recipe_dir"]))
    else:
        forge_config['upload_script'] = "upload_or_check_non_existence"

    template = jinja_env.get_template('travis.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Render ``appveyor.yml`` for this feedstock's Windows builds.

    Computes the non-skipped build matrix for both Windows subdirs
    (``win-32``/x86 and ``win-64``/x64).  When the combined matrix is empty,
    AppVeyor is marked disabled and any existing ``appveyor.yml`` is removed;
    otherwise the matrix (augmented with ``CONDA_PY``/``CONDA_INSTALL_LOCN``)
    plus the fast-finish, build-setup and upload command snippets are
    rendered through ``appveyor.yml.tmpl`` into ``<forge_dir>/appveyor.yml``.
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            meta.parse_again()
            matrix = compute_build_matrix(
                meta,
                forge_config.get('matrix'),
                forge_config.get('channels', {}).get('sources', tuple())
            )

            cases_not_skipped = []
            for case in matrix:
                pkgs, env_vars = split_case(case)
                with enable_vars(env_vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        cases_not_skipped.append(env_vars + sorted(pkgs))
            if cases_not_skipped:
                # Tag every surviving case with the architecture it was
                # computed for, so the template can distinguish them.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(case)
                                   for case in cases_not_skipped)
    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location for each case.
        matrix, old_matrix = [], matrix
        for case in old_matrix:
            case = odict(case)

            # Use Python 2.7 as a fallback when no Python version is set.
            case["CONDA_PY"] = case.get("CONDA_PY", "27")

            # Pick the pre-installed Miniconda whose `root` python matches:
            # C:\\Miniconda (2.7), C:\\Miniconda35, C:\\Miniconda36, with a
            # "-x64" suffix for 64-bit targets.  (The doubled backslashes
            # survive into the YAML, which needs them escaped.)
            case["CONDA_INSTALL_LOCN"] = "C:\\\\Miniconda"
            if case.get("CONDA_PY") == "35":
                case["CONDA_INSTALL_LOCN"] += "35"
            elif case.get("CONDA_PY") == "36":
                case["CONDA_INSTALL_LOCN"] += "36"

            if case.get("TARGET_ARCH") == "x64":
                case["CONDA_INSTALL_LOCN"] += "-x64"

            matrix.append(list(case.items()))
        del old_matrix

        forge_config = update_matrix(forge_config, matrix)

        # Fast-finish snippet: fetch (or reuse a recipe-local copy of) the
        # helper that cancels superseded PR builds early.
        get_fast_finish_script = ""
        fast_finish_script = ""
        fast_finish = textwrap.dedent("""\
            {get_fast_finish_script}
            {fast_finish_script} -v --ci "appveyor" "%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%" "%APPVEYOR_BUILD_NUMBER%" "%APPVEYOR_PULL_REQUEST_NUMBER%"
        """)

        # If the recipe supplies its own fast-finish script, we use it
        # instead of downloading the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            fast_finish_script += "{recipe_dir}\\ff_ci_pr_build".format(recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += '''powershell -Command "(New-Object Net.WebClient).DownloadFile('https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py', 'ff_ci_pr_build.py')"'''
            fast_finish_script += "ff_ci_pr_build"
            # Clean up the downloaded helper afterwards.
            fast_finish += "del {fast_finish_script}.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script,
            fast_finish_script=fast_finish_script,
        )

        # Re-indent continuation lines to sit under the appveyor.yml item.
        fast_finish = fast_finish.strip()
        fast_finish = fast_finish.replace("\n", "\n        ")

        forge_config['fast_finish'] = fast_finish

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        # Fix: probe for the Windows-named script that is actually invoked
        # below; previously this looked for 'run_conda_forge_build_setup_osx'
        # (a copy/paste from the travis renderer), so a recipe-local Windows
        # setup script was never detected.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_win')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                {recipe_dir}\\run_conda_forge_build_setup_win
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\

                run_conda_forge_build_setup
            """)

        # Turn each script line into an appveyor `- cmd:` list entry.
        build_setup = build_setup.rstrip()
        build_setup = build_setup.replace("\n", "\n    - cmd: ")
        build_setup = build_setup.lstrip()

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}\\upload_or_check_non_existence".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('appveyor.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Example #22
0
def _load_forge_config(forge_dir, exclusive_config_file):
    """Build the forge configuration dict for *forge_dir*.

    Starts from built-in defaults, removes files left behind by older
    conda-smithy releases, then overlays any settings found in the
    feedstock's ``conda-forge.yml`` (one level of dict merging).  Returns
    the resulting configuration dict.
    """
    # Built-in defaults; conda-forge.yml entries are merged on top of these.
    config = {'docker': {'executable': 'docker',
                         'image': 'condaforge/linux-anvil',
                         'command': 'bash'},
              'templates': {},
              'travis': {},
              'circle': {},
              'appveyor': {},
              'provider': {'linux': 'circle', 'osx': 'travis', 'win': 'appveyor'},
              'win': {'enabled': False},
              'osx': {'enabled': False},
              'linux': {'enabled': False},
              'channels': {'sources': ['conda-forge', 'defaults'],
                           'targets': [['conda-forge', 'main']]},
              'github': {'user_or_org': 'conda-forge', 'repo_name': ''},
              'recipe_dir': 'recipe'}

    # An older conda-smithy used to generate some files which should no
    # longer exist; drop any of them that are still present.
    for old_file in (
            'disabled_appveyor.yml',
            os.path.join('ci_support', 'upload_or_check_non_existence.py'),
            'circle.yml',
            'appveyor.yml',
            os.path.join('ci_support', 'checkout_merge_commit.sh'),
            os.path.join('ci_support', 'fast_finish_ci_pr_build.sh'),
            os.path.join('ci_support', 'run_docker_build.sh'),
            'LICENSE',
    ):
        remove_file(os.path.join(forge_dir, old_file))

    forge_yml = os.path.join(forge_dir, "conda-forge.yml")
    if not os.path.exists(forge_yml):
        warnings.warn('No conda-forge.yml found. Assuming default options.')
    else:
        # NOTE(review): yaml.load_all without an explicit Loader can run
        # arbitrary constructors; the file comes from the feedstock itself,
        # but yaml.safe_load would be safer — confirm before changing.
        with open(forge_yml, "r") as fh:
            file_config = list(yaml.load_all(fh))[0] or {}

        # A conda-smithy 2.x style matrix cannot be migrated automatically
        # to conda_build_config.yaml, so refuse to rerender.
        if file_config.get('matrix') and not os.path.exists(
            os.path.join(forge_dir, 'recipe', 'conda_build_config.yaml')
        ):
            # FIXME: update docs URL
            raise ValueError(
                'Cannot rerender with matrix in conda-forge.yml.'
                ' Please migrate matrix to conda_build_config.yaml and try again.'
                ' See https://github.com/conda-forge/conda-smithy/wiki/Release-Notes-3.0.0.rc1'
                ' for more info.')

        # Overlay the file's settings on the defaults; nested dicts are
        # merged key-by-key rather than replaced wholesale.
        for key, value in file_config.items():
            if isinstance(value, dict):
                config.setdefault(key, value).update(value)
            else:
                config[key] = value

    config['package'] = os.path.basename(forge_dir)

    # Derive the feedstock repository name when it is not set explicitly.
    if not config['github']['repo_name']:
        feedstock_name = os.path.basename(forge_dir)
        if not feedstock_name.endswith("-feedstock"):
            feedstock_name += "-feedstock"
        config['github']['repo_name'] = feedstock_name

    config['exclusive_config_file'] = exclusive_config_file
    return config
def clear_variants(forge_dir):
    """Remove every variant file placed in the ``.ci_support`` directory."""
    ci_support_dir = os.path.join(forge_dir, ".ci_support")
    if os.path.isdir(ci_support_dir):
        for variant_file in glob.glob(os.path.join(ci_support_dir, "*")):
            remove_file(variant_file)