def test(self):
        """End-to-end check: build recipes, upload them to a testing channel,
        then exercise ``--inspect-channel`` (no rebuild when the dist already
        exists) and ``--matrix-condition`` (filtered builds).
        """
        # Build a recipe.
        # NOTE(review): this first handle is immediately overwritten below —
        # presumably only the side effect of writing the 'py1' recipe matters;
        # confirm the name 'py2' here is just a slip.
        py2 = self.write_meta('py1', """
                    package:
                        name: python
                        version: 1.2.3
                    """)
        py2 = self.write_meta('py2', """
                    package:
                        name: python
                        version: 2.1.10
                    """)
        a = self.write_meta('a', """
                    package:
                        name: a
                        version: 3.1.4
                    requirements:
                        build:
                            - python
                        run:
                            - python
                    """)

        # Expected resolutions of 'a' against the two python builds above;
        # py99 is a control case that should never be produced.
        a_py12 = ResolvedDistribution(a, (('python', '12', ), ))
        a_py21 = ResolvedDistribution(a, (('python', '21', ), ))
        a_py99 = ResolvedDistribution(a, (('python', '99', ), ))

        testing_channel = '{}/channel/{}'.format(OWNER, 'testing')
        self.call([self.recipes_root_dir, '--upload-channel', testing_channel])

        # Check that we have started on the right footing - the distribution should be on testing,
        # but not on main.
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, py2, channel='testing'))
        self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, py2, channel='main'))

        # Check that we've had a py21 and py12, but not a py99 for a.
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py12, channel='testing'))
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing'))
        self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py99, channel='testing'))

        # Remove the built distribution, re-run, and assert that we didn't bother re-building.
        dist_path = os.path.join(self.conda_bld_root, conda.config.subdir, a_py21.pkg_fn())
        self.assertTrue(os.path.exists(dist_path))
        os.remove(dist_path)
        self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel])
        self.assertFalse(os.path.exists(dist_path))

        # Now put a condition in. In this case, only build dists for py<2
        CLIENT.remove_dist(OWNER, a_py21.name(), a_py21.version(), '{}/{}'.format(conda.config.subdir, a_py21.pkg_fn()))
        self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing'))
        self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel,
                   '--matrix-condition', 'python <2'])
        self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing'))
        self.assertFalse(os.path.exists(dist_path))

        # Without the condition, we should be re-building the distribution
        self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel])
        self.assertTrue(os.path.exists(dist_path))
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing'))
def render_travis(jinja_env, forge_config, forge_dir):
    """Render the feedstock's ``.travis.yml`` from the osx-64 build matrix,
    or delete it (and disable travis in the config) when every case skips.

    Parameters
    ----------
    jinja_env : jinja environment providing the ``travis.yml.tmpl`` template.
    forge_config : mutable forge configuration dict; its ``travis`` section
        is updated and the whole dict is fed to the template.
    forge_dir : feedstock root directory.
    """
    import errno  # local import: only needed for the tolerant remove below

    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        # Re-parse so osx-64 selectors take effect before computing the matrix.
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        cases_not_skipped = []
        for case in matrix:
            # 'env_vars' rather than 'vars' -- avoid shadowing the builtin.
            pkgs, env_vars = split_case(case)
            with enable_vars(env_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(env_vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        # Remove unconditionally, tolerating absence: the previous
        # exists()-then-remove() pair was racy (TOCTOU).
        try:
            os.remove(target_fname)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)
        template = jinja_env.get_template('travis.yml.tmpl')
        with open(target_fname, 'w') as fh:
            fh.write(template.render(**forge_config))
def render_travis(jinja_env, forge_config, forge_dir):
    """Write the feedstock's ``.travis.yml`` from the osx-64 build matrix.

    When every case is skipped, a single empty case is rendered instead so
    the generated configuration stays valid (it will be skipped at build
    time anyway).
    """
    with fudge_subdir('osx-64'):
        meta = forge_config['package']
        # Re-parse so the osx-64 selectors are honoured.
        meta.parse_again()
        kept_cases = []
        for case in compute_build_matrix(meta, forge_config.get('matrix')):
            pkgs, env_vars = split_case(case)
            with enable_vars(env_vars):
                dist = ResolvedDistribution(meta, pkgs)
                if not dist.skip():
                    kept_cases.append(env_vars + sorted(pkgs))
        matrix = sorted(kept_cases, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    # Fall back to one bare case when everything was skipped, to keep the
    # show on the road.
    matrix = matrix or [()]

    forge_config = update_matrix(forge_config,
                                 prepare_matrix_for_env_vars(matrix))

    template = jinja_env.get_template('travis.yml.tmpl')
    with open(target_fname, 'w') as fh:
        fh.write(template.render(**forge_config))
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Write (or remove) the feedstock's ``appveyor.yml``.

    The build matrix is computed once per Windows platform; a TARGET_ARCH
    environment variable is prepended to every surviving case.  When no case
    survives at all, AppVeyor is disabled and the file removed.
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in (('win-32', 'x86'), ('win-64', 'x64')):
        with fudge_subdir(platform, build_config=meta_config(meta)):
            # Re-parse so the platform's selectors are honoured.
            meta.parse_again()
            kept_cases = []
            for case in compute_build_matrix(meta, forge_config.get('matrix')):
                pkgs, env_vars = split_case(case)
                with enable_vars(env_vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        kept_cases.append(env_vars + sorted(pkgs))
            if kept_cases:
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                for case in kept_cases:
                    full_matrix.append([arch_env] + list(case))

    matrix = sorted(full_matrix, key=sort_without_target_arch)
    target_fname = os.path.join(forge_dir, 'appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
        return

    forge_config["appveyor"]["enabled"] = True
    forge_config = update_matrix(forge_config,
                                 prepare_matrix_for_env_vars(matrix))
    template = jinja_env.get_template('appveyor.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render ``ci_support/run_docker_build.sh`` from the linux-64 build
    matrix, or delete it (and disable circle in the config) when the matrix
    is empty.

    Honours an optional ``recipe/yum_requirements.txt`` by injecting a
    ``yum install`` line into the generated script, and marks the generated
    script executable.
    """
    import errno  # local import: only needed for the tolerant remove below

    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        # Re-parse so linux-64 selectors take effect before computing the matrix.
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        cases_not_skipped = []
        for case in matrix:
            # 'env_vars' rather than 'vars' -- avoid shadowing the builtin.
            pkgs, env_vars = split_case(case)
            with enable_vars(env_vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(env_vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False
        # Remove unconditionally, tolerating absence: the previous
        # exists()-then-remove() pair was racy (TOCTOU).
        try:
            os.remove(target_fname)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                # Keep non-blank, non-comment lines only.
                requirements = [
                    line.strip() for line in fh
                    if line.strip() and not line.strip().startswith('#')
                ]
            if not requirements:
                raise ValueError(
                    "No yum requirements enabled in the "
                    "yum_requirements.txt, please remove the file "
                    "or add some.")
            # Grammar fix in the emitted shell comment: "will be updated"
            # (previously read "this line be updated").
            build_setup = textwrap.dedent("""\
                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line will be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))
            forge_config['build_setup'] = build_setup

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        with open(target_fname, 'w') as fh:
            fh.write(template.render(**forge_config))
        # The generated script must be executable for CI to run it.
        st = os.stat(target_fname)
        os.chmod(target_fname, st.st_mode | stat.S_IEXEC)
 def test(self):
     """Build a trivial recipe and verify the returned package path.

     ``Builder.build`` should return an absolute path to the built tarball
     named ``<name>-<version>-<build_number>.tar.bz2``.
     """
     pkg1 = self.write_meta(
         'pkg1', """
                 package:
                     name: pkg1
                     version: 1.0
                 """)
     # NOTE(review): ``(())`` is just an empty tuple — i.e. no resolved
     # special-case packages; confirm ``((),)`` was not intended.
     pkg1_resolved = ResolvedDistribution(pkg1, (()))
     builder = Builder(None, None, None, None, None)
     r = builder.build(pkg1_resolved)
     self.assertTrue(os.path.exists(r))
     # The returned path is absolute ...
     self.assertEqual(os.path.abspath(r), r)
     # ... and named from the recipe with build number 0.
     self.assertEqual(os.path.basename(r), 'pkg1-1.0-0.tar.bz2')
Example #7
0
def render_appveyor(jinja_env, forge_config, forge_dir):
    """Prepare the AppVeyor build matrix for the feedstock.

    Computes a per-platform (win-32/win-64) matrix, drops noarch recipes
    (built elsewhere), tags each case with TARGET_ARCH, and decorates each
    case with the Miniconda install location for its python version.
    """
    meta = forge_config['package']
    full_matrix = []
    for platform, arch in [['win-32', 'x86'], ['win-64', 'x64']]:
        with fudge_subdir(platform, build_config=meta_config(meta)):
            # Re-parse so this platform's selectors are honoured.
            meta.parse_again()
            if meta.noarch:
                # do not build noarch, include noarch: python packages on AppVeyor.
                matrix = []
            else:
                matrix = compute_build_matrix(
                    meta, forge_config.get('matrix'),
                    forge_config.get('channels', {}).get('sources', tuple()))

            cases_not_skipped = []
            for case in matrix:
                pkgs, vars = split_case(case)
                with enable_vars(vars):
                    if not ResolvedDistribution(meta, pkgs).skip():
                        cases_not_skipped.append(vars + sorted(pkgs))
            if cases_not_skipped:
                # Prefix every surviving case with the platform's arch.
                arch_env = MatrixCaseEnvVar('TARGET_ARCH', arch)
                full_matrix.extend([arch_env] + list(case)
                                   for case in cases_not_skipped)
    matrix = sorted(full_matrix, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.appveyor.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the appveyor.yml if it exists.
        forge_config["appveyor"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["appveyor"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)

        # Specify AppVeyor Miniconda location.
        matrix, old_matrix = [], matrix
        for case in old_matrix:
            case = odict(case)

            # Use Python 2.7 as a fallback when no Python version is set.
            case["CONDA_PY"] = case.get("CONDA_PY", "27")

            # Set `root`'s `python` version.
            case["CONDA_INSTALL_LOCN"] = "C:\\\\Miniconda"
            if case.get("CONDA_PY") == "27":
                case["CONDA_INSTALL_LOCN"] += ""
            elif case.get("CONDA_PY") == "35":
                case["CONDA_INSTALL_LOCN"] += "35"
        # NOTE(review): this excerpt appears truncated here — the rebuilt
        # `matrix` is never appended to and no template is rendered in the
        # visible code; confirm against the original source.
Example #8
0
            def test(expect_skip=False):
                """Recompute the build matrix and, when *expect_skip* is set,
                assert that only the empty case survives (i.e. every real
                case was skipped).

                ``meta``, ``kwargs`` and ``self`` are captured from the
                enclosing test's scope.
                """
                meta.parse_again(**kwargs)

                matrix = cnfgr_fdstk.compute_build_matrix(
                    meta, channel_sources=["defaults", "conda-forge"])

                cases_not_skipped = []
                for case in matrix:
                    pkgs, vars = cnfgr_fdstk.split_case(case)
                    with cnfgr_fdstk.enable_vars(vars):
                        if not ResolvedDistribution(meta, pkgs).skip():
                            cases_not_skipped.append(vars + sorted(pkgs))

                if expect_skip:
                    # A fully-skipped matrix leaves exactly one empty case.
                    self.assertEqual(cases_not_skipped, [[]])
 def test_no_source(self):
     """Building a source-less recipe yields an absolute tarball path."""
     recipe = self.write_meta('pkg1', """
                 package:
                     name: pkg1
                     version: 1.0
                 """)
     dist = ResolvedDistribution(recipe, ())
     builder = Builder(None, None, None, None, None)
     # Newer conda-build exposes an api module; fall back otherwise.
     build_config = (conda_build.api.Config()
                     if hasattr(conda_build, 'api')
                     else conda_build.config.config)
     built_path = builder.build(dist, build_config)
     self.assertTrue(os.path.exists(built_path))
     self.assertEqual(os.path.abspath(built_path), built_path)
     self.assertEqual(os.path.basename(built_path), 'pkg1-1.0-0.tar.bz2')
 def test_extra_conditions(self):
     """extra_conditions filters which python versions get resolved."""
     recipe = self.write_meta("""
             package:
                 name: test_recipe
             requirements:
                 build:
                     - python
                 run:
                     - python
             """)
     for py_version in ('2.7.2', '2.6.2', '3.5.0'):
         self.index.add_pkg('python', py_version)
     resolved = ResolvedDistribution.resolve_all(
         recipe, self.index, extra_conditions=['python 2.6.*|>=3'])
     self.assertEqual([dist.build_id() for dist in resolved],
                      ['py26_0', 'py35_0'])
Example #11
0
 def test_extra_conditions(self):
     """Only python versions matching the extra condition are resolved."""
     recipe = self.write_meta("""
             package:
                 name: test_recipe
             requirements:
                 build:
                     - python
                 run:
                     - python
             """)
     self.index.add_pkg('python', '2.7.2')
     self.index.add_pkg('python', '2.6.2')
     self.index.add_pkg('python', '3.5.0')
     # 'python 2.6.*|>=3' excludes the 2.7 series.
     condition = ['python 2.6.*|>=3']
     dists = ResolvedDistribution.resolve_all(recipe, self.index,
                                              extra_conditions=condition)
     self.assertEqual([d.build_id() for d in dists], ['py26_0', 'py35_0'])
Example #12
0
    def test_py_version_selector_skip(self):
        """A ``[py3k]`` skip selector hits py35 builds but not py34 ones."""
        recipe = self.write_meta("""
            package:
                name: recipe_which_depends_on_py_version
            build:  # [py35]
                skip: True  # [py3k]
            """)
        py35_dist = ResolvedDistribution(recipe, (('python', '35'),))
        py34_dist = ResolvedDistribution(recipe, (('python', '34'),))

        # Only the python-3 build is skipped.
        self.assertEqual(py35_dist.skip(), True)
        self.assertEqual(py34_dist.skip(), False)
Example #13
0
    def test_py_version_selector(self):
        """Version selectors pick the recipe version per python series."""
        recipe = self.write_meta("""
            package:
                name: recipe_which_depends_on_py_version
                version: 3  # [py3k]
                version: 2  # [not py3k]
            """)
        # python 2.7 resolves to version 2; python 3.5 to version 3.
        for py_spec, expected_version in (('27', u'2'), ('35', u'3')):
            dist = ResolvedDistribution(recipe, (('python', py_spec),))
            self.assertEqual(dist.version(), expected_version)
 def test_skip_build(self):
     """py3k builds are skipped, so only py26/py27 distributions resolve."""
     recipe = self.write_meta("""
         package:
             name: recipe_which_depends_on_py_version
             version: 2
         build: # [py3k]
             skip: True  # [py3k]
         requirements:
             build:
                 - python
             run:
                 - python
         """)
     for py_version in ('2.7.2', '2.6.2', '3.5.0'):
         self.index.add_pkg('python', py_version)
     distributions = ResolvedDistribution.resolve_all(recipe, self.index)
     self.assertEqual([dist.build_id() for dist in distributions],
                      ['py26_0', 'py27_0'])
Example #15
0
 def test_skip_build(self):
     """Resolution drops the py3k case, which this recipe skips."""
     recipe = self.write_meta("""
         package:
             name: recipe_which_depends_on_py_version
             version: 2
         build: # [py3k]
             skip: True  # [py3k]
         requirements:
             build:
                 - python
             run:
                 - python
         """)
     self.index.add_pkg('python', '2.7.2')
     self.index.add_pkg('python', '2.6.2')
     self.index.add_pkg('python', '3.5.0')
     build_ids = [
         dist.build_id()
         for dist in ResolvedDistribution.resolve_all(recipe, self.index)
     ]
     self.assertEqual(build_ids, ['py26_0', 'py27_0'])
Example #16
0
def render_circle(jinja_env, forge_config, forge_dir):
    """Render the feedstock's ``circle.yml`` from the linux-64 build matrix."""
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        # Re-parse so linux-64 selectors are honoured.
        meta.parse_again()
        kept_cases = []
        for case in compute_build_matrix(meta, forge_config.get('matrix')):
            pkgs, env_vars = split_case(case)
            with enable_vars(env_vars):
                if ResolvedDistribution(meta, pkgs).skip():
                    continue
                kept_cases.append(env_vars + sorted(pkgs))
        matrix = sorted(kept_cases, key=sort_without_target_arch)

    forge_config = update_matrix(forge_config,
                                 prepare_matrix_for_env_vars(matrix))

    target_fname = os.path.join(forge_dir, 'circle.yml')
    template = jinja_env.get_template('circle.yml.tmpl')
    with write_file(target_fname) as fh:
        fh.write(template.render(**forge_config))
 def test_py_xx_version(self):
     """``x.x`` pins resolve once per compatible python/numpy combination."""
     recipe = self.write_meta("""
         package:
             name: recipe_which_depends_on_py_version
             version: 2
         requirements:
             build:
              - python >=2.7
              - numpy x.x
             run:
              - python x.x
              - numpy x.x
         """)
     for py_version in ('2.7.2', '2.6.2', '3.5.0'):
         self.index.add_pkg('python', py_version)
     self.index.add_pkg('numpy', '1.8.0', depends=['python'])
     build_ids = [
         dist.build_id()
         for dist in ResolvedDistribution.resolve_all(recipe, self.index)
     ]
     self.assertEqual(build_ids, ['np18py27_0', 'np18py35_0'])
Example #18
0
 def test_py_xx_version(self):
     """Resolving ``x.x`` pins yields one dist per viable python series."""
     recipe = self.write_meta("""
         package:
             name: recipe_which_depends_on_py_version
             version: 2
         requirements:
             build:
              - python >=2.7
              - numpy x.x
             run:
              - python x.x
              - numpy x.x
         """)
     self.index.add_pkg('python', '2.7.2')
     self.index.add_pkg('python', '2.6.2')
     self.index.add_pkg('python', '3.5.0')
     self.index.add_pkg('numpy', '1.8.0', depends=['python'])
     # python 2.6 fails the >=2.7 build requirement.
     expected = ['np18py27_0', 'np18py35_0']
     resolved_dists = ResolvedDistribution.resolve_all(recipe, self.index)
     self.assertEqual([d.build_id() for d in resolved_dists], expected)
Example #19
0
 def test_numpy_dep(self):
     """A ``numpy x.x`` pinned recipe builds with np/py in its filename."""
     recipe = self.write_meta('pkg1', """
                 package:
                     name: pkg1
                     version: 1.0
                 requirements:
                     build:
                         - python
                         - numpy x.x
                     run:
                         - python
                         - numpy x.x
                 """)
     resolved = ResolvedDistribution(
         recipe, (['python', '3.5'], ['numpy', '1.11']))
     builder = Builder(None, None, None, None, None)
     # Newer conda-build exposes an api module; fall back otherwise.
     build_config = (conda_build.api.Config()
                     if hasattr(conda_build, 'api')
                     else conda_build.config.config)
     built_path = builder.build(resolved, build_config)
     self.assertTrue(os.path.exists(built_path))
     self.assertEqual(os.path.abspath(built_path), built_path)
     self.assertEqual(os.path.basename(built_path),
                      'pkg1-1.0-np111py35_0.tar.bz2')
Example #20
0
    def test(self):
        """End-to-end check: build recipes, upload them to a testing channel,
        then exercise ``--inspect-channel`` (no rebuild when the dist already
        exists) and ``--matrix-condition`` (filtered builds).
        """
        # Build a recipe.
        # NOTE(review): this first handle is immediately overwritten below —
        # presumably only the side effect of writing the 'py1' recipe matters;
        # confirm the name 'py2' here is just a slip.
        py2 = self.write_meta(
            'py1', """
                    package:
                        name: python
                        version: 1.2.3
                    """)
        py2 = self.write_meta(
            'py2', """
                    package:
                        name: python
                        version: 2.1.10
                    """)
        a = self.write_meta(
            'a', """
                    package:
                        name: a
                        version: 3.1.4
                    requirements:
                        build:
                            - python
                        run:
                            - python
                    """)

        # Expected resolutions of 'a' against the two python builds above;
        # py99 is a control case that should never be produced.
        a_py12 = ResolvedDistribution(a, ((
            'python',
            '12',
        ), ))
        a_py21 = ResolvedDistribution(a, ((
            'python',
            '21',
        ), ))
        a_py99 = ResolvedDistribution(a, ((
            'python',
            '99',
        ), ))

        testing_channel = '{}/channel/{}'.format(OWNER, 'testing')
        self.call([self.recipes_root_dir, '--upload-channel', testing_channel])

        # Check that we have started on the right footing - the distribution should be on testing,
        # but not on main.
        self.assertTrue(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           py2,
                                           channel='testing'))
        self.assertFalse(
            distribution_exists_on_channel(CLIENT, OWNER, py2, channel='main'))

        # Check that we've had a py21 and py12, but not a py99 for a.
        self.assertTrue(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py12,
                                           channel='testing'))
        self.assertTrue(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py21,
                                           channel='testing'))
        self.assertFalse(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py99,
                                           channel='testing'))

        # Remove the built distribution, re-run, and assert that we didn't bother re-building.
        dist_path = os.path.join(self.conda_bld_root, conda.config.subdir,
                                 a_py21.pkg_fn())
        self.assertTrue(os.path.exists(dist_path))
        os.remove(dist_path)
        self.call([
            self.recipes_root_dir, '--inspect-channel', testing_channel,
            '--upload-channel', testing_channel
        ])
        self.assertFalse(os.path.exists(dist_path))

        # Now put a condition in. In this case, only build dists for py<2
        CLIENT.remove_dist(
            OWNER, a_py21.name(), a_py21.version(),
            '{}/{}'.format(conda.config.subdir, a_py21.pkg_fn()))
        self.assertFalse(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py21,
                                           channel='testing'))
        self.call([
            self.recipes_root_dir, '--inspect-channel', testing_channel,
            '--upload-channel', testing_channel, '--matrix-condition',
            'python <2'
        ])
        self.assertFalse(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py21,
                                           channel='testing'))
        self.assertFalse(os.path.exists(dist_path))

        # Without the condition, we should be re-building the distribution
        self.call([
            self.recipes_root_dir, '--inspect-channel', testing_channel,
            '--upload-channel', testing_channel
        ])
        self.assertTrue(os.path.exists(dist_path))
        self.assertTrue(
            distribution_exists_on_channel(CLIENT,
                                           OWNER,
                                           a_py21,
                                           channel='testing'))
Example #21
0
def render_travis(jinja_env, forge_config, forge_dir):
    """Render the feedstock's ``.travis.yml`` from the osx-64 build matrix,
    wiring in fast-finish, build-setup and upload scripts, or remove the
    file (and disable travis) when there is nothing to build.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        # Re-parse so the osx-64 selectors are honoured.
        meta.parse_again()
        if meta.noarch:
            # do not build noarch, including noarch: python, packages on Travis CI.
            matrix = []
        else:
            matrix = compute_build_matrix(
                meta, forge_config.get('matrix'),
                forge_config.get('channels', {}).get('sources', tuple()))

        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        # Shell fragment that aborts the build early for superseded PRs.
        fast_finish = textwrap.dedent("""\
            ({get_fast_finish_script} | \\
                python - -v --ci "travis" "${{TRAVIS_REPO_SLUG}}" "${{TRAVIS_BUILD_NUMBER}}" "${{TRAVIS_PULL_REQUEST}}") || exit 1
        """)
        get_fast_finish_script = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe', 'ff_ci_pr_build.py')
        if os.path.exists(cfbs_fpath):
            get_fast_finish_script += "cat {recipe_dir}/ff_ci_pr_build.py".format(
                recipe_dir=forge_config["recipe_dir"])
        else:
            get_fast_finish_script += "curl https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py"

        fast_finish = fast_finish.format(
            get_fast_finish_script=get_fast_finish_script)

        # Re-indent for embedding at YAML list-item depth.
        fast_finish = fast_finish.strip()
        fast_finish = fast_finish.replace("\n", "\n      ")

        forge_config['fast_finish'] = fast_finish

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        # Re-indent for embedding at YAML list-item depth.
        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]))
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Example #22
0
def render_travis(jinja_env, forge_config, forge_dir):
    """Render the feedstock's ``.travis.yml`` from the osx-64 build matrix,
    wiring in the build-setup and upload scripts, or remove the file (and
    disable travis) when there is nothing to build.
    """
    meta = forge_config['package']
    with fudge_subdir('osx-64', build_config=meta_config(meta)):
        # Re-parse so the osx-64 selectors are honoured.
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))

        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    target_fname = os.path.join(forge_dir, '.travis.yml')

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the .travis.yml if it exists.
        forge_config["travis"]["enabled"] = False
        remove_file(target_fname)
    else:
        forge_config["travis"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_osx')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source {recipe_dir}/run_conda_forge_build_setup_osx
            """.format(recipe_dir=forge_config["recipe_dir"]))
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup
            """)

        # Re-indent for embedding at YAML list-item depth.
        build_setup = build_setup.strip()
        build_setup = build_setup.replace("\n", "\n      ")

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "{recipe_dir}/upload_or_check_non_existence.py".format(
                    recipe_dir=forge_config["recipe_dir"]
                )
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        template = jinja_env.get_template('travis.yml.tmpl')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))
Example #23
0
def render_run_docker_build(jinja_env, forge_config, forge_dir):
    """Render (or remove) the Circle CI ``run_docker_build.sh`` script.

    Computes the linux-64 build matrix for the recipe.  If no case survives
    the skip checks, Circle CI is disabled in ``forge_config`` and the
    ci_support scripts are removed.  Otherwise Circle CI is enabled, the
    build-setup and upload snippets are assembled (honouring any local
    overrides in the recipe directory) and the template is rendered into
    ``ci_support/run_docker_build.sh``.

    Parameters:
        jinja_env: Jinja2 environment used to look up the template.
        forge_config: mutable feedstock configuration dict; this function
            updates its ``circle``/``build_setup``/``upload_script`` entries.
        forge_dir: root directory of the feedstock on disk.
    """
    meta = forge_config['package']
    with fudge_subdir('linux-64', build_config=meta_config(meta)):
        meta.parse_again()
        matrix = compute_build_matrix(meta, forge_config.get('matrix'))
        cases_not_skipped = []
        for case in matrix:
            pkgs, vars = split_case(case)
            with enable_vars(vars):
                if not ResolvedDistribution(meta, pkgs).skip():
                    cases_not_skipped.append(vars + sorted(pkgs))
        matrix = sorted(cases_not_skipped, key=sort_without_target_arch)

    # The two scripts this renderer manages: removed together when the
    # matrix is empty, made executable together after rendering.
    ci_support_scripts = [
        os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh'),
        os.path.join(forge_dir, 'ci_support', 'checkout_merge_commit.sh'),
    ]

    if not matrix:
        # There are no cases to build (not even a case without any special
        # dependencies), so remove the run_docker_build.sh if it exists.
        forge_config["circle"]["enabled"] = False
        for each_target_fname in ci_support_scripts:
            remove_file(each_target_fname)
    else:
        forge_config["circle"]["enabled"] = True
        matrix = prepare_matrix_for_env_vars(matrix)
        forge_config = update_matrix(forge_config, matrix)

        build_setup = ""

        # If the recipe supplies its own conda-forge-build-setup script,
        # we use it instead of the global one.
        cfbs_fpath = os.path.join(forge_dir, 'recipe',
                                  'run_conda_forge_build_setup_linux')
        if os.path.exists(cfbs_fpath):
            build_setup += textwrap.dedent("""\
                # Overriding global conda-forge-build-setup with local copy.
                source /recipe_root/run_conda_forge_build_setup_linux

            """)
        else:
            build_setup += textwrap.dedent("""\
                source run_conda_forge_build_setup

            """)

        # If there is a "yum_requirements.txt" file in the recipe, we honour it.
        yum_requirements_fpath = os.path.join(forge_dir, 'recipe',
                                              'yum_requirements.txt')
        if os.path.exists(yum_requirements_fpath):
            with open(yum_requirements_fpath) as fh:
                requirements = [line.strip() for line in fh
                                if line.strip() and not line.strip().startswith('#')]
            # An empty file is almost certainly a mistake; fail loudly rather
            # than silently emitting a no-op "yum install".
            if not requirements:
                raise ValueError("No yum requirements enabled in the "
                                 "yum_requirements.txt, please remove the file "
                                 "or add some.")
            build_setup += textwrap.dedent("""\

                # Install the yum requirements defined canonically in the
                # "recipe/yum_requirements.txt" file. After updating that file,
                # run "conda smithy rerender" and this line will be updated
                # automatically.
                yum install -y {}


            """.format(' '.join(requirements)))

        forge_config['build_setup'] = build_setup

        # If the recipe supplies its own conda-forge-build-setup upload script,
        # we use it instead of the global one.
        upload_fpath = os.path.join(forge_dir, 'recipe',
                                    'upload_or_check_non_existence.py')
        if os.path.exists(upload_fpath):
            forge_config['upload_script'] = (
                "/recipe_root/upload_or_check_non_existence.py"
            )
        else:
            forge_config['upload_script'] = "upload_or_check_non_existence"

        # TODO: Conda has a convenience for accessing nested yaml content.
        templates = forge_config.get('templates', {})
        template_name = templates.get('run_docker_build',
                                      'run_docker_build_matrix.tmpl')

        template = jinja_env.get_template(template_name)
        target_fname = os.path.join(forge_dir, 'ci_support', 'run_docker_build.sh')
        with write_file(target_fname) as fh:
            fh.write(template.render(**forge_config))

        # Fix permissions: the rendered scripts must be executable.
        for each_target_fname in ci_support_scripts:
            set_exe_file(each_target_fname, True)