Example #1
0
def test_filter_recipes_skip_py27():
    """
    When we add build/skip = True # [py27] to recipe, it should not be
    filtered out. This is because python version is not encoded in the output
    package name, and so one-0.1-0.tar.bz2 will still be created for py35.
    """
    recipes_obj = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              skip: true  # [py27]
        """, from_string=True)
    recipes_obj.write_recipes()
    # Filter against a two-python matrix; the recipe must survive exactly once.
    env_matrix = dict(CONDA_PY=[27, 35], CONDA_BOOST='1.60')
    filtered = list(utils.filter_recipes(
        list(recipes_obj.recipe_dirs.values()),
        env_matrix,
        channels=['bioconda']))
    assert len(filtered) == 1
Example #2
0
def test_compiler(config_fixture):
    """
    A recipe whose build requirements use ``{{ compiler('c') }}`` (plus
    python in host/run) should build successfully.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              build:
                - {{ compiler('c') }}
              host:
                - python
              run:
                - python
        """, from_string=True)
    r.write_recipes()
    # Build everything under the Recipes base dir; skip the mulled container test.
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result

    # Every expected package path must exist; ensure_missing() cleans it up.
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
Example #3
0
def test_filter_recipes_extra_in_build_string():
    """
    If CONDA_EXTRA is in os.environ, the pkg name should still be identifiable.

    This helps test env vars that don't have other defaults like CONDA_PY does
    (e.g., CONDA_BOOST in bioconda)
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              number: 0
              string: {{CONDA_EXTRA}}_{{PKG_BUILDNUM}}
        """,
                from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']

    # Local import: bldpkg_path is only needed for the diagnostic print below.
    from conda_build.render import bldpkg_path

    # Render with CONDA_EXTRA supplied via the conda-build Config and print
    # the path conda-build itself would produce.
    metadata = MetaData(recipe, api.Config(**dict(CONDA_EXTRA='asdf')))
    print(bldpkg_path(metadata, metadata.config))

    # NOTE(review): mutates os.environ without restoring it afterward, so
    # CONDA_EXTRA leaks into subsequent tests -- confirm this is intended.
    os.environ['CONDA_EXTRA'] = 'asdf'
    pkg = utils.built_package_path(recipe)
    assert os.path.basename(pkg) == 'one-0.1-asdf_0.tar.bz2'
Example #4
0
def test_variants():
    """
    Multiple variants should return multiple metadata.

    Writes a temporary conda_build_config.yaml declaring two `mypkg`
    versions and checks that load_all_meta() renders one metadata per
    variant. The temp file is removed on exit (the original leaked it:
    NamedTemporaryFile(delete=False) is never auto-deleted).
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - mypkg {{ mypkg }}
        """, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']

    # Write a temporary conda_build_config.yaml that we'll point the config
    # object to:
    tmp = tempfile.NamedTemporaryFile(delete=False).name
    try:
        with open(tmp, 'w') as fout:
            fout.write(
                dedent(
                    """
                    mypkg:
                      - 1.0
                      - 2.0
                    """))
        config = utils.load_conda_build_config()
        config.exclusive_config_file = tmp

        # Two variant values for mypkg -> two rendered metadata objects.
        assert len(utils.load_all_meta(recipe, config)) == 2
    finally:
        # delete=False means nothing removes the file automatically; clean it
        # up so repeated runs don't accumulate temp files.
        os.unlink(tmp)
Example #5
0
def test_cb3_outputs(config_fixture):
    """
    A recipe with a conda-build-3 ``outputs:`` section (libone / py-one)
    should build, and every expected package path should exist afterward.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"

            outputs:
              - name: libone
              - name: py-one
                requirements:
                  - {{ pin_subpackage('libone', exact=True) }}
                  - python  {{ python }}

        """, from_string=True)
    r.write_recipes()
    # (removed a no-op expression statement `r.recipe_dirs['one']` whose
    # result was never used)

    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result

    # Every expected package path must exist; ensure_missing() cleans it up.
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
Example #6
0
def test_bioconda_pins(caplog, config_fixture):
    """
    htslib currently only provided by bioconda pinnings
    """
    caplog.set_level(logging.DEBUG)
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - htslib
        """, from_string=True)
    recipes.write_recipes()
    # Build all recipes under the base dir without the container test.
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )

    for recipe_dir in recipes.recipe_dirs.values():
        for pkg in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg)
            ensure_missing(pkg)
Example #7
0
def test_filter_recipes_existing_package():
    "use a known-to-exist package in bioconda"

    # note that we need python as a run requirement in order to get the "pyXY"
    # in the build string that matches the existing bioconda built package.
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: gffutils
              version: "0.8.7.1"
            requirements:
              run:
                - python
        """,
                from_string=True)
    r.write_recipes()
    recipes = list(r.recipe_dirs.values())
    env_matrix = {
        'CONDA_PY': [27, 35],
    }
    # (removed unused `pkgs` and `pth` locals -- their values were never read)
    # Package already exists on the channel, so everything is filtered out.
    filtered = list(
        utils.filter_recipes(recipes, env_matrix, channels=['bioconda']))
    assert len(filtered) == 0
Example #8
0
def test_build_empty_extra_container():
    """An empty extra/container section must not break a mulled-test build."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    recipes.write_recipes()
    recipe_dir = recipes.recipe_dirs['one']
    expected_pkgs = utils.built_package_paths(recipe_dir)

    result = build.build(
        recipe=recipe_dir,
        recipe_folder='.',
        pkg_paths=expected_pkgs,
        mulled_test=True,
    )
    assert result.success
    for pkg_path in expected_pkgs:
        assert os.path.exists(pkg_path)
        ensure_missing(pkg_path)
Example #9
0
def test_built_package_paths():
    """
    The package filename for recipe `one` must encode the python version
    (py36) and build number 0.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6

        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """, from_string=True)
    r.write_recipes()

    # Recipe `two` is written but not asserted on here.
    assert os.path.basename(
        utils.built_package_paths(r.recipe_dirs['one'])[0]
    ) == 'one-0.1-py36_0.tar.bz2'
Example #10
0
def test_get_deps():
    """get_deps() yields build deps by default and run deps with build=False."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            requirements:
              build:
                - one
        three:
          meta.yaml: |
            package:
              name: three
              version: 0.1
            requirements:
              build:
                - one
              run:
                - two
    """, from_string=True)
    recipes.write_recipes()
    two_dir = recipes.recipe_dirs['two']
    three_dir = recipes.recipe_dirs['three']
    assert list(utils.get_deps(two_dir)) == ['one']
    assert list(utils.get_deps(three_dir, build=True)) == ['one']
    assert list(utils.get_deps(three_dir, build=False)) == ['two']
Example #11
0
def test_filter_recipes_custom_buildstring():
    "use a known-to-exist package in bioconda"

    # note that we need python as a run requirement in order to get the "pyXY"
    # in the build string that matches the existing bioconda built package.
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: pindel
              version: "0.2.5b8"
            build:
              number: 2
              skip: True  # [osx]
              string: "htslib{{CONDA_HTSLIB}}_{{PKG_BUILDNUM}}"
            requirements:
              run:
                - python
        """,
                from_string=True)
    r.write_recipes()
    recipes = list(r.recipe_dirs.values())
    env_matrix = {
        'CONDA_HTSLIB': "1.4",
    }
    # The custom build string (htslib1.4_2) matches an existing bioconda
    # package, so the recipe is filtered out entirely.
    filtered = list(
        utils.filter_recipes(recipes, env_matrix, channels=['bioconda']))
    assert len(filtered) == 0
def test_conda_forge_pins(caplog):
    """
    A recipe whose run requirement uses the jinja-pinned ``zlib {{ zlib }}``
    should build, here with an empty config dict.
    """
    caplog.set_level(logging.DEBUG)
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - zlib {{ zlib }}
        """,
                from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config={},
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result

    # Every expected package path must exist; ensure_missing() cleans it up.
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
Example #13
0
def test_bioconda_pins(caplog, config_fixture):
    """
    htslib currently only provided by bioconda pinnings
    """
    caplog.set_level(logging.DEBUG)
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - htslib
        """, from_string=True)
    r.write_recipes()
    # Build everything under the base dir; skip the mulled container test.
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result

    # Every expected package path must exist; ensure_missing() cleans it up.
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
Example #14
0
def test_variants():
    """
    Multiple variants should return multiple metadata.

    Writes a temporary conda_build_config.yaml declaring two `mypkg`
    versions and checks that load_all_meta() renders one metadata per
    variant. The temp file is removed on exit (the original leaked it:
    NamedTemporaryFile(delete=False) is never auto-deleted).
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - mypkg {{ mypkg }}
        """, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']

    # Write a temporary conda_build_config.yaml that we'll point the config
    # object to:
    tmp = tempfile.NamedTemporaryFile(delete=False).name
    try:
        with open(tmp, 'w') as fout:
            fout.write(
                dedent(
                    """
                    mypkg:
                      - 1.0
                      - 2.0
                    """))
        config = utils.load_conda_build_config()
        config.exclusive_config_file = tmp

        # Two variant values for mypkg -> two rendered metadata objects.
        assert len(utils.load_all_meta(recipe, config)) == 2
    finally:
        # delete=False means nothing removes the file automatically; clean it
        # up so repeated runs don't accumulate temp files.
        os.unlink(tmp)
Example #15
0
def test_filter_recipes_force_existing_package():
    "same as above but force the recipe"

    # same as above, but this time force the recipe
    # TODO: refactor as py.test fixture
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: gffutils
              version: "0.8.7.1"
            requirements:
              run:
                - python
        """,
                from_string=True)
    r.write_recipes()
    recipes = list(r.recipe_dirs.values())
    env_matrix = {
        'CONDA_PY': [27, 35],
    }
    # (removed unused `pkgs` and `pth` locals -- their values were never read)
    # force=True keeps the recipe even though the package already exists.
    filtered = list(
        utils.filter_recipes(recipes,
                             env_matrix,
                             channels=['bioconda'],
                             force=True))
    assert len(filtered) == 1
Example #16
0
def test_compiler(config_fixture):
    """A recipe using {{ compiler('c') }} in its build requirements builds."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              build:
                - {{ compiler('c') }}
              host:
                - python
              run:
                - python
        """, from_string=True)
    recipes.write_recipes()
    # Build all recipes under the base dir without the container test.
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )

    for recipe_dir in recipes.recipe_dirs.values():
        for pkg in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg)
            ensure_missing(pkg)
Example #17
0
def test_built_package_paths():
    """
    The package filename for recipe `one` must encode py36, the dependency
    hash that newer conda-build adds, and build number 0.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6

        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """, from_string=True)
    r.write_recipes()

    # Newer conda-build versions add the channel_targets and target_platform to the hash
    platform = 'linux' if sys.platform == 'linux' else 'osx'
    d = {"channel_targets": "bioconda main", "target_platform": "{}-64".format(platform)}
    # 7-character hash suffix, same length conda-build uses in build strings.
    h = metadata._hash_dependencies(d, 7)

    assert os.path.basename(
        utils.built_package_paths(r.recipe_dirs['one'])[0]
    ) == 'one-0.1-py36{}_0.tar.bz2'.format(h)
Example #18
0
def test_built_package_paths():
    """Package filename for recipe `one` must encode py36 and build number 0."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6

        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """, from_string=True)
    recipes.write_recipes()

    first_path = utils.built_package_paths(recipes.recipe_dirs['one'])[0]
    assert os.path.basename(first_path) == 'one-0.1-py36_0.tar.bz2'
Example #19
0
def test_env_sandboxing():
    """
    GITHUB_TOKEN set in the outer environment must not be visible inside the
    build environment; build.sh exits 1 if it sees the variable.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
          build.sh: |
            #!/bin/bash
            if [[ -z $GITHUB_TOKEN ]]
            then
                exit 0
            else
                echo "\$GITHUB_TOKEN has leaked into the build environment!"
                exit 1
            fi
    """, from_string=True)
    r.write_recipes()
    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])

    # Export GITHUB_TOKEN only for the duration of the build.
    with utils.temp_env({'GITHUB_TOKEN': 'token_here'}):
        build.build(
            recipe=r.recipe_dirs['one'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False
        )

    # The build succeeded (packages exist) only if the token did not leak.
    for pkg in pkg_paths:
        assert os.path.exists(pkg)
        ensure_missing(pkg)
Example #20
0
def test_filter_recipes_extra_in_build_string():
    """
    If CONDA_EXTRA is in os.environ, the pkg name should still be identifiable.

    This helps test env vars that don't have other defaults like CONDA_PY does
    (e.g., CONDA_BOOST in bioconda)
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              number: 0
              string: {{CONDA_EXTRA}}_{{PKG_BUILDNUM}}
        """,
                from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']

    env = {
        'CONDA_EXTRA': 'asdf',
    }
    # built_package_path() returns a full path; the original applied
    # os.path.basename twice (idempotent, so a harmless no-op) -- once in the
    # assertion below is enough.
    pkg = utils.built_package_path(recipe, env)

    assert os.path.basename(pkg) == 'one-0.1-asdf_0.tar.bz2'
Example #21
0
def test_built_package_path2():
    """
    built_package_path() should pick up CONDA_NCURSES either from a passed-in
    os.environ or from an explicit env dict.

    The original mutated os.environ and deleted the key unconditionally: a
    failing first assertion leaked CONDA_NCURSES into later tests, and any
    pre-existing value was destroyed. The try/finally below restores the
    prior state in all cases.
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              run:
                - python

        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """,
                from_string=True)
    r.write_recipes()

    saved = os.environ.get('CONDA_NCURSES')
    os.environ['CONDA_NCURSES'] = '9.0'
    try:
        # Case 1: the variable comes from the process environment.
        assert os.path.basename(
            utils.built_package_path(
                r.recipe_dirs['two'],
                env=os.environ)) == 'two-0.1-ncurses9.0_0.tar.bz2'
    finally:
        if saved is None:
            os.environ.pop('CONDA_NCURSES', None)
        else:
            os.environ['CONDA_NCURSES'] = saved

    # Case 2: the variable comes from an explicit env dict only.
    assert os.path.basename(
        utils.built_package_path(
            r.recipe_dirs['two'],
            env=dict(CONDA_NCURSES='9.0'))) == 'two-0.1-ncurses9.0_0.tar.bz2'
Example #22
0
def test_env_sandboxing():
    """build.sh exits 1 if $GITHUB_TOKEN is visible inside the build env."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
          build.sh: |
            #!/bin/bash
            if [[ -z $GITHUB_TOKEN ]]
            then
                exit 0
            else
                echo "\$GITHUB_TOKEN has leaked into the build environment!"
                exit 1
            fi
    """, from_string=True)
    recipes.write_recipes()
    recipe_dir = recipes.recipe_dirs['one']
    expected = utils.built_package_paths(recipe_dir)

    # Even with GITHUB_TOKEN exported in the outer environment, the build
    # script must not see it.
    with utils.temp_env({'GITHUB_TOKEN': 'token_here'}):
        build.build(
            recipe=recipe_dir,
            recipe_folder='.',
            pkg_paths=expected,
            mulled_test=False
        )

    for pkg in expected:
        assert os.path.exists(pkg)
        ensure_missing(pkg)
Example #23
0
def test_conda_forge_pins(caplog, config_fixture):
    """A recipe pinned via the jinja var ``zlib {{ zlib }}`` should build."""
    caplog.set_level(logging.DEBUG)
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - zlib {{ zlib }}
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )

    for recipe_dir in recipes.recipe_dirs.values():
        for pkg in utils.built_package_paths(recipe_dir):
            # Diagnostic: show what actually landed in the output directory.
            print(os.listdir(os.path.dirname(pkg)))
            assert os.path.exists(pkg)
            ensure_missing(pkg)
Example #24
0
def test_build_empty_extra_container():
    """
    An empty extra/container section must not break a mulled-test build.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    r.write_recipes()
    pkgs = utils.built_package_paths(r.recipe_dirs['one'])

    # mulled_test=True exercises the container test path as well.
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=pkgs,
        mulled_test=True,
    )
    assert build_result.success
    for pkg in pkgs:
        assert os.path.exists(pkg)
        ensure_missing(pkg)
Example #25
0
def test_get_deps():
    """
    get_deps() yields build dependencies by default and run dependencies
    when called with build=False.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            requirements:
              build:
                - one
        three:
          meta.yaml: |
            package:
              name: three
              version: 0.1
            requirements:
              build:
                - one
              run:
                - two
    """, from_string=True)
    r.write_recipes()
    assert list(utils.get_deps(r.recipe_dirs['two'])) == ['one']
    assert list(utils.get_deps(r.recipe_dirs['three'], build=True)) == ['one']
    assert list(utils.get_deps(r.recipe_dirs['three'], build=False)) == ['two']
Example #26
0
def test_cb3_outputs(config_fixture):
    """
    A recipe with a conda-build-3 ``outputs:`` section (libone / py-one)
    should build, and every expected package path should exist afterward.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"

            outputs:
              - name: libone
              - name: py-one
                requirements:
                  - {{ pin_subpackage('libone', exact=True) }}
                  - python  {{ python }}

        """, from_string=True)
    r.write_recipes()
    # (removed a no-op expression statement `r.recipe_dirs['one']` whose
    # result was never used)

    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result

    # Every expected package path must exist; ensure_missing() cleans it up.
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
Example #27
0
def _build_pkg(recipe):
    """
    Write `recipe` (a YAML string), build its 'one' recipe using the first
    environment from tmp_env_matrix(), and return the expected package path.
    """
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    # Use only the first concrete environment from the matrix.
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    recipe = r.recipe_dirs['one']
    built_package = utils.built_package_path(recipe)
    # Make sure we actually build rather than reuse a leftover artifact.
    ensure_missing(built_package)
    build.build(recipe=r.recipe_dirs['one'], recipe_folder='.', env=env_matrix)
    return built_package
Example #28
0
def test_skip_dependencies(config_fixture):
    """
    'two' depends on a nonexistent package, so 'two' and its dependent
    'three' must be skipped while 'one' still builds.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: skip_dependencies_one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: skip_dependencies_two
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
                - nonexistent
        three:
          meta.yaml: |
            package:
              name: skip_dependencies_three
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
              run:
                - skip_dependencies_two
    """, from_string=True)
    r.write_recipes()
    pkgs = {}
    for k, v in r.recipe_dirs.items():
        pkgs[k] = utils.built_package_paths(v)

    # Start from a clean slate so the existence checks below are meaningful.
    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)

    build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    # Only 'one' should have produced packages.
    for pkg in pkgs['one']:
        assert os.path.exists(pkg)
    for pkg in pkgs['two']:
        assert not os.path.exists(pkg)
    for pkg in pkgs['three']:
        assert not os.path.exists(pkg)

    # clean up
    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)
Example #29
0
def test_skip_dependencies(config_fixture):
    """
    'two' depends on a nonexistent package, so 'two' and its dependent
    'three' must be skipped while 'one' still builds.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: skip_dependencies_one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: skip_dependencies_two
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
                - nonexistent
        three:
          meta.yaml: |
            package:
              name: skip_dependencies_three
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
              run:
                - skip_dependencies_two
    """, from_string=True)
    r.write_recipes()
    pkgs = {}
    for k, v in r.recipe_dirs.items():
        pkgs[k] = utils.built_package_paths(v)

    # Start from a clean slate so the existence checks below are meaningful.
    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)

    build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    # Only 'one' should have produced packages.
    for pkg in pkgs['one']:
        assert os.path.exists(pkg)
    for pkg in pkgs['two']:
        assert not os.path.exists(pkg)
    for pkg in pkgs['three']:
        assert not os.path.exists(pkg)

    # clean up
    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)
Example #30
0
def test_rendering_sandboxing():
    """
    Rendering a recipe that references {{ GITHUB_TOKEN }} must fail, because
    sensitive env vars are not exposed to jinja templates.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
    """, from_string=True)

    r.write_recipes()
    env = {
        # None of these should be passed to the recipe
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }
    # NOTE(review): `env` is defined but never passed to build.build below,
    # despite the comment about "passing in the `env` dict" -- confirm.

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemError.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.

    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout))
    else:
        # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)
Example #31
0
def test_skip_dependencies():
    """
    'two' depends on a nonexistent package; both 'two' and its dependent
    'three' must be skipped while 'one' still builds.
    """
    recipes = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            requirements:
              build:
                - one
                - nonexistent
        three:
          meta.yaml: |
            package:
              name: three
              version: 0.1
            requirements:
              build:
                - one
              run:
                - two
    """,
                from_string=True)
    recipes.write_recipes()
    pkgs = {name: utils.built_package_path(path)
            for name, path in recipes.recipe_dirs.items()}

    # Start from a clean slate so the existence checks below are meaningful.
    for path in pkgs.values():
        ensure_missing(path)

    build.build_recipes(
        recipes.basedir,
        config={},
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
        disable_upload=True,
    )
    assert os.path.exists(pkgs['one'])
    assert not os.path.exists(pkgs['two'])
    assert not os.path.exists(pkgs['three'])

    # clean up
    for path in pkgs.values():
        ensure_missing(path)
Example #32
0
def test_rendering_sandboxing():
    """
    Rendering a recipe that references {{ GITHUB_TOKEN }} must fail, because
    sensitive env vars are not exposed to jinja templates.
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
    """,
                from_string=True)

    r.write_recipes()
    env = {
        # None of these should be passed to the recipe
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }
    # NOTE(review): `env` is defined but never passed to build.build below,
    # despite the comment about "passing in the `env` dict" -- confirm.

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemError.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.

    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout))
    else:
        # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)
Example #33
0
def _build_pkg(recipe, mulled_test=False):
    """
    Write `recipe` (a YAML string), build its 'one' recipe, and return the
    list of expected package paths.
    """
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']
    built_packages = utils.built_package_paths(recipe)
    # Make sure we actually build rather than reuse leftover artifacts.
    for pkg in built_packages:
        ensure_missing(pkg)
    # NOTE(review): unlike the other _build_pkg variant in this file, no
    # recipe_folder argument is passed here -- confirm the build.build
    # signature in use.
    build.build(
        recipe=r.recipe_dirs['one'],
        pkg_paths=built_packages,
        mulled_test=mulled_test,
    )
    return built_packages
Example #34
0
def test_lint_skip_in_recipe():
    """
    A recipe can opt out of specific lints via an extra:skip-lints list;
    lints not in that list still apply.
    """

    # should fail (note we're only linting `missing_home`)
    r = Recipes(
        '''
        missing_home:
          meta.yaml: |
            package:
              name: missing_home
              version: "0.1"
        ''', from_string=True)
    r.write_recipes()
    res = linting.lint(
        r.recipe_dirs.values(),
        linting.LintArgs(registry=[lint_functions.missing_home]),
        basedir=r.basedir)
    assert res is not None

    # should now pass with the extra:skip-lints (only linting for `missing_home`)
    r = Recipes(
        '''
        missing_home:
          meta.yaml: |
            package:
              name: missing_home
              version: "0.1"
            extra:
              skip-lints:
                - missing_home
        ''', from_string=True)
    r.write_recipes()
    res = linting.lint(
        r.recipe_dirs.values(),
        linting.LintArgs(registry=[lint_functions.missing_home]),
        basedir=r.basedir)
    assert res is None

    # should pass; minimal recipe needs to skip these lints
    r = Recipes(
        '''
        missing_home:
          meta.yaml: |
            package:
              name: missing_home
              version: "0.1"
            extra:
              skip-lints:
                - missing_home
                - missing_license
                - no_tests
                - in_other_channels  # avoid triggering RepoData load
        ''', from_string=True)
    r.write_recipes()
    # With the default registry, other (non-skipped) lints still fire.
    res = linting.lint(r.recipe_dirs.values(), linting.LintArgs(),
        basedir=r.basedir)
    assert res is not None
Example #35
0
def recipes_fixture():
    """
    Writes example recipes (based on test_case.yaml), figures out the package
    paths and attaches them to the Recipes instance, and cleans up afterward.
    """
    recipes = Recipes('test_case.yaml')
    recipes.write_recipes()
    recipes.pkgs = {name: utils.built_package_paths(path)
                    for name, path in recipes.recipe_dirs.items()}
    yield recipes
    # Teardown: remove any packages the test left behind.
    for pkg_list in recipes.pkgs.values():
        for pkg in pkg_list:
            ensure_missing(pkg)
Example #36
0
def _build_pkg(recipe, mulled_test=False):
    """
    Write *recipe* (a YAML string) to disk, remove any previously built
    packages for it, build it, and return the expected package paths.
    """
    recipes = Recipes(recipe, from_string=True)
    recipes.write_recipes()
    recipe_dir = recipes.recipe_dirs['one']
    pkg_paths = utils.built_package_paths(recipe_dir)
    # Start from a clean slate so the build is not skipped as already built.
    for path in pkg_paths:
        ensure_missing(path)
    build.build(
        recipe=recipe_dir,
        recipe_folder='.',
        pkg_paths=pkg_paths,
        mulled_test=mulled_test,
    )
    return pkg_paths
Example #37
0
 def _run(contents, expect_pass=True):
     """
     Write the given recipe contents, parse its meta.yaml, and assert that
     the lint function under test passes (or fails) as expected.
     """
     recipes = Recipes(contents, from_string=True)
     recipes.write_recipes()
     assert len(recipes.recipe_dirs) == 1
     # Exactly one recipe was written; grab its name.
     name = next(iter(recipes.recipe_dirs))
     recipe_dir = recipes.recipe_dirs[name]
     meta = yaml.load(recipes.recipes[name]['meta.yaml'])
     result = func(recipe_dir, meta, should_pass_df)
     if expect_pass:
         assert result is None, "lint did not pass"
     else:
         assert result is not None, "lint did not fail"
Example #38
0
def test_empty_build_section():
    """
    Linting a recipe whose build: section is present but empty must not
    crash lint functions that read from that section.
    """
    recipes = Recipes(
        '''
        empty_build_section:
          meta.yaml: |
            package:
              name: empty_build_section
              version: "0.1"
            build:
        ''', from_string=True)
    recipes.write_recipes()
    # should_be_noarch / should_not_be_noarch poke into the (empty) build
    # section, so they exercise the edge case directly.
    checks = [lint_functions.should_be_noarch,
              lint_functions.should_not_be_noarch]
    result = linting.lint(recipes.recipe_dirs.values(), config={}, df=None,
                          registry=checks)
    assert result is None
Example #39
0
 def _run(contents, expect_pass=True):
     """
     Write the given recipe, render it for linux and osx, and assert that
     the lint function under test passes (or fails) as expected.
     """
     recipes = Recipes(contents, from_string=True)
     recipes.write_recipes()
     assert len(recipes.recipe_dirs) == 1
     name = next(iter(recipes.recipe_dirs))
     recipe = Recipe.from_file(recipes.basedir, recipes.recipe_dirs[name])
     # Render the recipe per platform without finalizing or trimming skips.
     metas = []
     for platform in ("linux", "osx"):
         cfg = utils.load_conda_build_config(platform=platform,
                                             trim_skip=False)
         metas.extend(utils.load_all_meta(recipes.recipe_dirs[name],
                                          config=cfg, finalize=False))
     result = func(recipe, metas)
     if expect_pass:
         assert result is None, "lint did not pass"
     else:
         assert result is not None, "lint did not fail"
Example #40
0
def test_load_meta_skipping():
    """
    Ensure that a skipped recipe returns no metadata
    """
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              skip: true
        """, from_string=True)
    recipes.write_recipes()
    # A recipe skipped on every platform should render to an empty list.
    assert utils.load_all_meta(recipes.recipe_dirs['one']) == []
Example #41
0
def test_load_meta_skipping():
    """
    A recipe with an unconditional ``skip: true`` must render to no
    metadata at all.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              skip: true
        """, from_string=True)
    r.write_recipes()
    rendered = utils.load_all_meta(r.recipe_dirs['one'])
    assert rendered == []
Example #42
0
def test_filter_recipes_skip_py27_in_build_string():
    """
    When CONDA_PY is in the build string, py27 should be skipped
    """
    env_matrix = {'CONDA_PY': [27, 35]}

    # Without any skip, the single recipe expands to one target per python.
    recipes_obj = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - python
              run:
                - python
        """, from_string=True)
    recipes_obj.write_recipes()
    filtered = list(utils.filter_recipes(
        list(recipes_obj.recipe_dirs.values()), env_matrix,
        channels=['bioconda']))

    # one recipe, two targets
    assert len(filtered) == 1
    assert len(filtered[0][1]) == 2

    # Adding a py27-conditional skip drops that target, leaving one.
    recipes_obj = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              skip: True # [py27]
            requirements:
              build:
                - python
              run:
                - python
        """, from_string=True)
    recipes_obj.write_recipes()
    filtered = list(utils.filter_recipes(
        list(recipes_obj.recipe_dirs.values()), env_matrix,
        channels=['bioconda']))

    # one recipe, one target
    assert len(filtered) == 1
    assert len(filtered[0][1]) == 1
Example #43
0
def test_built_package_path():
    """
    built_package_path should reflect the current interpreter's python
    version by default, and honor CONDA_PY overrides passed either via the
    ``env`` argument or via ``os.environ``.
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              run:
                - python

        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """,
                from_string=True)
    r.write_recipes()

    # Default: build string uses the running interpreter's python version.
    assert os.path.basename(
        utils.built_package_path(r.recipe_dirs['one'])
    ) == 'one-0.1-py{ver.major}{ver.minor}_0.tar.bz2'.format(
        ver=sys.version_info)

    # A CONDA_PY passed as an env dict overrides the default.
    assert os.path.basename(
        utils.built_package_path(
            r.recipe_dirs['one'],
            env=dict(CONDA_PY=27))) == 'one-0.1-py27_0.tar.bz2'

    # CONDA_PY picked up from os.environ. Restore the environment in a
    # ``finally`` block instead of duplicating the cleanup on the success and
    # (formerly bare-``except``) failure paths; also restore in place rather
    # than rebinding os.environ to a plain dict, which would break the
    # putenv() syncing that the os.environ mapping provides.
    existing_env = dict(os.environ)
    try:
        os.environ['CONDA_PY'] = '27'
        assert os.path.basename(utils.built_package_path(
            r.recipe_dirs['one'])) == 'one-0.1-py27_0.tar.bz2'
    finally:
        os.environ.clear()
        os.environ.update(existing_env)
Example #44
0
def test_build_container_no_default_gcc(tmpdir):
    """
    Build a recipe whose test runs gcc inside a container created from the
    repository's own Dockerfile (rather than an already-uploaded image).
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    r.write_recipes()

    # Copy the whole repository into the image build dir so everything that
    # is needed lives inside the docker build context.
    image_build_dir = os.path.join(tmpdir, "repo")
    shutil.copytree(os.path.join(os.path.dirname(__file__), ".."),
                    image_build_dir)
    # RecipeBuilder recreates the Dockerfile from a template, so read the
    # file in (escaping braces for str.format) and delete the original.
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as handle:
        template = handle.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    builder = docker_utils.RecipeBuilder(
        dockerfile_template=template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    paths = utils.built_package_paths(r.recipe_dirs['one'])
    result = build.build(
        recipe=r.recipe_dirs['one'],
        pkg_paths=paths,
        docker_builder=builder,
        mulled_test=False,
    )
    assert result.success

    for recipe_dir in r.recipe_dirs.values():
        for built in utils.built_package_paths(recipe_dir):
            assert os.path.exists(built)
            ensure_missing(built)
Example #45
0
def test_rendering_sandboxing(caplog):
    """
    Jinja rendering must only see whitelisted environment variables: a recipe
    referencing GITHUB_TOKEN fails to render (detected via the log), while
    one referencing CONDA_ARBITRARY_VAR renders with the value from ``env``.
    """
    r = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
    """,
                from_string=True)

    r.write_recipes()
    env = {
        # First one is allowed, others are not
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
    # The return value is irrelevant here; the failure is asserted from the
    # captured log output (the unused ``res`` binding was dropped).
    build.build(recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                env=env,
                mulled_test=False)
    assert "Undefined Jinja2 variables remain (['GITHUB_TOKEN']).  Please enable source downloading and try again." in caplog.text

    r = Recipes("""
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            extra:
              var2: {{ CONDA_ARBITRARY_VAR }}

    """,
                from_string=True)
    r.write_recipes()
    pkg = utils.built_package_path(r.recipe_dirs['two'], env=env)
    ensure_missing(pkg)
    build.build(recipe=r.recipe_dirs['two'],
                recipe_folder='.',
                env=env,
                mulled_test=False)

    # Extract the rendered meta.yaml from the built package and confirm the
    # whitelisted variable was substituted. Use context managers so the
    # tarfile and extracted file handles are closed (they previously leaked).
    tmp = tempfile.mkdtemp()
    target = 'info/recipe/meta.yaml'
    with tarfile.open(pkg) as tar:
        tar.extract(target, path=tmp)
    with open(os.path.join(tmp, target)) as rendered:
        contents = yaml.load(rendered.read())
    assert contents['extra']['var2'] == 'conda-val-here', contents
Example #46
0
def test_filter_recipes_skip_is_true():
    """
    A recipe with an unconditional ``skip: true`` is filtered out entirely.
    """
    recipes_obj = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            build:
              skip: true
        """,
                from_string=True)
    recipes_obj.write_recipes()
    env_matrix = {}
    filtered = list(utils.filter_recipes(
        list(recipes_obj.recipe_dirs.values()), env_matrix))
    assert len(filtered) == 0
Example #47
0
def test_empty_build_section():
    """
    Lint functions that inspect the build section must tolerate a ``build:``
    key with no contents.
    """
    recipes_obj = Recipes(
        '''
        empty_build_section:
          meta.yaml: |
            package:
              name: empty_build_section
              version: "0.1"
            build:
        ''', from_string=True)
    recipes_obj.write_recipes()
    # should_be_noarch / should_not_be_noarch read from the (empty) section.
    result = linting.lint(
        recipes_obj.recipe_dirs.values(),
        linting.LintArgs(registry=[lint_functions.should_be_noarch,
                                   lint_functions.should_not_be_noarch]),
        basedir=recipes_obj.basedir
    )
    assert result is None
Example #48
0
def test_build_container_no_default_gcc(tmpdir):
    """
    Build a recipe whose test calls gcc, using a container image built from
    the repository's own Dockerfile instead of a pre-uploaded image.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    r.write_recipes()

    # Everything needed for the docker build context: copy the repository
    # into the image build directory.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)
    # The Dockerfile is recreated by RecipeBuilder from a template, so pull
    # out its text (escaping braces for str.format) and remove the file.
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as handle:
        dockerfile_template = handle.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    docker_builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    recipe_dir = r.recipe_dirs['one']
    build_result = build.build(
        recipe=recipe_dir,
        recipe_folder='.',
        pkg_paths=utils.built_package_paths(recipe_dir),
        docker_builder=docker_builder,
        mulled_test=False,
    )
    assert build_result.success
Example #49
0
def test_conda_as_dep(config_fixture):
    """
    A recipe that depends on conda at run time should build (and pass its
    mulled test) successfully.
    """
    recipes_obj = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - conda
        """, from_string=True)
    recipes_obj.write_recipes()
    assert build.build_recipes(
        recipes_obj.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=True,
    )
Example #50
0
def single_upload():
    """
    Create a randomly-named recipe and upload it under a label so that the
    main bioconda channel is unaffected. Dependent tests receive a tuple of
    (name, package, recipe dir). Removes the uploaded package afterwards.
    """
    name = 'upload-test-' + str(uuid.uuid4()).split('-')[0]
    recipes_obj = Recipes(
        '''
        {0}:
          meta.yaml: |
            package:
              name: {0}
              version: "0.1"
        '''.format(name), from_string=True)
    recipes_obj.write_recipes()
    recipes_obj.pkgs = {
        name: utils.built_package_paths(recipes_obj.recipe_dirs[name]),
    }

    build.build(
        recipe=recipes_obj.recipe_dirs[name],
        recipe_folder='.',
        pkg_paths=recipes_obj.pkgs[name],
        docker_builder=None,
        mulled_test=False
    )
    pkg = recipes_obj.pkgs[name][0]

    upload.anaconda_upload(pkg, label=TEST_LABEL)

    yield (name, pkg, recipes_obj.recipe_dirs[name])

    # Teardown: remove the uploaded package from the channel again.
    sp.run(
        ['anaconda', '-t', os.environ.get('ANACONDA_TOKEN'), 'remove',
         'bioconda/{0}'.format(name), '--force'],
        stdout=sp.PIPE, stderr=sp.STDOUT, check=True,
        universal_newlines=True)
Example #51
0
def test_nested_recipes(config_fixture):
    """
    Test get_recipes ability to identify different nesting depths of recipes
    """
    r = Recipes(
        """
        shallow:
            meta.yaml: |
                package:
                    name: shallow
                    version: "0.1"
            build.sh: |
                #!/bin/bash
                echo "Shallow Created"
                pwd
        normal/normal:
            meta.yaml: |
                package:
                    name: normal
                    version: "0.1"
                build:
                    skip: true
                requirements:
                    build:
                        - python 3.6
            build.sh: |
                #!/bin/bash
                echo "Testing build.sh through python"
                python -h
        deep/deep/deep:
            meta.yaml: |
                package:
                    name: deep
                    version: "0.1"
                requirements:
                    build:
                        - python
                    run:
                        - python
            build.sh: |
                #!/bin/bash
                ## Empty script
        F/I/V/E/deep:
            meta.yaml: |
                package:
                    name: fivedeep
                    version: "0.1"
                requirements:
                    build:
                        - python 3.6
                    run:
                        - python 3.6
        """, from_string=True)
    r.write_recipes()

    assert build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )

    # get_recipes must find all four recipes regardless of nesting depth.
    assert len(list(utils.get_recipes(r.basedir))) == 4

    for recipe_dir in r.recipe_dirs.values():
        for built in utils.built_package_paths(recipe_dir):
            assert os.path.exists(built)
            ensure_missing(built)