def test_rendering_sandboxing(): r = Recipes( """ one: meta.yaml: | package: name: one version: 0.1 extra: var: {{ GITHUB_TOKEN }} """, from_string=True) r.write_recipes() env = { # None of these should be passed to the recipe 'CONDA_ARBITRARY_VAR': 'conda-val-here', 'TRAVIS_ARBITRARY_VAR': 'travis-val-here', 'GITHUB_TOKEN': 'asdf', 'BUILDKITE_TOKEN': 'asdf', } # If GITHUB_TOKEN is already set in the bash environment, then we get # a message on stdout+stderr (this is the case on travis-ci). # # However if GITHUB_TOKEN is not already set in the bash env (e.g., when # testing locally), then we get a SystemError. # # In both cases we're passing in the `env` dict, which does contain # GITHUB_TOKEN. if 'GITHUB_TOKEN' in os.environ: with pytest.raises(sp.CalledProcessError) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', pkg_paths=pkg_paths, mulled_test=False, _raise_error=True, ) assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)) else: # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var. with pytest.raises(SystemExit) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build.build( recipe=r.recipe_dirs['one'], recipe_folder='.', pkg_paths=pkg_paths, mulled_test=False, ) assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)
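# ---------------------------------------------------------------------------
# Assumed imports and helpers. These tests are excerpted from a larger suite;
# the imports below are inferred from usage. `Recipes` and the constants
# TEST_LABEL and DOCKER_BASE_IMAGE are assumed to come from local test
# helpers/config, and `ensure_missing` is given here only as a minimal sketch
# of the cleanup helper the tests rely on.
# ---------------------------------------------------------------------------
import contextlib
import logging
import os
import shutil
import subprocess as sp
import sys
import tarfile
import tempfile
import uuid

import pytest
import yaml
from conda_build import metadata

from bioconda_utils import build, docker_utils, upload, utils

from helpers import Recipes  # assumed local test helper

# Assumed values; the real suite defines these in its helpers/CI config.
TEST_LABEL = 'bioconda-utils-test'
DOCKER_BASE_IMAGE = 'bioconda/bioconda-utils-build-env'  # hypothetical image name


def ensure_missing(pkg):
    """
    Minimal sketch of the cleanup helper used throughout: delete a built
    package so subsequent builds and assertions start from a clean slate.
    The real helper in the suite may do more (e.g., refresh conda's index).
    """
    if os.path.exists(pkg):
        os.unlink(pkg)

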
def test_rendering_sandboxing(): r = Recipes(""" one: meta.yaml: | package: name: one version: 0.1 extra: var: {{ GITHUB_TOKEN }} """, from_string=True) r.write_recipes() env = { # None of these should be passed to the recipe 'CONDA_ARBITRARY_VAR': 'conda-val-here', 'TRAVIS_ARBITRARY_VAR': 'travis-val-here', 'GITHUB_TOKEN': 'asdf', 'BUILDKITE_TOKEN': 'asdf', } # If GITHUB_TOKEN is already set in the bash environment, then we get # a message on stdout+stderr (this is the case on travis-ci). # # However if GITHUB_TOKEN is not already set in the bash env (e.g., when # testing locally), then we get a SystemError. # # In both cases we're passing in the `env` dict, which does contain # GITHUB_TOKEN. if 'GITHUB_TOKEN' in os.environ: with pytest.raises(sp.CalledProcessError) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build.build( recipe=r.recipe_dirs['one'], pkg_paths=pkg_paths, mulled_test=False, raise_error=True, ) assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)) else: # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var. with pytest.raises(SystemExit) as excinfo: pkg_paths = utils.built_package_paths(r.recipe_dirs['one']) build.build( recipe=r.recipe_dirs['one'], pkg_paths=pkg_paths, mulled_test=False, ) assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)
def test_env_sandboxing():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
          build.sh: |
            #!/bin/bash
            if [[ -z $GITHUB_TOKEN ]]
            then
                exit 0
            else
                echo "\$GITHUB_TOKEN has leaked into the build environment!"
                exit 1
            fi
        """, from_string=True)
    r.write_recipes()
    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])

    with utils.temp_env({'GITHUB_TOKEN': 'token_here'}):
        build.build(
            recipe=r.recipe_dirs['one'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False,
        )

    for pkg in pkg_paths:
        assert os.path.exists(pkg)
        ensure_missing(pkg)


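# `utils.temp_env` (used above and in test_rendering_sandboxing below) comes
# from bioconda_utils. This sketch shows the assumed semantics: temporarily
# overlay os.environ with a mapping and restore the original environment on
# exit. It is illustrative only, not the library's implementation.
@contextlib.contextmanager
def _temp_env_sketch(env):
    orig = os.environ.copy()
    os.environ.update({k: str(v) for k, v in env.items()})
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(orig)

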
def test_cb3_outputs(config_fixture):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            outputs:
              - name: libone
              - name: py-one
                requirements:
                  - {{ pin_subpackage('libone', exact=True) }}
                  - python {{ python }}
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_built_package_paths():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6
        two:
          meta.yaml: |
            package:
              name: two
              version: "0.1"
            build:
              number: 0
              string: ncurses{{ CONDA_NCURSES }}_{{ PKG_BUILDNUM }}
        """, from_string=True)
    r.write_recipes()

    # Newer conda-build versions add channel_targets and target_platform to
    # the build-string hash.
    platform = 'linux' if sys.platform == 'linux' else 'osx'
    d = {"channel_targets": "bioconda main",
         "target_platform": "{}-64".format(platform)}
    h = metadata._hash_dependencies(d, 7)

    assert os.path.basename(
        utils.built_package_paths(r.recipe_dirs['one'])[0]
    ) == 'one-0.1-py36{}_0.tar.bz2'.format(h)


def test_build_empty_extra_container():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    r.write_recipes()
    pkgs = utils.built_package_paths(r.recipe_dirs['one'])
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=pkgs,
        mulled_test=True,
    )
    assert build_result.success
    for pkg in pkgs:
        assert os.path.exists(pkg)
        ensure_missing(pkg)


def test_bioconda_pins(caplog, config_fixture):
    """
    htslib is currently only provided by the bioconda pinnings.
    """
    caplog.set_level(logging.DEBUG)
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - htslib
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_compiler(config_fixture):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              build:
                - {{ compiler('c') }}
              host:
                - python
              run:
                - python
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_conda_forge_pins(caplog, config_fixture):
    caplog.set_level(logging.DEBUG)
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - zlib {{ zlib }}
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_skip_dependencies(config_fixture):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: skip_dependencies_one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: skip_dependencies_two
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
                - nonexistent
        three:
          meta.yaml: |
            package:
              name: skip_dependencies_three
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
              run:
                - skip_dependencies_two
        """, from_string=True)
    r.write_recipes()
    pkgs = {}
    for k, v in r.recipe_dirs.items():
        pkgs[k] = utils.built_package_paths(v)

    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)

    build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )

    # "one" should build; "two" (nonexistent dependency) and "three" (depends
    # on "two") should be skipped.
    for pkg in pkgs['one']:
        assert os.path.exists(pkg)
    for pkg in pkgs['two']:
        assert not os.path.exists(pkg)
    for pkg in pkgs['three']:
        assert not os.path.exists(pkg)

    # clean up
    for _pkgs in pkgs.values():
        for pkg in _pkgs:
            ensure_missing(pkg)


@pytest.fixture
def recipes_fixture():
    """
    Writes example recipes (based on test_case.yaml), figures out the package
    paths and attaches them to the Recipes instance, and cleans up afterward.
    """
    r = Recipes('test_case.yaml')
    r.write_recipes()
    r.pkgs = {}
    for k, v in r.recipe_dirs.items():
        r.pkgs[k] = utils.built_package_paths(v)
    yield r
    for pkgs in r.pkgs.values():
        for pkg in pkgs:
            ensure_missing(pkg)


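# Several tests take a `config_fixture` argument that is not defined in this
# excerpt. A minimal sketch, assuming a test-config.yaml next to this file and
# that utils.load_config accepts such a path (both assumptions):
@pytest.fixture(scope='module')
def _config_fixture_sketch():
    path = os.path.join(os.path.dirname(__file__), 'test-config.yaml')  # hypothetical path
    return utils.load_config(path)

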
def _build_pkg(recipe, mulled_test=False):
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']
    built_packages = utils.built_package_paths(recipe)
    for pkg in built_packages:
        ensure_missing(pkg)
    build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=built_packages,
        mulled_test=mulled_test,
    )
    return built_packages


def test_conda_build_sysroot(config_fixture):
    """
    Test that CONDA_BUILD_SYSROOT is unset/empty without a C compiler and is
    correctly set after compiler activation.
    """
    # conda-build >=3.18.0 sets CONDA_BUILD_SYSROOT to a hard-coded default
    # path. We clear its value in our bioconda_utils-conda_build_config.yaml.
    # With CONDA_BUILD_SYSROOT being empty, the activation script of
    # clang_osx-64 can set it to a valid path.
    r = Recipes(
        """
        sysroot_var_is_unset_or_empty_without_c_compiler:
          meta.yaml: |
            package:
              name: sysroot_var_is_unset_or_empty_without_c_compiler
              version: 0.1
            build:
              script: '[ -z "${CONDA_BUILD_SYSROOT:-}" ]'
        sysroot_is_existing_directory_with_c_compiler:
          meta.yaml: |
            package:
              name: sysroot_is_existing_directory_with_c_compiler
              version: 0.1
            build:
              script: 'test -d "${CONDA_BUILD_SYSROOT}"'
            requirements:
              build:
                - {{ compiler('c') }}
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config_fixture,
        r.recipe_dirnames,
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_build_container_no_default_gcc(tmpdir):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    r.write_recipes()

    # Test with the repository's Dockerfile instead of already-uploaded
    # images. Copy the repository into the image build directory so
    # everything is in the docker context.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)

    # The Dockerfile will be recreated by RecipeBuilder, so extract the
    # template and delete the file. Braces are doubled to escape them for
    # str.format (see the demo below).
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as f:
        dockerfile_template = f.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    docker_builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=pkg_paths,
        docker_builder=docker_builder,
        mulled_test=False,
    )
    assert build_result.success

    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


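# The brace doubling above is needed because RecipeBuilder treats the
# Dockerfile as a str.format() template: literal `{`/`}` must be escaped as
# `{{`/`}}` so only the builder's own placeholders get substituted. A tiny
# self-contained demonstration (not collected by pytest):
def _format_escape_demo():
    raw = "RUN echo ${HOME}"
    escaped = raw.replace("{", "{{").replace("}", "}}")
    # format() turns the doubled braces back into single literal braces.
    assert escaped.format() == raw

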
# NOTE: `mulled_test` is assumed to be parametrized in the full suite; the
# values below are illustrative.
@pytest.mark.parametrize('mulled_test', [True, False])
def test_conda_as_dep(config_fixture, mulled_test):
    docker_builder = None
    if mulled_test:
        docker_builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image=DOCKER_BASE_IMAGE,
        )
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: bioconda_utils_test_conda_as_dep
              version: 0.1
            requirements:
              host:
                - conda
              run:
                - conda
            test:
              commands:
                - test -e "${PREFIX}/bin/conda"
        """, from_string=True)
    r.write_recipes()
    build_result = build.build_recipes(
        r.basedir,
        config_fixture,
        r.recipe_dirnames,
        testonly=False,
        force=False,
        docker_builder=docker_builder,
        mulled_test=mulled_test,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


@pytest.fixture
def single_upload():
    """
    Creates a randomly-named recipe and uploads it using a label so that it
    doesn't affect the main bioconda channel. Tests that depend on this
    fixture get a tuple of name, package, recipe dir. Cleans up when it's
    done.
    """
    name = 'upload-test-' + str(uuid.uuid4()).split('-')[0]
    r = Recipes(
        '''
        {0}:
          meta.yaml: |
            package:
              name: {0}
              version: "0.1"
        '''.format(name), from_string=True)
    r.write_recipes()
    r.pkgs = {}
    r.pkgs[name] = utils.built_package_paths(r.recipe_dirs[name])

    build.build(
        recipe=r.recipe_dirs[name],
        recipe_folder='.',
        pkg_paths=r.pkgs[name],
        docker_builder=None,
        mulled_test=False,
    )
    pkg = r.pkgs[name][0]

    upload.anaconda_upload(pkg, label=TEST_LABEL)

    yield (name, pkg, r.recipe_dirs[name])

    sp.run(
        ['anaconda', '-t', os.environ.get('ANACONDA_TOKEN'), 'remove',
         'bioconda/{0}'.format(name), '--force'],
        stdout=sp.PIPE, stderr=sp.STDOUT, check=True,
        universal_newlines=True)


def test_nested_recipes(config_fixture):
    """
    Test that get_recipes identifies recipes at different nesting depths.
    """
    r = Recipes(
        """
        shallow:
          meta.yaml: |
            package:
              name: shallow
              version: "0.1"
          build.sh: |
            #!/bin/bash
            echo "Shallow Created"
            pwd
        normal/normal:
          meta.yaml: |
            package:
              name: normal
              version: "0.1"
            build:
              skip: true
            requirements:
              build:
                - python 3.6
          build.sh: |
            #!/bin/bash
            echo "Testing build.sh through python"
            python -h
        deep/deep/deep:
          meta.yaml: |
            package:
              name: deep
              version: "0.1"
            requirements:
              build:
                - python
              run:
                - python
          build.sh: |
            #!/bin/bash
            ## Empty script
        F/I/V/E/deep:
          meta.yaml: |
            package:
              name: fivedeep
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6
        """, from_string=True)
    r.write_recipes()
    build_results = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_results
    assert len(list(utils.get_recipes(r.basedir))) == 4

    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)


def test_rendering_sandboxing():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
        """, from_string=True)
    r.write_recipes()
    env = {
        # Only the CONDA_-prefixed var is allowed through to the recipe; the
        # others must be filtered out by the rendering sandbox.
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemExit.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.
    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)
    else:
        # The recipe for "one" should fail because GITHUB_TOKEN is not a
        # jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)

    r = Recipes(
        """
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            extra:
              var2: {{ CONDA_ARBITRARY_VAR }}
        """, from_string=True)
    r.write_recipes()

    with utils.temp_env(env):
        pkg_paths = utils.built_package_paths(r.recipe_dirs['two'])
        for pkg in pkg_paths:
            ensure_missing(pkg)
        build.build(
            recipe=r.recipe_dirs['two'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False,
        )

        for pkg in pkg_paths:
            t = tarfile.open(pkg)
            tmp = tempfile.mkdtemp()
            target = 'info/recipe/meta.yaml'
            t.extract(target, path=tmp)
            contents = yaml.safe_load(open(os.path.join(tmp, target)).read())
            assert contents['extra']['var2'] == 'conda-val-here', contents