def test_bioconda_pins(caplog, config_fixture):
    """
    htslib currently only provided by bioconda pinnings
    """
    caplog.set_level(logging.DEBUG)
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - htslib
        """, from_string=True)
    recipes.write_recipes()
    # Build everything; skip the (slow) mulled container test.
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    # Every recipe must have produced its package(s); clean them up afterwards.
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def test_cb3_outputs(config_fixture):
    """
    A conda-build-3 style multiple-output recipe (subpackages pinned with
    pin_subpackage) builds successfully.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: "0.1"
            outputs:
              - name: libone
              - name: py-one
                requirements:
                  - {{ pin_subpackage('libone', exact=True) }}
                  - python {{ python }}
        """, from_string=True)
    r.write_recipes()
    # FIX: removed a bare no-op expression statement (`r.recipe_dirs['one']`)
    # whose result was discarded and had no side effect.
    build_result = build.build_recipes(
        r.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    assert build_result
    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)
def test_build_empty_extra_container():
    """An empty `extra: container:` section must not break the build."""
    rcps = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    rcps.write_recipes()
    recipe_dir = rcps.recipe_dirs['one']
    expected_pkgs = utils.built_package_paths(recipe_dir)
    result = build.build(
        recipe=recipe_dir,
        recipe_folder='.',
        pkg_paths=expected_pkgs,
        mulled_test=True,
    )
    assert result.success
    for built in expected_pkgs:
        assert os.path.exists(built)
        ensure_missing(built)
def test_conda_forge_pins(caplog, config_fixture):
    """A recipe pinned via conda-forge pinnings (zlib) builds successfully."""
    caplog.set_level(logging.DEBUG)
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              run:
                - zlib {{ zlib }}
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            # Listing kept for CI-log debugging of the build directory.
            print(os.listdir(os.path.dirname(pkg_path)))
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def test_env_sandboxing():
    """
    The build script must not see disallowed environment variables
    (GITHUB_TOKEN); the build.sh exits non-zero if the token leaks.
    """
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
          build.sh: |
            #!/bin/bash
            if [[ -z $GITHUB_TOKEN ]]
            then
                exit 0
            else
                echo "\$GITHUB_TOKEN has leaked into the build environment!"
                exit 1
            fi
        """, from_string=True)
    recipes.write_recipes()
    recipe_dir = recipes.recipe_dirs['one']
    expected_pkgs = utils.built_package_paths(recipe_dir)

    # Set the token in the outer environment; the build must still succeed.
    with utils.temp_env({'GITHUB_TOKEN': 'token_here'}):
        build.build(
            recipe=recipe_dir,
            recipe_folder='.',
            pkg_paths=expected_pkgs,
            mulled_test=False
        )

    for built in expected_pkgs:
        assert os.path.exists(built)
        ensure_missing(built)
def test_compiler(config_fixture):
    """A recipe using the `{{ compiler('c') }}` jinja function builds."""
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            requirements:
              build:
                - {{ compiler('c') }}
              host:
                - python
              run:
                - python
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def test_skip_dependencies(config_fixture):
    """
    A recipe with an unsatisfiable dependency ("two" needs "nonexistent")
    is skipped, and so is anything depending on it ("three"); "one" builds.
    """
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: skip_dependencies_one
              version: 0.1
        two:
          meta.yaml: |
            package:
              name: skip_dependencies_two
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
                - nonexistent
        three:
          meta.yaml: |
            package:
              name: skip_dependencies_three
              version: 0.1
            requirements:
              build:
                - skip_dependencies_one
              run:
                - skip_dependencies_two
        """, from_string=True)
    recipes.write_recipes()
    # Map recipe name -> expected package paths; ensure a clean slate first.
    pkgs = {name: utils.built_package_paths(path)
            for name, path in recipes.recipe_dirs.items()}
    for paths in pkgs.values():
        for pkg in paths:
            ensure_missing(pkg)
    build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    for pkg in pkgs['one']:
        assert os.path.exists(pkg)
    # "two" and "three" must have been skipped.
    for name in ('two', 'three'):
        for pkg in pkgs[name]:
            assert not os.path.exists(pkg)
    # clean up
    for paths in pkgs.values():
        for pkg in paths:
            ensure_missing(pkg)
def recipes_fixture():
    """
    Writes example recipes (based on test_case.yaml), figures out the
    package paths and attaches them to the Recipes instance, and cleans up
    afterward.
    """
    recipes = Recipes('test_case.yaml')
    recipes.write_recipes()
    # Attach expected package paths, keyed by recipe name.
    recipes.pkgs = {name: utils.built_package_paths(path)
                    for name, path in recipes.recipe_dirs.items()}
    yield recipes
    # Teardown: remove any packages the tests built.
    for paths in recipes.pkgs.values():
        for pkg in paths:
            ensure_missing(pkg)
def recipes_fixture():
    """
    Writes example recipes (based on test_case.yaml), figures out the
    package paths and attaches them to the Recipes instance, and cleans up
    afterward.
    """
    fixture = Recipes('test_case.yaml')
    fixture.write_recipes()
    fixture.pkgs = {}
    # Record expected package paths per recipe so tests (and teardown)
    # can find them.
    for name in fixture.recipe_dirs:
        fixture.pkgs[name] = utils.built_package_paths(fixture.recipe_dirs[name])
    yield fixture
    for paths in fixture.pkgs.values():
        for pkg in paths:
            ensure_missing(pkg)
def _build_pkg(recipe, mulled_test=False):
    """
    Build a recipe given as a YAML string; return the built package paths.
    """
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    # Use a distinct local name instead of shadowing the `recipe` parameter.
    recipe_dir = r.recipe_dirs['one']
    pkg_paths = utils.built_package_paths(recipe_dir)
    for pkg in pkg_paths:
        ensure_missing(pkg)
    build.build(
        recipe=recipe_dir,
        recipe_folder='.',
        pkg_paths=pkg_paths,
        mulled_test=mulled_test,
    )
    return pkg_paths
def multi_build(request, recipes_fixture):
    """
    Builds the "one", "two", and "three" recipes.

    Parametrized on `request.param`: truthy -> build inside Docker,
    falsy -> build on the host. Yields the dict of built package paths.
    """
    if request.param:
        docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
    else:
        docker_builder = None
    build.build_recipes(
        recipe_folder=recipes_fixture.basedir,
        docker_builder=docker_builder,
        config={},
    )
    built_packages = recipes_fixture.pkgs
    yield built_packages
    # BUG FIX: each value is a *list* of package paths (recipes_fixture sets
    # pkgs[k] = utils.built_package_paths(...)), so clean up each path
    # individually instead of passing the whole list to ensure_missing().
    for pkgs in built_packages.values():
        for pkg in pkgs:
            ensure_missing(pkg)
def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.

    Parametrized on `request.param`: truthy -> build inside Docker,
    falsy -> build on the host. Yields the built package paths for "one".
    """
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    if request.param:
        docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
    else:
        docker_builder = None
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        docker_builder=docker_builder,
        env=env_matrix,
    )
    built_packages = recipes_fixture.pkgs['one']
    yield built_packages
    # BUG FIX: recipes_fixture.pkgs['one'] is a *list* of package paths
    # (one per variant), so remove each path rather than handing the list
    # itself to ensure_missing().
    for pkg in built_packages:
        ensure_missing(pkg)
def test_build_container_no_default_gcc(tmpdir):
    """
    Build inside a container image created from the repository's own
    Dockerfile and verify gcc is available in the test phase.
    """
    recipes = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    recipes.write_recipes()

    # Tests with the repository's Dockerfile instead of already uploaded images.
    # Copy repository to image build directory so everything is in docker context.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)

    # Dockerfile will be recreated by RecipeBuilder => extract template and delete file
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as fh:
        # Escape braces so the file content survives str.format templating.
        dockerfile_template = fh.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    recipe_dir = recipes.recipe_dirs['one']
    expected_pkgs = utils.built_package_paths(recipe_dir)
    result = build.build(
        recipe=recipe_dir,
        pkg_paths=expected_pkgs,
        docker_builder=builder,
        mulled_test=False,
    )
    assert result.success
    for dir_ in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(dir_):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def _build_pkg():
    """Build the fixed 'one' test recipe and return its built package path."""
    recipes = Recipes(dedent("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - "ls -la"
        """), from_string=True)
    recipes.write_recipes()
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    recipe_dir = recipes.recipe_dirs['one']
    built_package = utils.built_package_path(recipe_dir)
    # Start from a clean slate so the build is not a no-op.
    ensure_missing(built_package)
    build.build(recipe=recipe_dir, recipe_folder='.', env=env_matrix)
    return built_package
def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    # Docker builds also run the mulled container test; host builds do not.
    if request.param:
        builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
        run_mulled_test = True
    else:
        builder = None
        run_mulled_test = False
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=recipes_fixture.pkgs['one'],
        docker_builder=builder,
        mulled_test=run_mulled_test,
    )
    yield recipes_fixture.pkgs['one']
    for built in recipes_fixture.pkgs['one']:
        ensure_missing(built)
def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    # Docker path pins the local build-env image and enables the mulled test.
    if request.param:
        builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image="bioconda-utils-build-env:latest")
        run_mulled_test = True
    else:
        builder = None
        run_mulled_test = False
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=recipes_fixture.pkgs['one'],
        docker_builder=builder,
        mulled_test=run_mulled_test,
    )
    yield recipes_fixture.pkgs['one']
    for built in recipes_fixture.pkgs['one']:
        ensure_missing(built)
def test_conda_build_sysroot(config_fixture):
    """
    Test if CONDA_BUILD_SYSROOT is empty/unset and correctly set after
    compiler activation.
    """
    # conda-build >=3.18.0 sets CONDA_BUILD_SYSROOT to a hard-coded default path.
    # We clear its value in our bioconda_utils-conda_build_config.yaml.
    # With CONDA_BUILD_SYSROOT being empty, the activation script of clang_osx-64
    # can set it to a valid path.
    recipes = Recipes("""
        sysroot_var_is_unset_or_empty_without_c_compiler:
          meta.yaml: |
            package:
              name: sysroot_var_is_unset_or_empty_without_c_compiler
              version: 0.1
            build:
              script: '[ -z "${CONDA_BUILD_SYSROOT:-}" ]'
        sysroot_is_existing_directory_with_c_compiler:
          meta.yaml: |
            package:
              name: sysroot_is_existing_directory_with_c_compiler
              version: 0.1
            build:
              script: 'test -d "${CONDA_BUILD_SYSROOT}"'
            requirements:
              build:
                - {{ compiler('c') }}
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config_fixture,
        recipes.recipe_dirnames,
        testonly=False,
        force=False,
        mulled_test=False,
    )
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def multi_build(request, recipes_fixture, config_fixture):
    """
    Builds the "one", "two", and "three" recipes.
    """
    # Docker builds also exercise the mulled container test.
    if request.param:
        builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
        run_mulled_test = True
    else:
        builder = None
        run_mulled_test = False
    build.build_recipes(
        recipe_folder=recipes_fixture.basedir,
        docker_builder=builder,
        config=config_fixture,
        mulled_test=run_mulled_test,
    )
    yield recipes_fixture.pkgs
    # Teardown: remove every built package path.
    for paths in recipes_fixture.pkgs.values():
        for built in paths:
            ensure_missing(built)
def multi_build(request, recipes_fixture, config_fixture):
    """
    Builds the "one", "two", and "three" recipes.
    """
    # Docker path pins the local build-env image and enables the mulled test.
    if request.param:
        builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image="bioconda-utils-build-env:latest")
        run_mulled_test = True
    else:
        builder = None
        run_mulled_test = False
    build.build_recipes(
        recipe_folder=recipes_fixture.basedir,
        docker_builder=builder,
        config=config_fixture,
        mulled_test=run_mulled_test,
    )
    yield recipes_fixture.pkgs
    for paths in recipes_fixture.pkgs.values():
        for built in paths:
            ensure_missing(built)
def test_conda_as_dep(config_fixture, mulled_test):
    """A recipe depending on conda itself builds and ships the conda binary."""
    builder = None
    if mulled_test:
        builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image=DOCKER_BASE_IMAGE,
        )
    recipes = Recipes("""
        one:
          meta.yaml: |
            package:
              name: bioconda_utils_test_conda_as_dep
              version: 0.1
            requirements:
              host:
                - conda
              run:
                - conda
            test:
              commands:
                - test -e "${PREFIX}/bin/conda"
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config_fixture,
        recipes.recipe_dirnames,
        testonly=False,
        force=False,
        docker_builder=builder,
        mulled_test=mulled_test,
    )
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def test_build_empty_extra_container():
    """An empty `extra: container:` section must not break the build."""
    recipes = Recipes("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    recipes.write_recipes()
    recipe_dir = recipes.recipe_dirs['one']
    expected_pkgs = utils.built_package_paths(recipe_dir)
    result = build.build(
        recipe=recipe_dir,
        pkg_paths=expected_pkgs,
        mulled_test=True,
    )
    assert result.success
    for built in expected_pkgs:
        assert os.path.exists(built)
        ensure_missing(built)
def test_nested_recipes(config_fixture):
    """
    Test get_recipes ability to identify different nesting depths of recipes
    """
    recipes = Recipes(
        """
        shallow:
          meta.yaml: |
            package:
              name: shallow
              version: "0.1"
          build.sh: |
            #!/bin/bash
            echo "Shallow Created"
            pwd
        normal/normal:
          meta.yaml: |
            package:
              name: normal
              version: "0.1"
            build:
              skip: true
            requirements:
              build:
                - python 3.6
          build.sh: |
            #!/bin/bash
            echo "Testing build.sh through python"
            python -h
        deep/deep/deep:
          meta.yaml: |
            package:
              name: deep
              version: "0.1"
            requirements:
              build:
                - python
              run:
                - python
          build.sh: |
            #!/bin/bash
            ## Empty script
        F/I/V/E/deep:
          meta.yaml: |
            package:
              name: fivedeep
              version: "0.1"
            requirements:
              build:
                - python 3.6
              run:
                - python 3.6
        """, from_string=True)
    recipes.write_recipes()
    assert build.build_recipes(
        recipes.basedir,
        config=config_fixture,
        packages="*",
        testonly=False,
        force=False,
        mulled_test=False,
    )
    # All four nesting depths must be discovered.
    assert len(list(utils.get_recipes(recipes.basedir))) == 4
    for recipe_dir in recipes.recipe_dirs.values():
        for pkg_path in utils.built_package_paths(recipe_dir):
            assert os.path.exists(pkg_path)
            ensure_missing(pkg_path)
def test_rendering_sandboxing():
    """
    Environment variables not on the allow-list (e.g. GITHUB_TOKEN) must not
    be visible while rendering recipes; allowed variables (CONDA_*) must be
    passed through into the rendered meta.yaml.
    """
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
        """, from_string=True)
    r.write_recipes()
    env = {
        # First one is allowed, others are not
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemError.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.
    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert ("'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout))
    else:
        # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)

    r = Recipes(
        """
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            extra:
              var2: {{ CONDA_ARBITRARY_VAR }}
        """, from_string=True)
    r.write_recipes()

    with utils.temp_env(env):
        pkg_paths = utils.built_package_paths(r.recipe_dirs['two'])
        for pkg in pkg_paths:
            ensure_missing(pkg)
        build.build(
            recipe=r.recipe_dirs['two'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False,
        )

        for pkg in pkg_paths:
            target = 'info/recipe/meta.yaml'
            # FIX: close the tarfile and the extracted file handle, clean up
            # the temporary directory (previously leaked via mkdtemp), and use
            # yaml.safe_load — yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input.
            with tarfile.open(pkg) as t, tempfile.TemporaryDirectory() as tmp:
                t.extract(target, path=tmp)
                with open(os.path.join(tmp, target)) as fh:
                    contents = yaml.safe_load(fh.read())
            # CONDA_* variables are allow-listed and must survive rendering.
            assert contents['extra']['var2'] == 'conda-val-here', contents