def test_env_sandboxing():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
          build.sh: |
            #!/bin/bash
            if [[ -z $GITHUB_TOKEN ]]
            then
                exit 0
            else
                echo "\$GITHUB_TOKEN has leaked into the build environment!"
                exit 1
            fi
        """, from_string=True)
    r.write_recipes()
    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])

    with utils.temp_env({'GITHUB_TOKEN': 'token_here'}):
        build.build(
            recipe=r.recipe_dirs['one'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False
        )

    for pkg in pkg_paths:
        assert os.path.exists(pkg)
        ensure_missing(pkg)

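# The tests and fixtures in this section repeatedly call an `ensure_missing`
# helper that is not shown here. A minimal sketch is given below, assuming it
# only removes a previously built package file so each test starts from a
# clean conda-bld directory; the real helper may do more than this.
def ensure_missing(pkg):
    """Delete a built package file if it exists (illustrative sketch)."""
    if os.path.exists(pkg):
        os.unlink(pkg)
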
def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    if request.param:
        logger.error("Making recipe builder")
        docker_builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image=DOCKER_BASE_IMAGE)
        mulled_test = True
        logger.error("DONE")
    else:
        docker_builder = None
        mulled_test = False
    logger.error("Fixture: Building 'one' %s",
                 "within docker" if docker_builder else "locally")
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        pkg_paths=recipes_fixture.pkgs['one'],
        docker_builder=docker_builder,
        mulled_test=mulled_test,
    )
    logger.error("Fixture: Building 'one' %s -- DONE",
                 "within docker" if docker_builder else "locally")
    yield recipes_fixture.pkgs['one']
    for pkg in recipes_fixture.pkgs['one']:
        ensure_missing(pkg)

def _build_pkg(recipe):
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    recipe = r.recipe_dirs['one']
    built_package = utils.built_package_path(recipe)
    ensure_missing(built_package)
    build.build(recipe=r.recipe_dirs['one'], recipe_folder='.', env=env_matrix)
    return built_package

def test_rendering_sandboxing(caplog):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
        """, from_string=True)
    r.write_recipes()
    env = {
        # First one is allowed, others are not
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
    res = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        env=env,
        mulled_test=False)
    assert "Undefined Jinja2 variables remain (['GITHUB_TOKEN']). Please enable source downloading and try again." in caplog.text

    r = Recipes(
        """
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            extra:
              var2: {{ CONDA_ARBITRARY_VAR }}
        """, from_string=True)
    r.write_recipes()
    pkg = utils.built_package_path(r.recipe_dirs['two'], env=env)
    ensure_missing(pkg)
    res = build.build(
        recipe=r.recipe_dirs['two'],
        recipe_folder='.',
        env=env,
        mulled_test=False)
    t = tarfile.open(pkg)
    tmp = tempfile.mkdtemp()
    target = 'info/recipe/meta.yaml'
    t.extract(target, path=tmp)
    contents = yaml.load(open(os.path.join(tmp, target)).read())
    assert contents['extra']['var2'] == 'conda-val-here', contents

def test_rendering_sandboxing():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
        """, from_string=True)
    r.write_recipes()
    env = {
        # None of these should be passed to the recipe
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However, if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemExit.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.
    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)
    else:
        # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)

def _build_pkg(recipe, mulled_test=False):
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']
    built_packages = utils.built_package_paths(recipe)
    for pkg in built_packages:
        ensure_missing(pkg)
    build.build(
        recipe=r.recipe_dirs['one'],
        pkg_paths=built_packages,
        mulled_test=mulled_test,
    )
    return built_packages

def _build_pkg(recipe, mulled_test=False):
    r = Recipes(recipe, from_string=True)
    r.write_recipes()
    recipe = r.recipe_dirs['one']
    built_packages = utils.built_package_paths(recipe)
    for pkg in built_packages:
        ensure_missing(pkg)
    build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=built_packages,
        mulled_test=mulled_test,
    )
    return built_packages

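# Illustrative only: how a test might use the `_build_pkg` helper above with a
# minimal inline recipe. The test name below is made up for this sketch and is
# not part of the original suite.
def test_build_pkg_sketch():
    built_packages = _build_pkg(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
        """,
        mulled_test=False)
    for pkg in built_packages:
        assert os.path.exists(pkg)
        ensure_missing(pkg)
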
def test_build_empty_extra_container():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              container:
                # empty
        """, from_string=True)
    r.write_recipes()
    pkgs = utils.built_package_paths(r.recipe_dirs['one'])
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=pkgs,
        mulled_test=True,
    )
    assert build_result.success
    for pkg in pkgs:
        assert os.path.exists(pkg)
        ensure_missing(pkg)

def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    if request.param:
        docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
    else:
        docker_builder = None
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        docker_builder=docker_builder,
        env=env_matrix,
    )
    built_package = recipes_fixture.pkgs['one']
    yield built_package
    ensure_missing(built_package)

def _build_pkg():
    r = Recipes(dedent("""
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - "ls -la"
        """), from_string=True)
    r.write_recipes()
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    recipe = r.recipe_dirs['one']
    built_package = utils.built_package_path(recipe)
    ensure_missing(built_package)
    build.build(recipe=r.recipe_dirs['one'], recipe_folder='.', env=env_matrix)
    return built_package

def single_upload():
    """
    Creates a randomly-named recipe and uploads it using a label so that it
    doesn't affect the main bioconda channel. Tests that depend on this
    fixture get a tuple of name, package, recipe dir. Cleans up when it's
    done.
    """
    name = 'upload-test-' + str(uuid.uuid4()).split('-')[0]
    r = Recipes('''
        {0}:
          meta.yaml: |
            package:
              name: {0}
              version: "0.1"
        '''.format(name), from_string=True)
    r.write_recipes()
    env_matrix = list(utils.EnvMatrix(tmp_env_matrix()))[0]
    build.build(
        recipe=r.recipe_dirs[name],
        recipe_folder='.',
        docker_builder=None,
        mulled_test=False,
        env=env_matrix,
    )
    pkg = utils.built_package_path(r.recipe_dirs[name])

    with utils.temp_env(
            dict(TRAVIS_BRANCH='master', TRAVIS_PULL_REQUEST='false')):
        upload.anaconda_upload(pkg, label=TEST_LABEL)

    yield (name, pkg, r.recipe_dirs[name])

    p = sp.run(
        ['anaconda', '-t', os.environ.get('ANACONDA_TOKEN'), 'remove',
         'bioconda/{0}'.format(name), '--force'],
        stdout=sp.PIPE, stderr=sp.STDOUT, check=True,
        universal_newlines=True)

def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    if request.param:
        docker_builder = docker_utils.RecipeBuilder(
            use_host_conda_bld=True,
            docker_base_image="bioconda-utils-build-env:latest")
        mulled_test = True
    else:
        docker_builder = None
        mulled_test = False
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=recipes_fixture.pkgs['one'],
        docker_builder=docker_builder,
        mulled_test=mulled_test,
    )
    yield recipes_fixture.pkgs['one']
    for pkg in recipes_fixture.pkgs['one']:
        ensure_missing(pkg)

def single_build(request, recipes_fixture):
    """
    Builds the "one" recipe.
    """
    if request.param:
        docker_builder = docker_utils.RecipeBuilder(use_host_conda_bld=True)
        mulled_test = True
    else:
        docker_builder = None
        mulled_test = False
    build.build(
        recipe=recipes_fixture.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=recipes_fixture.pkgs['one'],
        docker_builder=docker_builder,
        mulled_test=mulled_test,
    )
    yield recipes_fixture.pkgs['one']
    for pkg in recipes_fixture.pkgs['one']:
        ensure_missing(pkg)

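# The `single_build` fixtures above read `request.param` to choose between a
# Docker build and a local build, which implies they are parametrized pytest
# fixtures. A minimal sketch of such a parametrization is shown below; the
# params/ids values and the module scope are assumptions, not taken from the
# original suite.
@pytest.fixture(scope='module', params=[True, False], ids=['docker', 'local'])
def build_in_docker(request):
    # True -> build inside Docker, False -> build locally (illustrative only).
    return request.param
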
def single_upload():
    """
    Creates a randomly-named recipe and uploads it using a label so that it
    doesn't affect the main bioconda channel. Tests that depend on this
    fixture get a tuple of name, package, recipe dir. Cleans up when it's
    done.
    """
    name = 'upload-test-' + str(uuid.uuid4()).split('-')[0]
    r = Recipes('''
        {0}:
          meta.yaml: |
            package:
              name: {0}
              version: "0.1"
        '''.format(name), from_string=True)
    r.write_recipes()
    r.pkgs = {}
    r.pkgs[name] = utils.built_package_paths(r.recipe_dirs[name])

    build.build(
        recipe=r.recipe_dirs[name],
        recipe_folder='.',
        pkg_paths=r.pkgs[name],
        docker_builder=None,
        mulled_test=False)

    pkg = r.pkgs[name][0]
    upload.anaconda_upload(pkg, label=TEST_LABEL)

    yield (name, pkg, r.recipe_dirs[name])

    sp.run(
        ['anaconda', '-t', os.environ.get('ANACONDA_TOKEN'), 'remove',
         'bioconda/{0}'.format(name), '--force'],
        stdout=sp.PIPE, stderr=sp.STDOUT, check=True,
        universal_newlines=True)

def test_build_container_no_default_gcc(tmpdir):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    r.write_recipes()

    # Tests with the repository's Dockerfile instead of already uploaded images.
    # Copy repository to image build directory so everything is in docker context.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)

    # Dockerfile will be recreated by RecipeBuilder => extract template and delete file
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as f:
        dockerfile_template = f.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    docker_builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        pkg_paths=pkg_paths,
        docker_builder=docker_builder,
        mulled_test=False,
    )
    assert build_result.success

    for k, v in r.recipe_dirs.items():
        for i in utils.built_package_paths(v):
            assert os.path.exists(i)
            ensure_missing(i)

def test_build_container_no_default_gcc(tmpdir):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
        """, from_string=True)
    r.write_recipes()

    # Tests with the repository's Dockerfile instead of already uploaded images.
    # Copy repository to image build directory so everything is in docker context.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)

    # Dockerfile will be recreated by RecipeBuilder => extract template and delete file
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as f:
        dockerfile_template = f.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    docker_builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        pkg_paths=pkg_paths,
        docker_builder=docker_builder,
        mulled_test=False,
    )
    assert build_result.success

def test_build_container_default_gcc(tmpdir):
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            test:
              commands:
                - gcc --version
                - 'gcc --version | grep "gcc (GCC) 4.8.2 20140120 (Red Hat 4.8.2-15)"'
        """, from_string=True)
    r.write_recipes()

    # Tests with the repository's Dockerfile instead of already uploaded images.
    # Copy repository to image build directory so everything is in docker context.
    image_build_dir = os.path.join(tmpdir, "repo")
    src_repo_dir = os.path.join(os.path.dirname(__file__), "..")
    shutil.copytree(src_repo_dir, image_build_dir)

    # Dockerfile will be recreated by RecipeBuilder => extract template and delete file
    dockerfile = os.path.join(image_build_dir, "Dockerfile")
    with open(dockerfile) as f:
        dockerfile_template = f.read().replace("{", "{{").replace("}", "}}")
    os.remove(dockerfile)

    docker_builder = docker_utils.RecipeBuilder(
        dockerfile_template=dockerfile_template,
        use_host_conda_bld=True,
        image_build_dir=image_build_dir,
    )

    build_result = build.build(
        recipe=r.recipe_dirs['one'],
        recipe_folder='.',
        env={},
        docker_builder=docker_builder,
        mulled_test=False,
    )
    assert build_result.success

def test_rendering_sandboxing():
    r = Recipes(
        """
        one:
          meta.yaml: |
            package:
              name: one
              version: 0.1
            extra:
              var: {{ GITHUB_TOKEN }}
        """, from_string=True)
    r.write_recipes()
    env = {
        # First one is allowed, others are not
        'CONDA_ARBITRARY_VAR': 'conda-val-here',
        'TRAVIS_ARBITRARY_VAR': 'travis-val-here',
        'GITHUB_TOKEN': 'asdf',
        'BUILDKITE_TOKEN': 'asdf',
    }

    # If GITHUB_TOKEN is already set in the bash environment, then we get
    # a message on stdout+stderr (this is the case on travis-ci).
    #
    # However, if GITHUB_TOKEN is not already set in the bash env (e.g., when
    # testing locally), then we get a SystemExit.
    #
    # In both cases we're passing in the `env` dict, which does contain
    # GITHUB_TOKEN.
    if 'GITHUB_TOKEN' in os.environ:
        with pytest.raises(sp.CalledProcessError) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
                _raise_error=True,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value.stdout)
    else:
        # recipe for "one" should fail because GITHUB_TOKEN is not a jinja var.
        with pytest.raises(SystemExit) as excinfo:
            pkg_paths = utils.built_package_paths(r.recipe_dirs['one'])
            build.build(
                recipe=r.recipe_dirs['one'],
                recipe_folder='.',
                pkg_paths=pkg_paths,
                mulled_test=False,
            )
        assert "'GITHUB_TOKEN' is undefined" in str(excinfo.value)

    r = Recipes(
        """
        two:
          meta.yaml: |
            package:
              name: two
              version: 0.1
            extra:
              var2: {{ CONDA_ARBITRARY_VAR }}
        """, from_string=True)
    r.write_recipes()

    with utils.temp_env(env):
        pkg_paths = utils.built_package_paths(r.recipe_dirs['two'])
        for pkg in pkg_paths:
            ensure_missing(pkg)
        build.build(
            recipe=r.recipe_dirs['two'],
            recipe_folder='.',
            pkg_paths=pkg_paths,
            mulled_test=False,
        )

        for pkg in pkg_paths:
            t = tarfile.open(pkg)
            tmp = tempfile.mkdtemp()
            target = 'info/recipe/meta.yaml'
            t.extract(target, path=tmp)
            contents = yaml.load(open(os.path.join(tmp, target)).read())
            assert contents['extra']['var2'] == 'conda-val-here', contents