Ejemplo n.º 1
0
def test_token_upload(testing_workdir):
    """Round-trip a token-authenticated upload of the empty_sections package.

    Token was generated with the conda_test_account user via:
        anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
    """
    client_args = AnacondaClientArgs(
        specs="conda_test_account/empty_sections",
        token="co-79de533f-926f-4e5e-a766-d393e33ae98f",
        force=True)

    # Start from a clean slate: the package must not exist before the build.
    remove.main(client_args)
    with pytest.raises(NotFound):
        show.main(client_args)

    # Build and upload the test recipe.
    api.build(empty_sections, token=client_args.token)

    # The package should now be visible (show raises if it is not).
    show.main(client_args)

    # Tear down, then confirm the package is gone again.
    remove.main(client_args)
    with pytest.raises(NotFound):
        show.main(client_args)
Ejemplo n.º 2
0
def test_noarch_foo_value():
    """A custom noarch value must be recorded verbatim in info/index.json."""
    recipe_dir = os.path.join(metadata_dir, "noarch_foo")
    pkg_path = api.get_output_file_path(recipe_dir)
    api.build(recipe_dir)
    index = json.loads(package_has_file(pkg_path, 'info/index.json').decode())
    assert 'noarch' in index
    assert index['noarch'] == "foo"
Ejemplo n.º 3
0
def test_recursive_fail(testing_workdir, testing_config):
    """A recipe whose dependency chain loops back must abort with RuntimeError."""
    with pytest.raises(RuntimeError) as exc:
        api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config)
    # Keep this assert OUTSIDE the `with` block: if it were indented and the
    # exception never fired, `exc` would be a half-initialized ExceptionInfo
    # and raise a confusing AttributeError about 'typename'.
    assert "recursive-build2" in str(exc.value)
Ejemplo n.º 4
0
def test_info_files_json(test_config):
    """Validate the structure of info/paths.json in a built package.

    Checks that only known top-level keys and per-file fields appear, and that
    prefix-replacement metadata is present exactly for the two scripts that
    embed the build prefix.
    """
    recipe = os.path.join(metadata_dir, "ignore_some_prefix_files")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    assert package_has_file(fn, "info/paths.json")
    with tarfile.open(fn) as tf:
        data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8'))

    fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link",
              "prefix_placeholder", "inode_paths"]
    # only the documented top-level keys may appear
    for key in data.keys():
        assert key in ['paths', 'paths_version']
    paths = data.get('paths')
    assert len(paths) == 2
    # FIX: the original validated per-file fields in two identical loops;
    # one pass over the entries is sufficient.
    for entry in paths:
        for key in entry.keys():
            assert key in fields
        short_path = entry.get("_path")
        if short_path == "test.sh" or short_path == "test.bat":
            # these scripts embed the build prefix, so replacement info is required
            assert entry.get("prefix_placeholder") is not None
            assert entry.get("file_mode") is not None
        else:
            assert entry.get("prefix_placeholder") is None
            assert entry.get("file_mode") is None
Ejemplo n.º 5
0
def test_skip_existing_url(testing_workdir, testing_metadata, capfd):
    """skip_existing must also honor packages found via a channel URL."""
    # Build once so there is a package to discover later.
    outputs = api.build(testing_metadata)

    # Stage the built package in a fresh local channel layout.
    output_dir = os.path.join(testing_workdir, 'someoutput')
    platform = os.path.join(output_dir, testing_metadata.config.host_subdir)
    os.makedirs(platform)
    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))

    # Index the platform dir so conda can resolve the file.
    api.update_index(platform, config=testing_metadata.config)

    # HACK: conda 4.3.2+ only treats the directory as a valid channel when a
    # noarch subdir exists, so create and index one.
    noarch = os.path.join(output_dir, 'noarch')
    os.makedirs(noarch)
    api.update_index(noarch, config=testing_metadata.config)

    testing_metadata.config.skip_existing = True
    testing_metadata.config.channel_urls = [url_path(output_dir)]
    api.build(testing_metadata)

    captured, _ = capfd.readouterr()
    assert "are already built" in captured
    assert url_path(testing_metadata.config.croot) in captured
Ejemplo n.º 6
0
def test_fix_permissions(test_config):
    """Every member of the built tarball must be at least world-readable (0o444)."""
    recipe = os.path.join(metadata_dir, "fix_permissions")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    # FIX: the tarfile handle was never closed; use a context manager so the
    # file descriptor is released even when an assertion fails.
    with tarfile.open(fn) as tf:
        for f in tf.getmembers():
            assert f.mode & 0o444 == 0o444, \
                "tar member '{}' has invalid (read) mode".format(f.name)
def test_recipe_build(recipe, testing_config, testing_workdir, monkeypatch):
    """Build each parametrized recipe with activation on and test env vars set."""
    # These variables exist purely so build scripts can verify their presence.
    testing_config.activate = True
    for var, value in (("CONDA_TEST_VAR", "conda_test"),
                       ("CONDA_TEST_VAR_2", "conda_test_2")):
        monkeypatch.setenv(var, value)
    api.build(recipe, config=testing_config)
Ejemplo n.º 8
0
def test_recipe_test(testing_workdir, test_config):
    """Build without testing, then run the test phase as a separate step."""
    # temporarily necessary because we have custom rebuilt svn for longer prefix here
    test_config.channel_urls = ('conda_build_test', )
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    api.build(recipe_dir, config=test_config, notest=True)
    api.test(recipe_dir, config=test_config)
Ejemplo n.º 9
0
def test_inspect_prefix_length(testing_workdir, capfd):
    """`conda inspect prefix-lengths` must flag short-prefix packages and exit."""
    from conda_build import api

    # build our own known-length package here
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    fn = api.get_output_file_path(recipe_path, config=config)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80
    api.build(recipe_path, config=config)

    args = ["prefix-lengths", fn]
    with pytest.raises(SystemExit):
        main_inspect.execute(args)
    # FIX: these assertions previously sat inside the `with` block AFTER the
    # call that raises SystemExit, so they never executed.
    output, error = capfd.readouterr()
    assert "Packages with binary prefixes shorter than" in output
    assert fn in output

    # With the full 255-character prefix, no package should be flagged.
    config.prefix_length = 255
    api.build(recipe_path, config=config)
    main_inspect.execute(args)
    output, error = capfd.readouterr()
    assert "No packages found with binary prefixes shorter" in output
Ejemplo n.º 10
0
def test_token_upload(testing_workdir, testing_metadata):
    """Upload a uniquely named package with a token, then clean it up.

    Token was generated with the conda_test_account user via:
        anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
    """
    folder_uuid = uuid.uuid4().hex
    client_args = AnacondaClientArgs(
        specs="conda_build_test/test_token_upload_" + folder_uuid,
        token="co-143399b8-276e-48db-b43f-4a3de839a024",
        force=True)

    # The unique name must not exist yet.
    with pytest.raises(NotFound):
        show.main(client_args)

    testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid])
    testing_metadata.config.token = client_args.token

    # Build and upload the test recipe.
    api.build(testing_metadata)

    # The package must now be visible (show raises otherwise).
    show.main(client_args)

    # Remove it again and verify the cleanup took effect.
    remove.main(client_args)
    with pytest.raises(NotFound):
        show.main(client_args)
Ejemplo n.º 11
0
def test_pin_subpackage_exact(testing_config):
    """run_exports from a subpackage must pin to the exact hash and build number."""
    recipe = os.path.join(metadata_dir, '_pin_subpackage_exact')
    rendered = api.render(recipe, config=testing_config)
    pattern = r'run_exports_subpkg 1.0 h[a-f0-9]{%s}_0' % testing_config.hash_length
    run_reqs = (req for (m, _, _) in rendered
                for req in m.meta['requirements']['run'])
    assert any(re.match(pattern, req) for req in run_reqs)
    api.build(recipe, config=testing_config)
Ejemplo n.º 12
0
def test_token_upload(testing_workdir):
    """Render, uniquely rename, build+upload with a token, then clean up.

    Token was generated with the conda_test_account user via:
        anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
    """
    folder_uuid = uuid.uuid4().hex
    client_args = AnacondaClientArgs(
        specs="conda_test_account/empty_sections_" + folder_uuid,
        token="co-79de533f-926f-4e5e-a766-d393e33ae98f",
        force=True)

    # The uniquely named package must not exist yet.
    with pytest.raises(NotFound):
        show.main(client_args)

    metadata, _, _ = api.render(empty_sections)
    metadata.meta['package']['name'] = '_'.join([metadata.name(), folder_uuid])
    metadata.config.token = client_args.token

    # Build and upload the test recipe.
    api.build(metadata)

    # The package must now be visible (show raises otherwise).
    show.main(client_args)

    # Remove it again and verify the cleanup took effect.
    remove.main(client_args)
    with pytest.raises(NotFound):
        show.main(client_args)
Ejemplo n.º 13
0
def test_noarch_python():
    """noarch: python packages must record entry points and type in noarch.json."""
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe)
    api.build(recipe)
    # FIX: `is not ''` is an identity comparison and is effectively always
    # True; the intent is that info/files exists and is non-empty.
    assert package_has_file(fn, 'info/files')
    noarch = json.loads(package_has_file(fn, 'info/noarch.json').decode())
    assert 'entry_points' in noarch
    assert 'type' in noarch
Ejemplo n.º 14
0
def test_dirty_variable_available_in_build_scripts(testing_workdir, test_config):
    """The recipe's dirty-only section builds iff config.dirty is set."""
    recipe = os.path.join(metadata_dir, "_dirty_skip_section")
    test_config.dirty = True
    api.build(recipe, config=test_config)

    # Without dirty, the recipe is expected to bail out.
    test_config.dirty = False
    with pytest.raises(SystemExit):
        api.build(recipe, config=test_config)
Ejemplo n.º 15
0
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, test_config):
    """Regression test for https://github.com/conda/conda-build/issues/1451.

    The recipe uses jinja2, which would normally trigger a source download
    during the test phase — but the source was already fetched at build time,
    so no second download should happen.
    """
    recipe = os.path.join(metadata_dir, 'jinja2_build_str')
    api.build(recipe, config=test_config, notest=True)
    built_pkg = api.get_output_file_path(recipe, config=test_config)
    api.test(built_pkg, config=test_config)
Ejemplo n.º 16
0
def test_env_creation_fail_exits_build(testing_config):
    """A failing post-link script must abort the build, for build and test envs alike."""
    expected = (RuntimeError, LinkError, CondaError)
    for recipe_name in ('_post_link_exits_after_retry', '_post_link_exits_tests'):
        with pytest.raises(expected):
            api.build(os.path.join(metadata_dir, recipe_name), config=testing_config)
Ejemplo n.º 17
0
def test_per_output_tests(testing_config, capfd):
    """Both the top-level and the per-output test scripts must each run once."""
    recipe_dir = os.path.join(subpackage_dir, '_per_output_tests')
    api.build(recipe_dir, config=testing_config)
    out, err = capfd.readouterr()
    # Windows echoes each command, so every message shows up twice there.
    expected_count = 2 if utils.on_win else 1
    assert out.count("output-level test") == expected_count, out
    assert out.count("top-level test") == expected_count, out
Ejemplo n.º 18
0
def test_append_python_app_osx(testing_config):
    """Recipes using osx_is_app need python.app in their run requirements.

    conda-build is expected to add it automatically when missing; without it
    pythonw would be absent and this recipe's tests would fail.
    """
    api.build(os.path.join(metadata_dir, '_osx_is_app_missing_python_app'),
              config=testing_config)
Ejemplo n.º 19
0
def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config):
    """The recipe's dirty-only section builds iff config.dirty is set."""
    recipe = os.path.join(metadata_dir, "_dirty_skip_section")
    testing_config.dirty = True
    api.build(recipe, config=testing_config)

    # Without dirty, the build script is expected to fail.
    testing_config.dirty = False
    with pytest.raises(subprocess.CalledProcessError):
        api.build(recipe, config=testing_config)
Ejemplo n.º 20
0
def test_copy_read_only_file_with_xattr(testing_config, testing_workdir):
    """Building must cope with a read-only source file carrying an xattr."""
    src_recipe = os.path.join(metadata_dir, '_xattr_copy')
    recipe = os.path.join(testing_workdir, '_xattr_copy')
    copy_into(src_recipe, recipe)
    # The file starts out owner-writable; set the attribute first, then drop
    # the permissions to 400.
    ro_file = os.path.join(recipe, 'mode_400_file')
    for command in ('setfattr -n user.attrib -v somevalue {}', 'chmod 400 {}'):
        subprocess.check_call(command.format(ro_file), shell=True)
    api.build(recipe, config=testing_config)
Ejemplo n.º 21
0
def test_ignore_run_exports(testing_metadata, testing_config):
    """ignore_run_exports must drop the downstream pin from run requirements."""
    # need to clear conda's index, or else we somehow pick up the
    # test_run_exports folder above for our package here.
    api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config)
    testing_metadata.meta['requirements']['build'] = ['test_has_run_exports']
    testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package']
    testing_metadata.config.index = None
    finalized = finalize_metadata(testing_metadata)
    assert 'downstream_pinned_package 1.0' not in finalized.meta['requirements']['run']
Ejemplo n.º 22
0
def test_no_include_recipe_meta_yaml(testing_metadata, testing_config):
    """include_recipe: false must omit the recipe from the built package."""
    # Sanity check first: by default the recipe IS bundled (same as the test
    # above, repeated here on purpose).
    outputs = api.build(testing_metadata)
    assert package_has_file(outputs[0], "info/recipe/meta.yaml")

    no_recipe_pkg = api.build(os.path.join(metadata_dir, '_no_include_recipe'),
                              config=testing_config)[0]
    assert not package_has_file(no_recipe_pkg, "info/recipe/meta.yaml")
Ejemplo n.º 23
0
def test_pypi_pin_numpy(testing_workdir, testing_config):
    """pin_numpy skeletons must emit 'numpy x.x' and fail to build without numpy.

    The chosen package must depend on numpy for pin-numpy to have any effect.
    """
    api.skeletonize(packages='msumastro', repo='pypi', version='0.9.0', config=testing_config,
                    pin_numpy=True)
    with open(os.path.join('msumastro', 'meta.yaml')) as meta_file:
        assert meta_file.read().count('numpy x.x') == 2
    with pytest.raises(DependencyNeedsBuildingError):
        api.build('msumastro')
Ejemplo n.º 24
0
def test_remove_workdir_default(testing_config, caplog):
    """The work directory must be empty after a default (non-keep) build."""
    recipe = os.path.join(metadata_dir, '_keep_work_dir')
    # Render first to obtain a metadata object: the build folder is computed
    # during the build, and config objects are always copied rather than
    # edited in place (so variants don't clobber one another) — the config we
    # pass in would never learn the work_dir otherwise.
    metadata = api.render(recipe, config=testing_config)[0][0]
    api.build(metadata)
    assert not glob(os.path.join(metadata.config.work_dir, '*'))
Ejemplo n.º 25
0
def execute(args):
    """CLI entry point: parse args, configure channels, then dispatch.

    Runs one of the output/test/source/check actions when the matching flag
    is set; otherwise performs a full build of every recipe given. The
    pseudo-recipes 'purge' and 'purge-all' short-circuit the normal flow.
    """
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.channel or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    # 'purge' removes build intermediates only; 'purge-all' also removes
    # built packages.  Both return early without building anything.
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    # Select at most one non-build action; precedence follows the order of
    # the checks below (output > test > source > check).
    action = None
    if args.output:
        action = output_action
        # --output should print only paths, so silence normal logging
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            action(recipe, config)

    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                   notest=args.notest, keep_old_work=args.keep_old_work,
                   already_built=None, config=config, noverify=args.no_verify)

    # Warn about leftover build intermediates unless we're in --output mode.
    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
Ejemplo n.º 26
0
def test_run_exports_in_subpackage(testing_metadata):
    """A run_exports declared on an output must pin downstream host deps."""
    exporter = testing_metadata.copy()
    exporter.meta['outputs'] = [{'name': 'has_run_exports', 'run_exports': 'bzip2 1.0'}]
    api.build(exporter, config=testing_metadata.config)[0]
    # api.update_index(os.path.dirname(output), config=testing_metadata.config)
    consumer = testing_metadata.copy()
    consumer.meta['requirements']['host'] = ['has_run_exports']
    finalized = finalize_metadata(consumer)
    assert 'bzip2 1.0.*' in finalized.meta['requirements']['run']
Ejemplo n.º 27
0
def execute(args):
    """CLI entry point (older variant): parse args, then dispatch.

    Like the other execute() in this file but assigns channel URLs directly,
    empties the Windows trash, and fully renders each recipe before running a
    non-build action.
    """
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    # 'purge' removes build intermediates only; 'purge-all' also removes
    # built packages.  Both return early without building anything.
    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    if on_win:
        delete_trash(None)

    set_language_env_vars(args, parser, config=config, execute=execute)

    # Select at most one non-build action; precedence follows the order of
    # the checks below (output > test > source > check).
    action = None
    if args.output:
        action = output_action
        # --output should print only paths, so silence normal logging
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config)
            action(m, config)

            if need_cleanup:
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                   notest=args.notest, keep_old_work=args.keep_old_work,
                   already_built=None, config=config)

    # Warn about leftover build intermediates unless we're in --output mode.
    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
Ejemplo n.º 28
0
def test_rm_rf_does_not_remove_relative_source_package_files(testing_config, monkeypatch):
    """rm_rf during a build must not reach outside the prefix into the recipe dir."""
    recipe_dir = os.path.join(subpackage_dir, '_rm_rf_stays_within_prefix')
    monkeypatch.chdir(recipe_dir)
    target = os.path.join('bin', 'lsfm')
    if not os.path.isfile(target):
        with open(target, 'w') as handle:
            handle.write('weee')
    assert os.path.isfile(target)
    api.build('conda', config=testing_config)
    # the file must survive the build's cleanup
    assert os.path.isfile(target)
Ejemplo n.º 29
0
def test_disable_pip(testing_config, testing_metadata):
    """With disable_pip set, neither pip nor setuptools is implicitly installed."""
    testing_metadata.disable_pip = True
    probes = (
        'python -c "import pip; print(pip.__version__)"',
        'python -c "import setuptools; print(setuptools.__version__)"',
    )
    for script in probes:
        testing_metadata.meta['build']['script'] = script
        with pytest.raises(subprocess.CalledProcessError):
            api.build(testing_metadata)
Ejemplo n.º 30
0
def test_purge_all(test_metadata):
    """purge-all must clear build folders AND built packages in platform dirs
    such as osx-64."""
    api.build(test_metadata)
    built_pkg = api.get_output_file_path(test_metadata)
    main_build.execute(['purge-all', '--croot', test_metadata.config.croot])
    assert not get_build_folders(test_metadata.config.croot)
    assert not os.path.isfile(built_pkg)
Ejemplo n.º 31
0
def test_render_setup_py_old_funcname(testing_workdir, testing_config, caplog):
    """Using the legacy load_setuptools name must log a deprecation notice."""
    recipe = os.path.join(metadata_dir, "_source_setuptools")
    api.build(recipe, config=testing_config)
    assert "Deprecation notice: the load_setuptools function has been renamed to " in caplog.text
Ejemplo n.º 32
0
def test_no_locking(testing_config):
    """Builds must succeed with filesystem locking disabled."""
    recipe = os.path.join(metadata_dir, 'source_git_jinja2')
    index_dir = os.path.join(testing_config.croot, testing_config.subdir)
    api.update_index(index_dir, config=testing_config)
    api.build(recipe, config=testing_config, locking=False)
Ejemplo n.º 33
0
def test_build_with_activate_does_activate():
    """With activate=True the build env's variables are visible to the script."""
    recipe = os.path.join(metadata_dir, '_set_env_var_activate_build')
    api.build(recipe, activate=True, anaconda_upload=False)
Ejemplo n.º 34
0
def test_cmake_generator(platform, target_compiler, testing_workdir,
                         testing_config):
    """CMake generator selection must track the targeted python variant."""
    testing_config.variant['python'] = target_compiler
    recipe = os.path.join(metadata_dir, '_cmake_generator')
    api.build(recipe, config=testing_config)
Ejemplo n.º 35
0
def test_only_r_env_vars_defined(testing_config):
    """R-related environment variables must be defined when building an R recipe."""
    testing_config.channel_urls = ('r', )
    api.build(os.path.join(metadata_dir, '_r_env_defined'), config=testing_config)
Ejemplo n.º 36
0
def test_unknown_selectors(testing_config):
    """Unrecognized selectors in meta.yaml must not be fatal."""
    api.build(os.path.join(metadata_dir, 'unknown_selector'), config=testing_config)
Ejemplo n.º 37
0
def test_noarch_python_with_tests(testing_config):
    """A noarch: python recipe with a test section must build and pass."""
    api.build(os.path.join(metadata_dir, "_noarch_python_with_tests"),
              config=testing_config)
Ejemplo n.º 38
0
def test_noarch_foo_value(testing_config):
    """noarch: generic must be recorded in the package's index.json."""
    built = api.build(os.path.join(metadata_dir, "noarch_generic"),
                      config=testing_config)
    index = json.loads(package_has_file(built[0], 'info/index.json').decode())
    assert index['noarch'] == "generic"
Ejemplo n.º 39
0
def test_noarch_none_value(testing_workdir, testing_config):
    """A bare/None noarch value must raise CondaBuildException."""
    recipe = os.path.join(metadata_dir, "_noarch_none")
    with pytest.raises(exceptions.CondaBuildException):
        api.build(recipe, config=testing_config)
Ejemplo n.º 40
0
def test_rpath_linux(testing_config):
    """The _rpath recipe (rpath patching on Linux) must build cleanly."""
    recipe = os.path.join(metadata_dir, "_rpath")
    api.build(recipe, config=testing_config)
Ejemplo n.º 41
0
def test_relative_git_url_submodule_clone(testing_workdir, testing_config,
                                          monkeypatch):
    """
    A multi-part test encompassing the following checks:

    1. That git submodules identified with both relative and absolute URLs can be mirrored
       and cloned.

    2. That changes pushed to the original repository are updated in the mirror and finally
       reflected in the package version and filename via `GIT_DESCRIBE_TAG`.

    3. That `source.py` is using `check_call_env` and `check_output_env` and that those
       functions are using tools from the build env.
    """

    # Three repos: a top-level repo plus one submodule referenced by a
    # relative URL and one referenced by an absolute URL.
    toplevel = os.path.join(testing_workdir, 'toplevel')
    os.mkdir(toplevel)
    relative_sub = os.path.join(testing_workdir, 'relative_sub')
    os.mkdir(relative_sub)
    absolute_sub = os.path.join(testing_workdir, 'absolute_sub')
    os.mkdir(absolute_sub)

    sys_git_env = os.environ.copy()
    sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build'
    sys_git_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
    sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build'
    sys_git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

    # Find the git executable before putting our dummy one on PATH.
    git = find_executable('git')

    # Put the broken git on os.environ["PATH"]
    exename = dummy_executable(testing_workdir, 'git')
    monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep)
    # .. and ensure it gets run (and fails).
    FNULL = open(os.devnull, 'w')
    # Strangely ..
    #   stderr=FNULL suppresses the output from echo on OS X whereas
    #   stdout=FNULL suppresses the output from echo on Windows
    # NOTE(review): the `message` kwarg to pytest.raises was deprecated in
    # pytest 3.8 and removed in pytest 4 — replace with an explicit
    # pytest.fail when upgrading.
    with pytest.raises(subprocess.CalledProcessError,
                       message="Dummy git was not executed"):
        check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL)
    FNULL.close()

    # Two rounds: tag 0 creates the repos and adds the submodules; tag 1
    # commits updates and pulls them into the submodule mirrors.
    for tag in range(2):
        os.chdir(absolute_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('absolute', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'absolute'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)],
                       env=sys_git_env)

        os.chdir(relative_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('relative', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'relative'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)],
                       env=sys_git_env)

        os.chdir(toplevel)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('toplevel', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'toplevel'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)],
                       env=sys_git_env)
        if tag == 0:
            check_call_env([
                git, 'submodule', 'add',
                convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'
            ],
                           env=sys_git_env)
            check_call_env(
                [git, 'submodule', 'add', '../relative_sub', 'relative'],
                env=sys_git_env)
        else:
            # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we
            # can change this to `git submodule update --recursive`.
            check_call_env([git, 'submodule', 'foreach', git, 'pull'],
                           env=sys_git_env)
        check_call_env(
            [git, 'commit', '-am', 'added submodules@{}'.format(tag)],
            env=sys_git_env)
        check_call_env(
            [git, 'tag', '-a',
             str(tag), '-m', 'tag {}'.format(tag)],
            env=sys_git_env)

        # It is possible to use `Git for Windows` here too, though you *must* not use a different
        # (type of) git than the one used above to add the absolute submodule, because .gitmodules
        # stores the absolute path and that is not interchangeable between MSYS2 and native Win32.
        #
        # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As
        # things stand, my _b_env folder for this test contains more than 80 characters.
        requirements = ('requirements',
                        OrderedDict([('build', [
                            'git            # [False]',
                            'm2-git         # [win]', 'm2-filesystem  # [win]'
                        ])]))

        recipe_dir = os.path.join(testing_workdir, 'recipe')
        if not os.path.exists(recipe_dir):
            os.makedirs(recipe_dir)
        filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml')
        data = OrderedDict([
            ('package',
             OrderedDict([('name', 'relative_submodules'),
                          ('version', '{{ GIT_DESCRIBE_TAG }}')])),
            ('source',
             OrderedDict([('git_url', toplevel),
                          ('git_tag', str(tag))])), requirements,
            ('build',
             OrderedDict([('script', [
                 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > '
                 '%PREFIX%\\summaries.txt  # [win]',
                 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > '
                 '$PREFIX/summaries.txt   # [not win]'
             ])])),
            ('test',
             OrderedDict([('commands', [
                 'echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt       # [win]'
                 .format(tag, tag),
                 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]',
                 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt         # [not win]'
                 .format(tag, tag),
                 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]'
             ])]))
        ])

        with open(filename, 'w') as outfile:
            outfile.write(
                yaml.dump(data, default_flow_style=False, width=999999999))
        # Reset the path because our broken, dummy `git` would cause `render_recipe`
        # to fail, while no `git` will cause the build_dependencies to be installed.
        monkeypatch.undo()
        # This will (after one spin round the loop) install and run 'git' with the
        # build env prepended to os.environ[]
        metadata = api.render(testing_workdir, config=testing_config)[0][0]
        output = api.get_output_file_path(metadata, config=testing_config)[0]
        assert ("relative_submodules-{}-".format(tag) in output)
        api.build(metadata, config=testing_config)
Ejemplo n.º 42
0
def test_build_metadata_object(testing_metadata):
    """api.build must accept an in-memory metadata object, not only a path."""
    api.build(testing_metadata)
Ejemplo n.º 43
0
def test_skip_existing(testing_workdir, testing_config, capfd):
    """skip_existing must detect a locally built package and skip rebuilding."""
    # First build populates the local croot.
    api.build(empty_sections, config=testing_config)
    api.build(empty_sections, config=testing_config, skip_existing=True)
    captured, _ = capfd.readouterr()
    assert "are already built" in captured
Ejemplo n.º 44
0
def test_pin_depends(testing_metadata):
    """pin_depends: record still builds.

    This is deprecated functionality - replaced by the more general variants
    pinning scheme.
    """
    testing_metadata.meta['build']['pin_depends'] = 'record'
    api.build(testing_metadata)
Ejemplo n.º 45
0
def test_croot_with_spaces(testing_metadata, testing_workdir):
    """Builds must work when the croot path contains a space."""
    testing_metadata.config.croot = os.path.join(testing_workdir, "space path")
    api.build(testing_metadata)
Ejemplo n.º 46
0
def test_jinja_typo(testing_workdir, testing_config):
    """A misspelled jinja variable must surface in the failure output."""
    with pytest.raises(SystemExit) as exc:
        api.build(os.path.join(fail_dir, "source_git_jinja2_oops"),
                  config=testing_config)
    assert "GIT_DSECRIBE_TAG" in exc.exconly()
Ejemplo n.º 47
0
def test_extract_tarball_with_unicode_filename(testing_config):
    """Regression test for https://github.com/conda/conda-build/pull/1779."""
    api.build(os.path.join(metadata_dir, '_unicode_in_tarball'),
              config=testing_config)
Ejemplo n.º 48
0
def test_pip_in_meta_yaml_fail(testing_workdir, testing_config):
    """pip-style requirements in meta.yaml must fail with a helpful hint."""
    with pytest.raises(ValueError) as exc:
        api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"),
                  config=testing_config)
    assert "environment.yml" in str(exc)
Ejemplo n.º 49
0
def test_symlink_fail(testing_workdir, testing_config, capfd):
    """A recipe producing bad symlinks must abort the build."""
    with pytest.raises((SystemExit, FileNotFoundError)):
        api.build(os.path.join(fail_dir, "symlinks"), config=testing_config)
Ejemplo n.º 50
0
def test_output_folder_moves_file(testing_metadata, testing_workdir):
    """Built packages land under config.output_folder when it is set."""
    testing_metadata.config.output_folder = testing_workdir
    # CONSISTENCY FIX: api.build's keyword is `notest` (used everywhere else
    # in this file, e.g. test_recipe_test); `no_test` is not the parameter
    # name and would not suppress the test phase as intended.
    outputs = api.build(testing_metadata, notest=True)
    assert outputs[0].startswith(testing_workdir)
Ejemplo n.º 51
0
def test_binary_has_prefix_files(testing_workdir, testing_config):
    """Binary files embedding the build prefix must be detected and handled."""
    recipe = os.path.join(metadata_dir, '_binary_has_prefix_files')
    api.build(recipe, config=testing_config)
Ejemplo n.º 52
0
def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch):
    """Build each parametrized recipe with the test env vars exported."""
    # These variables exist purely so build scripts can verify their presence.
    for var, value in (("CONDA_TEST_VAR", "conda_test"),
                       ("CONDA_TEST_VAR_2", "conda_test_2")):
        monkeypatch.setenv(var, value)
    api.build(recipe, config=testing_config)
Ejemplo n.º 53
0
def test_workdir_removal_warning(testing_config, caplog):
    """A recipe's tests that use the source dir must fail with a clear message."""
    recipe = os.path.join(metadata_dir, '_test_uses_src_dir')
    with pytest.raises(ValueError) as exc:
        api.build(recipe, config=testing_config)
    # FIX: this assertion was inside the `with` block after the raising call,
    # so it never executed; it must sit outside the context manager.
    assert "work dir is removed" in str(exc)
Ejemplo n.º 54
0
def test_recursion_packages(testing_config):
    """Two dependency packages listed in one recipe must both be built before
    the package that needs them."""
    recipe = os.path.join(metadata_dir, '_recursive-build-two-packages')
    api.build(recipe, config=testing_config)
Ejemplo n.º 55
0
def test_no_anaconda_upload_condarc(service_name, testing_workdir,
                                    testing_config, capfd):
    """With uploading disabled in condarc, the build must say so."""
    api.build(empty_sections, config=testing_config)
    captured, err = capfd.readouterr()
    assert "Automatic uploading is disabled" in captured, err
Ejemplo n.º 56
0
def test_recursion_layers(testing_config):
    """Two-hop recursion: a needs b, so b is built first, then a."""
    recipe = os.path.join(metadata_dir, '_recursive-build-two-layers')
    api.build(recipe, config=testing_config)
Ejemplo n.º 57
0
def test_legacy_noarch_python(testing_config):
    """Legacy noarch_python packages must land in the noarch subdir."""
    built = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"),
                      config=testing_config)[0]
    # the parent directory of the package file is its subdir
    assert os.path.basename(os.path.dirname(built)) == 'noarch'
Ejemplo n.º 58
0
def test_failed_tests_exit_build(testing_workdir, testing_config):
    """Regression test for https://github.com/conda/conda-build/issues/1112."""
    with pytest.raises(SystemExit) as exc:
        api.build(os.path.join(metadata_dir, "_test_failed_test_exits"),
                  config=testing_config)
    assert 'TESTS FAILED' in str(exc)
Ejemplo n.º 59
0
def test_backslash_in_always_include_files_path(testing_config):
    """Backslash handling in always_include_files: the metadata_dir variant
    builds, the fail_dir variant must raise."""
    api.build(os.path.join(metadata_dir, '_backslash_in_include_files'))
    with pytest.raises(RuntimeError):
        api.build(os.path.join(fail_dir, 'backslash_in_include_files'))
Ejemplo n.º 60
0
def test_script_win_creates_exe(testing_config, recipe_name):
    """Entry-point scripts on Windows must produce both the .exe launcher and
    the -script.py payload."""
    built = api.build(os.path.join(metadata_dir, recipe_name),
                      config=testing_config)
    for expected in ('Scripts/test-script.exe', 'Scripts/test-script-script.py'):
        assert package_has_file(built[0], expected)