Example #1
def test_inner_python_loop_with_output(testing_config):
    outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'),
                                        config=testing_config)
    outputs = [os.path.basename(out) for out in outputs]
    assert len(outputs) == 5
    assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3

    testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')]
    outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'),
                                        config=testing_config)
    outputs = [os.path.basename(out) for out in outputs]
    assert len(outputs) == 5
    assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3

    testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')]
    outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'),
                                        config=testing_config, platform='win', arch=64)
    outputs = [os.path.basename(out) for out in outputs]
    assert len(outputs) == 5
    assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1
    assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3
def test_host_entries_finalized(testing_config):
    recipe = os.path.join(metadata_dir, '_host_entries_finalized')
    metadata = api.render(recipe, config=testing_config)
    assert len(metadata) == 2
    outputs = api.get_output_file_paths(recipe, config=testing_config)
    assert any('py27' in out for out in outputs)
    assert any('py36' in out for out in outputs)
def test_intradependencies(testing_workdir, testing_config):
    # Only necessary because the `r` channel was not in `defaults` for conda<4.3
    testing_config.channel_urls = ('r', )
    testing_config.activate = True
    recipe = os.path.join(subpackage_dir, '_intradependencies')
    outputs1 = api.get_output_file_paths(recipe, config=testing_config)
    outputs1_set = set([os.path.basename(p) for p in outputs1])
    # 2 * (2 * pythons, 1 * lib, 1 * R)
    assert len(outputs1) == 8
    outputs2 = api.build(recipe, config=testing_config)
    assert len(outputs2) == 8
    outputs2_set = set([os.path.basename(p) for p in outputs2])
    assert outputs1_set == outputs2_set, 'pkgs differ :: get_output_file_paths()=%s but build()=%s' % (outputs1_set,
                                                                                                       outputs2_set)
    pkg_hashes = api.inspect_hash_inputs(outputs2)
    py_regex = re.compile('^python.*')
    r_regex = re.compile('^r-base.*')
    for pkg, hashes in pkg_hashes.items():
        try:
            reqs = hashes['recipe']['requirements']['build']
        except (KeyError, TypeError):
            reqs = []
        # Assert that:
        # 1. r-base does and python does not appear in the hash inspection for the R packages
        if re.match('^r[0-9]-', pkg):
            assert not len([m.group(0) for r in reqs for m in [py_regex.search(r)] if m])
            assert len([m.group(0) for r in reqs for m in [r_regex.search(r)] if m])
        # 2. python does and r-base does not appear in the hash inspection for the Python packages
        elif re.match('^py[0-9]-', pkg):
            assert not len([m.group(0) for r in reqs for m in [r_regex.search(r)] if m])
            assert len([m.group(0) for r in reqs for m in [py_regex.search(r)] if m])
        # 3. neither python nor r-base appear in the hash inspection for the lib packages
        elif re.match('^lib[0-9]-', pkg):
            assert not len([m.group(0) for r in reqs for m in [r_regex.search(r)] if m])
            assert not len([m.group(0) for r in reqs for m in [py_regex.search(r)] if m])
Example #4
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            config.verbose = False
            config.debug = False
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
def test_output_build_path_git_source(testing_workdir, testing_config):
    recipe_path = os.path.join(metadata_dir, "source_git_jinja2")
    m = api.render(recipe_path, config=testing_config)[0][0]
    output = api.get_output_file_paths(m)[0]
    _hash = m._hash_dependencies()
    test_path = os.path.join(testing_config.croot, testing_config.host_subdir,
                    "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format(
                        sys.version_info.major, sys.version_info.minor, _hash))
    assert output == test_path
Example #6
def test_activate_scripts_not_included(testing_workdir):
    recipe = os.path.join(metadata_dir, '_activate_scripts_not_included')
    args = ['--no-anaconda-upload', '--croot', testing_workdir, recipe]
    main_build.execute(args)
    out = api.get_output_file_paths(recipe, croot=testing_workdir)[0]
    for f in ('bin/activate', 'bin/deactivate', 'bin/conda',
              'Scripts/activate.bat', 'Scripts/deactivate.bat', 'Scripts/conda.bat',
              'Scripts/activate.exe', 'Scripts/deactivate.exe', 'Scripts/conda.exe',
              'Scripts/activate', 'Scripts/deactivate', 'Scripts/conda'):
        assert not package_has_file(out, f)
def test_get_output_file_path(testing_workdir, testing_metadata):
    testing_metadata = render.finalize_metadata(testing_metadata)
    api.output_yaml(testing_metadata, 'recipe/meta.yaml')

    build_path = api.get_output_file_paths(os.path.join(testing_workdir, 'recipe'),
                                          config=testing_metadata.config,
                                          no_download_source=True)[0]
    assert build_path == os.path.join(testing_metadata.config.croot,
                                      testing_metadata.config.host_subdir,
                                      "test_get_output_file_path-1.0-1.tar.bz2")
Example #8
def built_package_paths(recipe):
    """
    Returns the path to which a recipe would be built.

    Does not necessarily exist; equivalent to ``conda build --output recipename``
    but without the subprocess.
    """
    config = load_conda_build_config()
    paths = api.get_output_file_paths(recipe, config=config)
    return paths
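
A minimal usage sketch for the helper above, assuming it lives in a module alongside load_conda_build_config(); the recipe directory below is hypothetical:

# Hypothetical recipe directory; built_package_paths() returns the would-be output
# paths without actually building anything.
for path in built_package_paths('recipes/my_package'):
    print(path)  # e.g. <croot>/<host_subdir>/my_package-1.0-0.tar.bz2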
Example #9
def test_variants_in_versions_with_setup_py_data(testing_workdir):
    recipe = os.path.join(recipe_dir, '12_variant_versions')
    try:
        outputs = api.get_output_file_paths(recipe)
        assert len(outputs) == 2
        assert any(os.path.basename(pkg).startswith('my_package-470.470') for pkg in outputs)
        assert any(os.path.basename(pkg).startswith('my_package-480.480') for pkg in outputs)
    except FileNotFoundError:
        # Intermittent download problem with Python 3.x on Travis CI; just ignore it.
        print("Ignoring test on setup.py data - problem with download")
def test_get_output_file_path_jinja2(testing_workdir, testing_config):
    # If this test does not raise, it's an indicator that the workdir is not
    #    being cleaned as it should.
    recipe = os.path.join(metadata_dir, "source_git_jinja2")

    # First get metadata with a recipe that is known to need a download:
    with pytest.raises((ValueError, SystemExit)):
        build_path = api.get_output_file_paths(recipe,
                                               config=testing_config,
                                               no_download_source=True)[0]

    metadata, need_download, need_reparse_in_env = api.render(
        recipe,
        config=testing_config,
        no_download_source=False)[0]
    build_path = api.get_output_file_paths(metadata)[0]
    _hash = metadata.hash_dependencies()
    python = ''.join(metadata.config.variant['python'].split('.')[:2])
    assert build_path == os.path.join(testing_config.croot, testing_config.host_subdir,
                                      "conda-build-test-source-git-jinja2-1.20.2-"
                                      "py{0}{1}_0_g262d444.tar.bz2".format(python, _hash))
def test_intradependencies(testing_workdir, testing_config):
    # Only necessary because the `r` channel was not in `defaults` for conda<4.3
    testing_config.channel_urls = ('r', )
    testing_config.activate = True
    recipe = os.path.join(subpackage_dir, '_intradependencies')
    outputs1 = api.get_output_file_paths(recipe, config=testing_config)
    outputs1_set = set([os.path.basename(p) for p in outputs1])
    # 2 * (2 * pythons, 1 * lib, 1 * R)
    assert len(outputs1) == 8
    outputs2 = api.build(recipe, config=testing_config)
    assert len(outputs2) == 8
    outputs2_set = set([os.path.basename(p) for p in outputs2])
    assert outputs1_set == outputs2_set, 'pkgs differ :: get_output_file_paths()=%s but build()=%s' % (outputs1_set,
                                                                                                       outputs2_set)
Example #12
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            paths = api.get_output_file_paths(metadata_tuples)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
Example #13
def get_package_paths(recipe, check_channels, force=False):
    if not force:
        if check_recipe_skippable(recipe, check_channels):
            # NB: If we skip early here, we don't detect possible divergent builds.
            logger.info(
                'FILTER: not building recipe %s because '
                'the same number of builds are in channel(s) and it is not forced.',
                recipe)
            return []
    platform, metas = _load_platform_metas(recipe, finalize=True)

    # The recipe likely defined skip: True
    if not metas:
        return []

    # If on CI, handle noarch.
    if os.environ.get('CI', None) == 'true':
        first_meta = metas[0]
        if first_meta.get_value('build/noarch'):
            if platform != 'linux':
                logger.debug('FILTER: only building %s on '
                             'linux because it defines noarch.',
                             recipe)
                return []

    new_metas, existing_metas, divergent_builds = (
        _filter_existing_packages(metas, check_channels))

    if divergent_builds:
        raise DivergentBuildsError(*sorted(divergent_builds))

    for meta in existing_metas:
        logger.info(
            'FILTER: not building %s because '
            'it is in channel(s) and it is not forced.', meta.pkg_fn())
    # return output paths for the packages that still need to be built
    if force:
        build_metas = new_metas + existing_metas
    else:
        build_metas = new_metas
    return list(chain.from_iterable(
        api.get_output_file_paths(meta) for meta in build_metas))
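
A hedged call sketch for get_package_paths() above; the recipe path and channel list are illustrative assumptions, not values from the original source:

# Illustrative values only: the recipe folder and channel name are assumptions.
# Returns output paths for packages not yet present in the given channels,
# or an empty list when the recipe is skippable and force is False.
paths = get_package_paths('recipes/my_package', check_channels=['bioconda'], force=False)
for path in paths:
    print(path)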
Example #14
def test_env_creation_with_prefix_fallback_disabled(testing_config):
    tempdir = '/tmp' if platform.system() == 'Darwin' else tempfile.gettempdir()
    testing_config.croot = os.path.join(tempdir, 'cb')
    testing_config.anaconda_upload = False
    testing_config.prefix_length_fallback = False
    testing_config.prefix_length = 80

    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata = api.render(recipe_path, config=testing_config)[0][0]
    fn = api.get_output_file_paths(metadata)[0]
    if os.path.isfile(fn):
        os.remove(fn)

    with pytest.raises((SystemExit, PaddingError, LinkError, CondaError)):
        output = api.build(metadata)[0]
        assert not api.inspect_prefix_length(output, 255)
        testing_config.prefix_length = 255
        environ.create_env(testing_config.build_prefix,
                           specs_or_actions=["python", metadata.name()],
                           env='build', config=testing_config, subdir=subdir)
Example #15
def render(recipe, numpy=None, filename_hashing=True, channels=()):
    '''
    Render recipe

    Parameters
    ----------
    recipe : path
        Path to the recipe directory to render.
    numpy : str, optional
        NumPy version to render against.
    filename_hashing : bool, optional
        Whether to include the variant hash in the output filename.
    channels : iterable of str, optional
        Additional channel URLs appended to the build config.

    Returns
    -------
    path
        File path of the recipe's first output package.
    '''
    config = Config(numpy=numpy, filename_hashing=filename_hashing)
    config.channel_urls.extend(channels)

    meta_tuples = api.render(recipe, config=config)
    file_to_upload = api.get_output_file_paths(meta_tuples, config=config)[0]

    return file_to_upload
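
A short usage sketch of the render() helper above; the recipe path, NumPy pin, and channel are hypothetical:

# Hypothetical arguments for illustration; render() returns the first output package path.
pkg_path = render('path/to/recipe', numpy='1.16', filename_hashing=False,
                  channels=['conda-forge'])
print(pkg_path)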
Example #16
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe, config, variants=args.variants)
    set_language_env_vars(variants)

    config.channel_urls = get_channel_urls(args.__dict__)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples, config=config)
                print('\n'.join(sorted(paths)))
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
Example #17
def test_target_platform_looping(testing_config):
    outputs = api.get_output_file_paths(os.path.join(recipe_dir, '25_target_platform_looping'),
                                   platform='win', arch='64')
    assert len(outputs) == 2
Example #18
def test_variants_in_output_names():
    recipe = os.path.join(recipe_dir, '11_variant_output_names')
    outputs = api.get_output_file_paths(recipe)
    assert len(outputs) == 4
def test_cross_compilers():
    recipe = os.path.join(recipe_dir, '09_cross')
    outputs = api.get_output_file_paths(recipe)
    assert len(outputs) == 3
def output_action(recipe, config):
    with LoggingContext(logging.CRITICAL + 1):
        config.verbose = False
        config.debug = False
        paths = api.get_output_file_paths(recipe, config=config)
        print('\n'.join(sorted(paths)))
Example #21
def test_numpy_used_variable_looping(testing_config):
    outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'numpy_used'))
    assert len(outputs) == 4
Example #22
def test_cross_compilers():
    recipe = os.path.join(recipe_dir, '09_cross')
    outputs = api.get_output_file_paths(recipe, permit_unsatisfiable_variants=True)
    assert len(outputs) == 3
Example #23
def output_action(recipe, config):
    with LoggingContext(logging.CRITICAL + 1):
        paths = api.get_output_file_paths(recipe)
        print('\n'.join(sorted(paths)))
def test_get_output_file_path_metadata_object(testing_metadata):
    testing_metadata.final = True
    build_path = api.get_output_file_paths(testing_metadata)[0]
    assert build_path == os.path.join(testing_metadata.config.croot,
                                      testing_metadata.config.host_subdir,
                "test_get_output_file_path_metadata_object-1.0-1.tar.bz2")
def test_loops_do_not_remove_earlier_packages(testing_config):
    recipe = os.path.join(subpackage_dir, '_xgboost_example')
    output_files = api.get_output_file_paths(recipe, config=testing_config)

    api.build(recipe, config=testing_config)
    assert len(output_files) == len(glob(os.path.join(testing_config.croot, testing_config.host_subdir, "*.tar.bz2")))