Ejemplo n.º 1
0
def _read_specs_from_package(pkg_loc, pkg_dist):
    specs = {}
    if os.path.isdir(pkg_loc):
        downstream_file = os.path.join(pkg_loc, 'info/run_exports')
        if os.path.isfile(downstream_file):
            with open(downstream_file) as f:
                specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
        # a later attempt: record more info in the yaml file, to support "strong" run exports
        elif os.path.isfile(downstream_file + '.yaml'):
            with open(downstream_file + '.yaml') as f:
                specs = yaml.safe_load(f)
    if not specs and os.path.isfile(pkg_loc):
        specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml')
        if specs_yaml:
            specs = yaml.safe_load(specs_yaml)
        else:
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            # exclude packages pinning themselves (makes no sense)
            if legacy_specs:
                specs = {
                    'weak': [
                        spec.rstrip() for spec in legacy_specs.splitlines()
                        if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                    ]
                }
    return specs
Ejemplo n.º 2
0
def test_convert_platform_to_others(testing_workdir, base_platform, package):
    """Convert a py27 package from base_platform to every other platform and
    verify the example site-packages file (and paths.json, when present)
    survives conversion.
    """
    package_name, example_file = package
    source_platform = '{}-64'.format(base_platform)
    # BUG FIX: the original removed entries from `platforms` while iterating
    # over it, which skips the element following the removed one.  Build the
    # filtered list instead of mutating during iteration.
    platforms = [p for p in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']
                 if p != source_platform]

    f = 'http://repo.continuum.io/pkgs/free/{}-64/{}-py27_0.tar.bz2'.format(base_platform,
                                                                            package_name)
    fn = "{}-py27_0.tar.bz2".format(package_name)
    download(f, fn)
    expected_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn, platforms='all', quiet=False, verbose=False)
    for platform in platforms:
        python_folder = 'lib/python2.7' if not platform.startswith('win') else 'Lib'
        package = os.path.join(platform, fn)
        assert package_has_file(package,
                                '{}/site-packages/{}'.format(python_folder, example_file))

        if expected_paths_json:
            assert package_has_file(package, 'info/paths.json')
            assert_package_paths_matches_files(package)
Ejemplo n.º 3
0
def test_convert_no_dependencies(testing_workdir, base_platform, package):
    """Convert with dependencies=None; converted packages keep original deps."""
    package_name, example_file = package
    subdir = '{}-64'.format(base_platform)
    url = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir,
                                                                                package_name)
    fn = "{}-np112py36_0.tar.bz2".format(package_name)
    download(url, fn)

    expected_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn, platforms='all', dependencies=None, quiet=False, verbose=False)
    for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']:
        if platform == subdir:
            continue
        python_folder = 'Lib' if platform.startswith('win') else 'lib/python3.6'
        package = os.path.join(platform, fn)
        assert package_has_file(package,
                                '{}/{}'.format(python_folder, example_file))

        with tarfile.open(package) as tar:
            index = json.loads(tar.extractfile('info/index.json').read().decode('utf-8'))

        assert 'numpy 1.12.1 py36_0' in index['depends']
        assert 'cryptography 1.8.1 py36_0' in index['depends']

        if expected_paths_json:
            assert package_has_file(package, 'info/paths.json')
            assert_package_paths_matches_files(package)
Ejemplo n.º 4
0
def test_convert_no_dependencies(testing_workdir, base_platform, package):
    """Converted packages must retain the source package's dependencies."""
    package_name, example_file = package
    subdir = '{}-64'.format(base_platform)
    url = 'http://repo.continuum.io/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(
        subdir, package_name)
    fn = "{}-np112py36_0.tar.bz2".format(package_name)
    download(url, fn)

    expected_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn,
                platforms='all',
                dependencies=None,
                quiet=False,
                verbose=False)
    targets = (p for p in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']
               if p != subdir)
    for platform in targets:
        python_folder = 'Lib' if platform.startswith('win') else 'lib/python3.6'
        package = os.path.join(platform, fn)
        assert package_has_file(
            package, '{}/{}'.format(python_folder, example_file))

        with tarfile.open(package) as tar:
            index = json.loads(
                tar.extractfile('info/index.json').read().decode('utf-8'))
            assert 'numpy 1.12.1 py36_0' in index['depends']
            assert 'cryptography 1.8.1 py36_0' in index['depends']

        if expected_paths_json:
            assert package_has_file(package, 'info/paths.json')
            assert_package_paths_matches_files(package)
Ejemplo n.º 5
0
def test_renaming_executables(testing_workdir, base_platform, package):
    """Test that the files in /bin are properly renamed.

    When converting the bin/ directory to Scripts/, only scripts need to
    be changed.  Sometimes the /bin directory contains other files that
    are not Python scripts, such as post-link.sh scripts.  This test
    converts a package that contains a post-link.sh script in the bin/
    directory and checks that its filename remains the same.
    """
    package_name, example_file = package
    subdir = '{}-64'.format(base_platform)
    url = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(
        subdir, package_name)
    fn = "{}-py27_0.tar.bz2".format(package_name)
    download(url, fn)
    expected_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn, platforms='all', quiet=False, verbose=False)
    for platform in ('osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'):
        if platform == subdir:
            continue
        package = os.path.join(platform, fn)
        if expected_paths_json:
            assert package_has_file(package, 'info/paths.json')
            assert_package_paths_matches_files(package)
Ejemplo n.º 6
0
def test_renaming_executables(testing_workdir, base_platform, package):
    """Test that the files in /bin are properly renamed.

    When converting bin/ to Scripts/, only Python scripts should be
    renamed.  The bin/ directory can also hold non-script files such as
    post-link.sh; this test converts a package containing one and checks
    that its filename is unchanged.
    """
    package_name, example_file = package
    subdir = '{}-64'.format(base_platform)
    url = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir,
                                                                           package_name)
    fn = "{}-py27_0.tar.bz2".format(package_name)
    download(url, fn)
    expected_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn, platforms='all', quiet=False, verbose=False)
    target_platforms = [p for p in ('osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32')
                        if p != subdir]
    for platform in target_platforms:
        package = os.path.join(platform, fn)
        if expected_paths_json:
            assert package_has_file(package, 'info/paths.json')
            assert_package_paths_matches_files(package)
Ejemplo n.º 7
0
def test_noarch_python(test_config):
    """A noarch: python build must record entry points and type in noarch.json."""
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    # BUG FIX: `is not ''` compared object identity with a literal (always
    # true for non-interned objects; SyntaxWarning since Python 3.8).
    assert package_has_file(fn, 'info/files') != ''
    noarch = json.loads(package_has_file(fn, 'info/noarch.json').decode())
    assert 'entry_points' in noarch
    assert 'type' in noarch
Ejemplo n.º 8
0
def test_noarch_python():
    """A noarch: python build must record entry points and type in noarch.json."""
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe)
    api.build(recipe)
    # BUG FIX: `is not ''` compared object identity with a literal (always
    # true for non-interned objects; SyntaxWarning since Python 3.8).
    assert package_has_file(fn, 'info/files') != ''
    noarch = json.loads(package_has_file(fn, 'info/noarch.json').decode())
    assert 'entry_points' in noarch
    assert 'type' in noarch
Ejemplo n.º 9
0
def test_noarch_python_1(testing_config):
    """noarch metadata must land in info/link.json with a metadata version."""
    output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0]
    # BUG FIX: `is not ''` was an identity check against a literal (always
    # true for non-interned objects); compare by value instead.
    assert package_has_file(output, 'info/files') != ''
    extra = json.loads(package_has_file(output, 'info/link.json').decode())
    assert 'noarch' in extra
    assert 'entry_points' in extra['noarch']
    assert 'type' in extra['noarch']
    assert 'package_metadata_version' in extra
Ejemplo n.º 10
0
def test_serial_builds_have_independent_configs(testing_config):
    """Recipes built serially must each keep their own pinned config."""
    recipe = os.path.join(recipe_dir, '17_multiple_recipes_independent_config')
    recipes = [os.path.join(recipe, d) for d in ('a', 'b')]
    outputs = api.build(recipes, config=testing_config)
    expected_pins = ['bzip2 >=1,<1.0.7.0a0', 'bzip2 >=1.0.6,<2.0a0']
    for output, pin in zip(outputs, expected_pins):
        index_json = json.loads(package_has_file(output, 'info/index.json'))
        assert pin in index_json['depends']
Ejemplo n.º 11
0
def test_serial_builds_have_independent_configs(testing_config):
    """Each recipe in a serial build must see its own independent config."""
    base = os.path.join(recipe_dir, '17_multiple_recipes_independent_config')
    outputs = api.build([os.path.join(base, 'a'), os.path.join(base, 'b')],
                        config=testing_config)
    first_index = json.loads(package_has_file(outputs[0], 'info/index.json'))
    assert 'bzip2 >=1,<1.0.7.0a0' in first_index['depends']
    second_index = json.loads(package_has_file(outputs[1], 'info/index.json'))
    assert 'bzip2 >=1.0.6,<2.0a0' in second_index['depends']
Ejemplo n.º 12
0
def test_no_include_recipe_meta_yaml(testing_metadata, testing_config):
    """The _no_include_recipe recipe must omit info/recipe/meta.yaml."""
    # Sanity check (copied from the test above): recipe included by default.
    outputs = api.build(testing_metadata)
    assert package_has_file(outputs[0], "info/recipe/meta.yaml")

    built = api.build(os.path.join(metadata_dir, '_no_include_recipe'),
                      config=testing_config)
    assert not package_has_file(built[0], "info/recipe/meta.yaml")
Ejemplo n.º 13
0
def test_no_include_recipe_meta_yaml(testing_metadata, testing_config):
    """Packages built from _no_include_recipe must not ship meta.yaml."""
    # Default behavior first (sanity check copied from the test above).
    default_outputs = api.build(testing_metadata)
    assert package_has_file(default_outputs[0], "info/recipe/meta.yaml")

    no_recipe = os.path.join(metadata_dir, '_no_include_recipe')
    output_file = api.build(no_recipe, config=testing_config)[0]
    assert not package_has_file(output_file, "info/recipe/meta.yaml")
Ejemplo n.º 14
0
def test_noarch_python_1(testing_config):
    """noarch metadata must land in info/link.json with a metadata version."""
    output = api.build(os.path.join(metadata_dir, "_noarch_python"),
                       config=testing_config)[0]
    # BUG FIX: `is not ''` was an identity check against a literal (always
    # true for non-interned objects); compare by value instead.
    assert package_has_file(output, 'info/files') != ''
    extra = json.loads(package_has_file(output, 'info/link.json').decode())
    assert 'noarch' in extra
    assert 'entry_points' in extra['noarch']
    assert 'type' in extra['noarch']
    assert 'package_metadata_version' in extra
Ejemplo n.º 15
0
def test_convert_from_unix_to_win_creates_entry_points(test_config):
    """Conversion to Windows must create .bat wrappers and -script.py files."""
    recipe_dir = os.path.join(metadata_dir, "entry_points")
    fn = api.get_output_file_path(recipe_dir, config=test_config)
    api.build(recipe_dir, config=test_config)
    expected_scripts = ("Scripts/test-script-manual-script.py",
                        "Scripts/test-script-manual.bat",
                        "Scripts/test-script-setup-script.py",
                        "Scripts/test-script-setup.bat")
    for platform in ['win-64', 'win-32']:
        api.convert(fn, platforms=[platform], force=True)
        converted = os.path.join(platform, os.path.basename(fn))
        for script in expected_scripts:
            assert package_has_file(converted, script)
Ejemplo n.º 16
0
def test_compileall_compiles_all_good_files(testing_workdir, testing_config):
    """Valid .py files are byte-compiled; the broken one is shipped uncompiled."""
    output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0]
    bad_file = 'f2_bad.py'
    for good in ('f1.py', 'f3.py'):
        assert package_has_file(output, good)
        # the byte-compiled companion must be present as well
        assert package_has_file(output, add_mangling(good))
    assert package_has_file(output, bad_file)
    assert not package_has_file(output, add_mangling(bad_file))
Ejemplo n.º 17
0
def test_noarch_python_1(test_config):
    """noarch metadata must land in info/package_metadata.json."""
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    # BUG FIX: `is not ''` compared object identity with a literal (always
    # true for non-interned objects; SyntaxWarning since Python 3.8).
    assert package_has_file(fn, 'info/files') != ''
    extra = json.loads(package_has_file(fn, 'info/package_metadata.json').decode())
    assert 'noarch' in extra
    assert 'entry_points' in extra['noarch']
    assert 'type' in extra['noarch']
    assert 'package_metadata_version' in extra
Ejemplo n.º 18
0
def test_compileall_compiles_all_good_files(testing_workdir, testing_config):
    """Good modules get a compiled companion; the bad module does not."""
    recipe = os.path.join(metadata_dir, "_compile-test")
    output = api.build(recipe, config=testing_config)[0]
    bad_file = 'f2_bad.py'
    for source in ['f1.py', 'f3.py']:
        assert package_has_file(output, source)
        # check for the matching byte-compiled file too
        assert package_has_file(output, add_mangling(source))
    assert package_has_file(output, bad_file)
    assert not package_has_file(output, add_mangling(bad_file))
Ejemplo n.º 19
0
def test_noarch_python_1(test_config):
    """noarch metadata must land in info/package_metadata.json."""
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    # BUG FIX: `is not ''` was an identity check against a literal (always
    # true for non-interned objects); compare by value instead.
    assert package_has_file(fn, 'info/files') != ''
    extra = json.loads(
        package_has_file(fn, 'info/package_metadata.json').decode())
    assert 'noarch' in extra
    assert 'entry_points' in extra['noarch']
    assert 'type' in extra['noarch']
    assert 'package_metadata_version' in extra
Ejemplo n.º 20
0
def test_no_include_recipe_config_arg(testing_metadata):
    """Recipe inclusion can be disabled via the config attribute.

    The meta.yaml route (build/include_recipe: False) is covered by a
    dedicated recipe in another test; here we flip the config flag.
    """
    first_build = api.build(testing_metadata)
    assert package_has_file(first_build[0], "info/recipe/meta.yaml")

    # Rebuild with the recipe excluded (and a bumped build number).
    testing_metadata.meta['build']['number'] = 2
    testing_metadata.config.include_recipe = False
    rebuilt = api.build(testing_metadata)[0]
    assert not package_has_file(rebuilt, "info/recipe/meta.yaml")
Ejemplo n.º 21
0
def test_no_include_recipe_cmd_line_arg(test_config):
    """Recipe inclusion can be disabled with include_recipe=False.

    The meta.yaml route (build/include_recipe: False) is covered by a
    dedicated recipe in another test.
    """
    pkg_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
                            "empty_sections-0.0-0.tar.bz2")
    api.build(empty_sections, anaconda_upload=False)
    assert package_has_file(pkg_path, "info/recipe/meta.yaml")

    # Rebuilding with the flag must drop the recipe from the package.
    api.build(empty_sections, anaconda_upload=False, include_recipe=False)
    assert not package_has_file(pkg_path, "info/recipe/meta.yaml")
Ejemplo n.º 22
0
def test_subpackage_hash_inputs(testing_config):
    """Both outputs of the _hash_inputs recipe must carry the recipe scripts."""
    recipe_dir = os.path.join(subpackage_dir, '_hash_inputs')
    outputs = api.build(recipe_dir, config=testing_config)
    assert len(outputs) == 2
    # FIX: the original if/else on the output name had byte-identical
    # branches; the same assertions apply to every output.
    for out in outputs:
        assert utils.package_has_file(out, 'info/recipe/install-script.sh')
        assert utils.package_has_file(out, 'info/recipe/build.sh')
Ejemplo n.º 23
0
def test_no_include_recipe_config_arg(testing_metadata):
    """Two ways to not include recipe: build/include_recipe: False in meta.yaml; or this.
    Former is tested with specific recipe."""
    # Default build: the rendered recipe is packaged.
    with_recipe = api.build(testing_metadata)
    assert package_has_file(with_recipe[0], "info/recipe/meta.yaml")

    # Flip the config flag (and bump the build number) -- recipe must vanish.
    testing_metadata.config.include_recipe = False
    testing_metadata.meta['build']['number'] = 2
    without_recipe = api.build(testing_metadata)[0]
    assert not package_has_file(without_recipe, "info/recipe/meta.yaml")
Ejemplo n.º 24
0
def test_metapackage_metadata(test_config, testing_workdir):
    """Metadata flags passed to the metapackage CLI end up in the package."""
    args = ['metapackage_test', '1.0', '-d', 'bzip2', "--home", "http://abc.com", "--summary", "wee",
            "--license", "BSD"]
    main_metapackage.execute(args)
    test_path = os.path.join(sys.prefix, "conda-bld", test_config.subdir, 'metapackage_test-1.0-0.tar.bz2')
    assert os.path.isfile(test_path)
    index = json.loads(package_has_file(test_path, 'info/index.json').decode('utf-8'))
    assert index['license'] == 'BSD'
    about = json.loads(package_has_file(test_path, 'info/about.json').decode('utf-8'))
    assert about['home'] == 'http://abc.com'
    assert about['summary'] == 'wee'
Ejemplo n.º 25
0
def test_no_include_recipe_cmd_line_arg(test_config):
    """Two ways to not include recipe: build/include_recipe: False in meta.yaml; or this.
    Former is tested with specific recipe."""
    built_pkg = os.path.join(sys.prefix, "conda-bld", test_config.subdir,
                             "empty_sections-0.0-0.tar.bz2")
    # Default build keeps the recipe in the package.
    api.build(empty_sections, anaconda_upload=False)
    assert package_has_file(built_pkg, "info/recipe/meta.yaml")

    # With include_recipe=False the recipe must be absent.
    api.build(empty_sections, anaconda_upload=False, include_recipe=False)
    assert not package_has_file(built_pkg, "info/recipe/meta.yaml")
Ejemplo n.º 26
0
def test_build_no_build_id(testing_workdir, test_config, capfd):
    """--no-build-id must keep the build id out of recorded prefixes."""
    args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id',
            '--croot', test_config.croot]
    main_build.execute(args)
    fn = api.get_output_file_path(os.path.join(metadata_dir, "has_prefix_files"),
                                  config=test_config)
    # FIX: package_has_file was called twice for the same member; call once
    # and assert on the returned content.
    data = package_has_file(fn, 'info/has_prefix')
    assert data
    if hasattr(data, 'decode'):
        data = data.decode('UTF-8')
    assert 'has_prefix_files_1' not in data
Ejemplo n.º 27
0
def test_subpackage_hash_inputs(testing_config):
    """Both outputs of the _hash_inputs recipe must carry the recipe scripts."""
    recipe_dir = os.path.join(subpackage_dir, '_hash_inputs')
    outputs = api.build(recipe_dir, config=testing_config)
    assert len(outputs) == 2
    # FIX: the original if/else on the output name had byte-identical
    # branches; the same assertions apply to every output.
    for out in outputs:
        assert utils.package_has_file(out, 'info/recipe/install-script.sh')
        assert utils.package_has_file(out, 'info/recipe/build.sh')
Ejemplo n.º 28
0
def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, test_config):
    """A trailing separator on the recipe path must not disable the build id."""
    recipe_path = os.path.join(metadata_dir, "has_prefix_files" + os.path.sep)
    args = [os.path.join(metadata_dir, "has_prefix_files"), '--croot', test_config.croot]
    main_build.execute(args)
    # FIX: fn was computed twice (before and after the build) and
    # package_has_file called twice for the same member; do each once.
    fn = api.get_output_file_path(recipe_path, config=test_config)
    data = package_has_file(fn, 'info/has_prefix')
    assert data
    if hasattr(data, 'decode'):
        data = data.decode('UTF-8')
    assert 'has_prefix_files_1' in data
Ejemplo n.º 29
0
def test_convert_from_unix_to_win_creates_entry_points(test_config):
    """Windows conversion must produce .bat and -script.py entry points."""
    recipe_dir = os.path.join(metadata_dir, "entry_points")
    fn = api.get_output_file_path(recipe_dir, config=test_config)
    api.build(recipe_dir, config=test_config)
    entry_point_files = ("Scripts/test-script-manual-script.py",
                         "Scripts/test-script-manual.bat",
                         "Scripts/test-script-setup-script.py",
                         "Scripts/test-script-setup.bat")
    for platform in ['win-64', 'win-32']:
        api.convert(fn, platforms=[platform], force=True)
        converted_fn = os.path.join(platform, os.path.basename(fn))
        for entry in entry_point_files:
            assert package_has_file(converted_fn, entry)
        assert_package_consistency(converted_fn)
Ejemplo n.º 30
0
def test_no_include_recipe_meta_yaml(test_config):
    """By default the recipe is packaged; _no_include_recipe must omit it."""
    bld_dir = os.path.join(sys.prefix, "conda-bld", test_config.subdir)
    # Sanity check copied from the test above: recipe present by default.
    output_file = os.path.join(bld_dir, "empty_sections-0.0-0.tar.bz2")
    api.build(empty_sections, anaconda_upload=False)
    assert package_has_file(output_file, "info/recipe/meta.yaml")

    output_file = os.path.join(bld_dir, "no_include_recipe-0.0-0.tar.bz2")
    api.build(os.path.join(metadata_dir, '_no_include_recipe'), anaconda_upload=False)
    assert not package_has_file(output_file, "info/recipe/meta.yaml")
Ejemplo n.º 31
0
def test_metapackage_metadata(testing_config, testing_workdir):
    """CLI metadata flags must be recorded in index.json and about.json."""
    args = ['metapackage_testing_metadata', '1.0', '-d', 'bzip2', "--home", "http://abc.com",
            "--summary", "wee", "--license", "BSD", '--no-anaconda-upload']
    main_metapackage.execute(args)

    pattern = os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir,
                           'metapackage_testing_metadata-1.0-0.tar.bz2')
    test_path = glob(pattern)[0]
    assert os.path.isfile(test_path)
    index = json.loads(package_has_file(test_path, 'info/index.json').decode('utf-8'))
    assert index['license'] == 'BSD'
    about = json.loads(package_has_file(test_path, 'info/about.json').decode('utf-8'))
    assert about['home'] == 'http://abc.com'
    assert about['summary'] == 'wee'
Ejemplo n.º 32
0
def test_no_include_recipe_meta_yaml(test_config):
    """Recipes opting out via meta.yaml must not ship info/recipe/meta.yaml."""
    conda_bld = os.path.join(sys.prefix, "conda-bld", test_config.subdir)
    # Sanity check copied from the test above: recipe is there by default.
    with_recipe = os.path.join(conda_bld, "empty_sections-0.0-0.tar.bz2")
    api.build(empty_sections, anaconda_upload=False)
    assert package_has_file(with_recipe, "info/recipe/meta.yaml")

    without_recipe = os.path.join(conda_bld, "no_include_recipe-0.0-0.tar.bz2")
    api.build(os.path.join(metadata_dir, '_no_include_recipe'),
              anaconda_upload=False)
    assert not package_has_file(without_recipe, "info/recipe/meta.yaml")
Ejemplo n.º 33
0
def test_compileall_compiles_all_good_files(testing_workdir, test_config):
    """All valid .py files get byte-compiled; the broken one is left alone."""
    pkg_name = 'test_compileall-1.0-py{0}{1}_0.tar.bz2'.format(
        sys.version_info.major, sys.version_info.minor)
    output_file = os.path.join(test_config.croot, test_config.subdir, pkg_name)
    api.build(os.path.join(metadata_dir, "_compile-test"), config=test_config)
    bad_file = 'f2_bad.py'
    for good in ('f1.py', 'f3.py'):
        assert package_has_file(output_file, good)
        # the compiled companion must exist too
        assert package_has_file(output_file, add_mangling(good))
    assert package_has_file(output_file, bad_file)
    assert not package_has_file(output_file, add_mangling(bad_file))
Ejemplo n.º 34
0
def test_compileall_compiles_all_good_files(testing_workdir, test_config):
    """Byte-compilation runs for valid modules and skips the invalid one."""
    tarball = 'test_compileall-1.0-py{0}{1}_0.tar.bz2'.format(
        sys.version_info.major, sys.version_info.minor)
    output_file = os.path.join(test_config.croot, test_config.subdir, tarball)
    api.build(os.path.join(metadata_dir, "_compile-test"), config=test_config)
    bad_file = 'f2_bad.py'
    for source in ['f1.py', 'f3.py']:
        assert package_has_file(output_file, source)
        # look for the byte-compiled file as well
        assert package_has_file(output_file, add_mangling(source))
    assert package_has_file(output_file, bad_file)
    assert not package_has_file(output_file, add_mangling(bad_file))
Ejemplo n.º 35
0
def test_convert_from_unix_to_win_creates_entry_points(testing_config):
    """Build the entry_points recipe, convert it to each Windows subdir, and
    verify the converted archive: entry-point files exist, paths.json hashes
    and sizes match the actual tar members, info/files agrees with
    paths.json, index.json records the target subdir, and info/has_prefix
    lists the four expected scripts.
    """
    recipe_dir = os.path.join(metadata_dir, "entry_points")
    fn = api.build(recipe_dir, config=testing_config)[0]
    for platform in ['win-64', 'win-32']:
        api.convert(fn, platforms=[platform], force=True)
        converted_fn = os.path.join(platform, os.path.basename(fn))
        assert package_has_file(converted_fn,
                                "Scripts/test-script-manual-script.py")
        # NOTE(review): this variant asserts .exe launchers (other copies of
        # this test expect .bat) -- presumably the converter emits exe
        # trampolines in this conda-build version; confirm against the
        # version under test.
        assert package_has_file(converted_fn, "Scripts/test-script-manual.exe")
        script_contents = package_has_file(
            converted_fn, "Scripts/test-script-setup-script.py")
        assert script_contents
        assert "Test script setup" in script_contents.decode()
        bat_contents = package_has_file(converted_fn,
                                        "Scripts/test-script-setup.exe")
        assert bat_contents
        assert_package_consistency(converted_fn)
        paths_content = json.loads(
            package_has_file(converted_fn, 'info/paths.json').decode())

        # Check the validity of the sha and filesize of the converted scripts
        with tarfile.open(converted_fn) as t:
            for f in paths_content['paths']:
                if f['_path'].startswith('Scripts/') and f['_path'].endswith(
                        '-script.py'):
                    script_content = package_has_file(converted_fn, f['_path'])
                    assert f['sha256'] == hashlib.sha256(
                        script_content).hexdigest()
                    assert f['size_in_bytes'] == t.getmember(f['_path']).size

        # info/files must list exactly the paths recorded in paths.json.
        paths_list = {f['_path'] for f in paths_content['paths']}
        files = {
            p.decode()
            for p in package_has_file(converted_fn, 'info/files').splitlines()
        }
        assert files == paths_list

        index = json.loads(
            package_has_file(converted_fn, 'info/index.json').decode())
        assert index['subdir'] == platform

        # Parse info/has_prefix (records of prefix, type, path; dialect is
        # sniffed since the separator can vary) and check that exactly the
        # four converted entry-point scripts are prefix-recorded.
        has_prefix_files = package_has_file(converted_fn,
                                            "info/has_prefix").decode()
        fieldnames = ['prefix', 'type', 'path']
        csv_dialect = csv.Sniffer().sniff(has_prefix_files)
        csv_dialect.lineterminator = '\n'
        has_prefix_files = csv.DictReader(has_prefix_files.splitlines(),
                                          fieldnames=fieldnames,
                                          dialect=csv_dialect)
        has_prefix_files = {d['path']: d for d in has_prefix_files}
        assert len(has_prefix_files) == 4
        assert 'Scripts/test-script-script.py' in has_prefix_files
        assert 'Scripts/test-script-setup-script.py' in has_prefix_files
        assert 'Scripts/test-script-manual-script.py' in has_prefix_files
        assert 'Scripts/test-script-manual-postfix-script.py' in has_prefix_files
Ejemplo n.º 36
0
def test_build_no_build_id(testing_workdir, test_config, capfd):
    """--no-build-id must keep the build id out of recorded prefixes."""
    args = [
        os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id',
        '--croot', test_config.croot
    ]
    main_build.execute(args)
    fn = api.get_output_file_path(os.path.join(metadata_dir,
                                               "has_prefix_files"),
                                  config=test_config)
    # FIX: package_has_file was called twice for the same member; call once
    # and assert on the returned content.
    data = package_has_file(fn, 'info/has_prefix')
    assert data
    if hasattr(data, 'decode'):
        data = data.decode('UTF-8')
    assert 'has_prefix_files_1' not in data
Ejemplo n.º 37
0
def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, test_config):
    """A trailing separator on the recipe path must not disable the build id."""
    recipe_path = os.path.join(metadata_dir, "has_prefix_files" + os.path.sep)
    args = [
        os.path.join(metadata_dir, "has_prefix_files"), '--croot',
        test_config.croot
    ]
    main_build.execute(args)
    # FIX: fn was computed twice (before and after the build) and
    # package_has_file called twice for the same member; do each once.
    fn = api.get_output_file_path(recipe_path, config=test_config)
    data = package_has_file(fn, 'info/has_prefix')
    assert data
    if hasattr(data, 'decode'):
        data = data.decode('UTF-8')
    assert 'has_prefix_files_1' in data
Ejemplo n.º 38
0
def test_convert_from_unix_to_win_creates_entry_points(testing_config):
    recipe_dir = os.path.join(metadata_dir, "entry_points")
    fn = api.build(recipe_dir, config=testing_config)[0]
    for platform in ['win-64', 'win-32']:
        api.convert(fn, platforms=[platform], force=True)
        converted_fn = os.path.join(platform, os.path.basename(fn))
        assert package_has_file(converted_fn, "Scripts/test-script-manual-script.py")
        assert package_has_file(converted_fn, "Scripts/test-script-manual.bat")
        script_contents = package_has_file(converted_fn, "Scripts/test-script-setup-script.py")
        assert script_contents
        assert "Test script setup" in script_contents.decode()
        bat_contents = package_has_file(converted_fn, "Scripts/test-script-setup.bat")
        assert bat_contents
        assert "set PYFILE" in bat_contents.decode()
        assert_package_consistency(converted_fn)
        paths_content = json.loads(package_has_file(converted_fn, 'info/paths.json').decode())
        paths_list = {f['_path'] for f in paths_content['paths']}
        files = {p.decode() for p in package_has_file(converted_fn, 'info/files').splitlines()}
        assert files == paths_list

        index = json.loads(package_has_file(converted_fn, 'info/index.json').decode())
        assert index['subdir'] == platform

        has_prefix_files = package_has_file(converted_fn, "info/has_prefix").decode()
        fieldnames = ['prefix', 'type', 'path']
        csv_dialect = csv.Sniffer().sniff(has_prefix_files)
        csv_dialect.lineterminator = '\n'
        has_prefix_files = csv.DictReader(has_prefix_files.splitlines(), fieldnames=fieldnames,
                                          dialect=csv_dialect)
        has_prefix_files = {d['path']: d for d in has_prefix_files}
        assert len(has_prefix_files) == 4
        assert 'Scripts/test-script-script.py' in has_prefix_files
        assert 'Scripts/test-script-setup-script.py' in has_prefix_files
        assert 'Scripts/test-script-manual-script.py' in has_prefix_files
        assert 'Scripts/test-script-manual-postfix-script.py' in has_prefix_files
Ejemplo n.º 39
0
def test_convert_from_unix_to_win_creates_entry_points(testing_config):
    """Build the entry_points recipe, convert it to Windows subdirs, and check
    that entry-point scripts, metadata, and prefix records are all created."""
    recipe_dir = os.path.join(metadata_dir, "entry_points")
    built_pkg = api.build(recipe_dir, config=testing_config)[0]
    for target in ['win-64', 'win-32']:
        api.convert(built_pkg, platforms=[target], force=True)
        converted = os.path.join(target, os.path.basename(built_pkg))

        # Entry-point wrappers must appear under Scripts/.
        assert package_has_file(converted, "Scripts/test-script-manual-script.py")
        assert package_has_file(converted, "Scripts/test-script-manual.bat")
        setup_py = package_has_file(converted, "Scripts/test-script-setup-script.py")
        assert setup_py
        assert "Test script setup" in setup_py.decode()
        setup_bat = package_has_file(converted, "Scripts/test-script-setup.bat")
        assert setup_bat
        assert "set PYFILE" in setup_bat.decode()
        assert_package_consistency(converted)

        # info/paths.json and info/files must agree on the file set.
        paths_doc = json.loads(package_has_file(converted, 'info/paths.json').decode())
        recorded_paths = {entry['_path'] for entry in paths_doc['paths']}
        listed_files = {line.decode()
                        for line in package_has_file(converted, 'info/files').splitlines()}
        assert listed_files == recorded_paths

        index_doc = json.loads(package_has_file(converted, 'info/index.json').decode())
        assert index_doc['subdir'] == target

        # info/has_prefix is CSV-like; sniff its dialect, then parse it.
        prefix_text = package_has_file(converted, "info/has_prefix").decode()
        columns = ['prefix', 'type', 'path']
        dialect = csv.Sniffer().sniff(prefix_text)
        dialect.lineterminator = '\n'
        reader = csv.DictReader(prefix_text.splitlines(), fieldnames=columns,
                                dialect=dialect)
        prefix_records = {row['path']: row for row in reader}
        assert len(prefix_records) == 4
        for expected_path in ('Scripts/test-script-script.py',
                              'Scripts/test-script-setup-script.py',
                              'Scripts/test-script-manual-script.py',
                              'Scripts/test-script-manual-postfix-script.py'):
            assert expected_path in prefix_records
Ejemplo n.º 40
0
def test_about_metadata(testing_config):
    """Render and build a two-output recipe, checking that each output keeps
    only its own `about` fields, both in rendered metadata and in the package."""
    recipe = os.path.join(subpackage_dir, '_about_metadata')
    rendered = api.render(recipe, config=testing_config)
    assert len(rendered) == 2
    for meta, _, _ in rendered:
        about = meta.meta['about']
        if meta.name() == 'abc':
            assert 'summary' in about
            assert about['summary'] == 'weee'
            assert 'home' not in about
        elif meta.name() == 'def':
            assert 'home' in about
            assert 'summary' not in about
            assert about['home'] == 'http://not.a.url'
    for pkg in api.build(recipe, config=testing_config):
        raw = utils.package_has_file(pkg, 'info/about.json')
        assert raw
        about_json = json.loads(raw)
        basename = os.path.basename(pkg)
        if basename.startswith('abc'):
            assert 'summary' in about_json
            assert about_json['summary'] == 'weee'
            assert 'home' not in about_json
        elif basename.startswith('def'):
            assert 'home' in about_json
            assert 'summary' not in about_json
            assert about_json['home'] == 'http://not.a.url'
Ejemplo n.º 41
0
def test_info_files_json(test_config):
    """Build a recipe with ignored prefix files and validate info/paths.json:
    only known keys, and prefix metadata present exactly for test.sh/test.bat."""
    recipe = os.path.join(metadata_dir, "ignore_some_prefix_files")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    assert package_has_file(fn, "info/paths.json")
    with tarfile.open(fn) as tf:
        paths_data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8'))
    allowed = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode",
               "no_link", "prefix_placeholder", "inode_paths"]
    for top_key in paths_data:
        assert top_key in ['paths', 'paths_version']
    entries = paths_data.get('paths')
    for entry in entries:
        for field in entry:
            assert field in allowed
    assert len(entries) == 2
    for entry in entries:
        for field in entry:
            assert field in allowed
        # Only the scripts with embedded prefixes carry placeholder metadata.
        if entry.get("_path") in ("test.sh", "test.bat"):
            assert entry.get("prefix_placeholder") is not None
            assert entry.get("file_mode") is not None
        else:
            assert entry.get("prefix_placeholder") is None
            assert entry.get("file_mode") is None
Ejemplo n.º 42
0
def test_noarch_foo_value():
    """A custom `noarch: foo` value from the recipe must land verbatim in index.json."""
    recipe_path = os.path.join(metadata_dir, "noarch_foo")
    pkg_path = api.get_output_file_path(recipe_path)
    api.build(recipe_path)
    index = json.loads(package_has_file(pkg_path, 'info/index.json').decode())
    assert 'noarch' in index
    assert index['noarch'] == "foo"
Ejemplo n.º 43
0
def test_run_constrained_stores_constrains_info(testing_config):
    """run_constrained entries must be recorded under 'constrains' in index.json."""
    recipe_path = os.path.join(metadata_dir, '_run_constrained')
    pkg = api.build(recipe_path, config=testing_config)[0]
    index = json.loads(package_has_file(pkg, 'info/index.json'))
    assert 'constrains' in index
    constrains = index['constrains']
    assert len(constrains) == 1
    assert constrains[0] == 'bzip2  1.*'
Ejemplo n.º 44
0
def test_run_constrained_stores_constrains_info(testing_config):
    """The built package's index.json must carry the recipe's run_constrained spec."""
    recipe_dir = os.path.join(metadata_dir, '_run_constrained')
    built_pkg = api.build(recipe_dir, config=testing_config)[0]
    meta = json.loads(package_has_file(built_pkg, 'info/index.json'))
    assert 'constrains' in meta
    assert len(meta['constrains']) == 1
    assert meta['constrains'][0] == 'bzip2  1.*'
Ejemplo n.º 45
0
def test_info_files_json(testing_config):
    """Validate the structure of info/paths.json in a built package: only
    known top-level keys and fields, with prefix info on test.sh/test.bat only."""
    outputs = api.build(os.path.join(metadata_dir, "ignore_some_prefix_files"),
                        config=testing_config)
    assert package_has_file(outputs[0], "info/paths.json")
    with tarfile.open(outputs[0]) as tf:
        paths_doc = json.loads(
            tf.extractfile('info/paths.json').read().decode('utf-8'))
    known_fields = [
        "_path", "sha256", "size_in_bytes", "path_type", "file_mode",
        "no_link", "prefix_placeholder", "inode_paths"
    ]
    for top_level in paths_doc:
        assert top_level in ['paths', 'paths_version']
    entries = paths_doc.get('paths')
    for entry in entries:
        for name in entry:
            assert name in known_fields
    assert len(entries) == 2
    for entry in entries:
        for name in entry:
            assert name in known_fields
        rel_path = entry.get("_path")
        # Prefix placeholder metadata only on files that embed the prefix.
        if rel_path in ("test.sh", "test.bat"):
            assert entry.get("prefix_placeholder") is not None
            assert entry.get("file_mode") is not None
        else:
            assert entry.get("prefix_placeholder") is None
            assert entry.get("file_mode") is None
Ejemplo n.º 46
0
def test_noarch_foo_value(test_config):
    """index.json must record the recipe's custom noarch value ('foo')."""
    recipe_dir = os.path.join(metadata_dir, "noarch_foo")
    out_path = api.get_output_file_path(recipe_dir, config=test_config)
    api.build(recipe_dir, config=test_config)
    index = json.loads(package_has_file(out_path, 'info/index.json').decode())
    assert 'noarch' in index
    assert index['noarch'] == "foo"
Ejemplo n.º 47
0
def test_about_metadata(testing_config):
    """Each output of the _about_metadata recipe must carry only its own
    about-section fields, both at render time and inside the built package."""
    recipe_path = os.path.join(subpackage_dir, '_about_metadata')
    rendered = api.render(recipe_path, config=testing_config)
    assert len(rendered) == 2
    for meta_obj, _, _ in rendered:
        name = meta_obj.name()
        if name == 'abc':
            assert 'summary' in meta_obj.meta['about']
            assert meta_obj.meta['about']['summary'] == 'weee'
            assert 'home' not in meta_obj.meta['about']
        elif name == 'def':
            assert 'home' in meta_obj.meta['about']
            assert 'summary' not in meta_obj.meta['about']
            assert meta_obj.meta['about']['home'] == 'http://not.a.url'
    built = api.build(recipe_path, config=testing_config)
    for pkg_path in built:
        raw_about = utils.package_has_file(pkg_path, 'info/about.json')
        assert raw_about
        about = json.loads(raw_about)
        if os.path.basename(pkg_path).startswith('abc'):
            assert 'summary' in about
            assert about['summary'] == 'weee'
            assert 'home' not in about
        elif os.path.basename(pkg_path).startswith('def'):
            assert 'home' in about
            assert 'summary' not in about
            assert about['home'] == 'http://not.a.url'
Ejemplo n.º 48
0
def test_pypi_installer_metadata(testing_config):
    """A pip-installed distribution must record 'conda' as its INSTALLER."""
    recipe = os.path.join(metadata_dir, '_pypi_installer_metadata')
    pkg = api.build(recipe, config=testing_config, notest=True)[0]
    installer_path = '{}/imagesize-1.1.0.dist-info/INSTALLER'.format(
        get_site_packages('', '3.9'))
    assert package_has_file(pkg, installer_path, refresh_mode='forced') == 'conda'
Ejemplo n.º 49
0
def test_convert_platform_to_others(testing_workdir, base_platform, package):
    """Download a py27 package and convert it to every platform, verifying the
    example file and (when present) info/paths.json survive conversion."""
    package_name, example_file = package
    url = 'http://repo.continuum.io/pkgs/free/{}-64/{}-py27_0.tar.bz2'.format(base_platform,
                                                                              package_name)
    fn = "{}-py27_0.tar.bz2".format(package_name)
    download(url, fn)
    had_paths_json = package_has_file(fn, 'info/paths.json')
    api.convert(fn, platforms='all', quiet=False, verbose=False)
    for target in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']:
        # Windows packages use Lib/; everything else lib/python2.7/.
        site_root = 'Lib' if target.startswith('win') else 'lib/python2.7'
        converted = os.path.join(target, fn)
        assert package_has_file(converted,
                                '{}/site-packages/{}'.format(site_root, example_file))

        if had_paths_json:
            assert package_has_file(converted, 'info/paths.json')
            assert_package_paths_matches_files(converted)
Ejemplo n.º 50
0
def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config):
    """Passing a recipe path with slashes must not truncate the build-id prefix."""
    cli_args = [os.path.join(metadata_dir, "has_prefix_files"), '--croot',
                testing_config.croot, '--no-anaconda-upload']
    outputs = main_build.execute(cli_args)
    payload = package_has_file(outputs[0], 'binary-has-prefix')
    assert payload
    if hasattr(payload, 'decode'):
        payload = payload.decode('UTF-8')
    assert 'conda-build-test-has-prefix-files_1' in payload
Ejemplo n.º 51
0
def test_build_no_build_id(testing_workdir, testing_config):
    """With --no-build-id, the build-id string must not be baked into prefixes."""
    cli_args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id',
                '--croot', testing_config.croot, '--no-activate', '--no-anaconda-upload']
    outputs = main_build.execute(cli_args)
    payload = package_has_file(outputs[0], 'binary-has-prefix')
    assert payload
    if hasattr(payload, 'decode'):
        payload = payload.decode('UTF-8')
    assert 'has_prefix_files_1' not in payload
Ejemplo n.º 52
0
def test_convert_platform_to_others(testing_workdir, base_platform):
    """Convert itsdangerous-0.24 to every platform and check its module survives."""
    url = 'http://repo.continuum.io/pkgs/free/{}-64/itsdangerous-0.24-py27_0.tar.bz2'.format(base_platform)
    fn = "itsdangerous-0.24-py27_0.tar.bz2"
    download(url, fn)
    api.convert(fn, platforms='all', quiet=False, verbose=True)
    for target in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']:
        site_root = 'Lib' if target.startswith('win') else 'lib/python2.7'
        assert package_has_file(os.path.join(target, fn),
                                '{}/site-packages/itsdangerous.py'.format(site_root))
Ejemplo n.º 53
0
def test_convert_platform_to_others(testing_workdir, base_platform):
    """After converting itsdangerous-0.24 to all platforms, the module file
    must exist under each platform's site-packages layout."""
    src_url = 'http://repo.continuum.io/pkgs/free/{}-64/itsdangerous-0.24-py27_0.tar.bz2'.format(base_platform)
    pkg_fn = "itsdangerous-0.24-py27_0.tar.bz2"
    download(src_url, pkg_fn)
    api.convert(pkg_fn, platforms='all', quiet=False, verbose=True)
    for subdir in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']:
        lib_root = 'lib/python2.7' if not subdir.startswith('win') else 'Lib'
        assert package_has_file(os.path.join(subdir, pkg_fn),
                                '{}/site-packages/itsdangerous.py'.format(lib_root))
Ejemplo n.º 54
0
def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config):
    """A recipe argument containing path separators must keep the full build id."""
    recipe_arg = os.path.join(metadata_dir, "has_prefix_files")
    built = main_build.execute([recipe_arg, '--croot', testing_config.croot,
                                '--no-anaconda-upload'])
    contents = package_has_file(built[0], 'binary-has-prefix')
    assert contents
    if hasattr(contents, 'decode'):
        contents = contents.decode('UTF-8')
    assert 'conda-build-test-has-prefix-files_1' in contents
Ejemplo n.º 55
0
def test_build_no_build_id(testing_workdir, testing_config):
    """--no-build-id builds must not embed the build-id-suffixed path."""
    recipe_arg = os.path.join(metadata_dir, "has_prefix_files")
    built = main_build.execute([recipe_arg, '--no-build-id', '--croot',
                                testing_config.croot, '--no-activate',
                                '--no-anaconda-upload'])
    contents = package_has_file(built[0], 'binary-has-prefix')
    assert contents
    if hasattr(contents, 'decode'):
        contents = contents.decode('UTF-8')
    assert 'has_prefix_files_1' not in contents
Ejemplo n.º 56
0
def test_activate_scripts_not_included(testing_workdir):
    """Conda's own activate/deactivate/conda scripts must never be packaged."""
    recipe = os.path.join(metadata_dir, '_activate_scripts_not_included')
    main_build.execute(['--no-anaconda-upload', '--croot', testing_workdir, recipe])
    pkg = api.get_output_file_paths(recipe, croot=testing_workdir)[0]
    forbidden = ('bin/activate', 'bin/deactivate', 'bin/conda',
                 'Scripts/activate.bat', 'Scripts/deactivate.bat', 'Scripts/conda.bat',
                 'Scripts/activate.exe', 'Scripts/deactivate.exe', 'Scripts/conda.exe',
                 'Scripts/activate', 'Scripts/deactivate', 'Scripts/conda')
    for path in forbidden:
        assert not package_has_file(pkg, path)
Ejemplo n.º 57
0
def test_activate_scripts_not_included(testing_workdir):
    """No variant of conda's activation scripts may leak into the package."""
    recipe = os.path.join(metadata_dir, '_activate_scripts_not_included')
    main_build.execute(['--no-anaconda-upload', '--croot', testing_workdir, recipe])
    out_pkg = api.get_output_file_paths(recipe, croot=testing_workdir)[0]
    stems = ('activate', 'deactivate', 'conda')
    # Same candidate paths as spelled out literally elsewhere, built from stems.
    candidates = ['bin/' + s for s in stems]
    candidates += ['Scripts/{}.bat'.format(s) for s in stems]
    candidates += ['Scripts/{}.exe'.format(s) for s in stems]
    candidates += ['Scripts/' + s for s in stems]
    for candidate in candidates:
        assert not package_has_file(out_pkg, candidate)
Ejemplo n.º 58
0
def get_hash_input(packages):
    """Collect the recorded hash input (info/hash_input.json) from each package.

    Returns a dict mapping package name (filename minus the '.tar.bz2'
    suffix) to {'recipe': <parsed hash input>}, or to a placeholder string
    when the package carries no hash_input.json.
    """
    hash_inputs = {}
    for pkg in ensure_list(packages):
        name = os.path.basename(pkg)[:-8]  # strip trailing '.tar.bz2'
        hash_inputs[name] = {}
        raw = package_has_file(pkg, 'info/hash_input.json')
        if raw:
            hash_inputs[name]['recipe'] = json.loads(raw.decode())
        else:
            hash_inputs[name] = "<no hash_input.json in file>"

    return hash_inputs
Ejemplo n.º 59
0
def get_hash_input(packages):
    """Collect hash-input metadata recorded inside built package archives.

    Reads info/hash_input.json (the variable values that fed the build hash)
    and info/hash_input_files (the recipe files that were hashed) from each
    package.

    Parameters
    ----------
    packages : iterable of str
        Paths to .tar.bz2 conda package archives.

    Returns
    -------
    dict
        Maps package name (filename minus the '.tar.bz2' suffix) to a dict
        with 'recipe' (parsed hash input) and 'files' (recipe-relative
        paths), or to a placeholder string when hash_input.json is absent.
    """
    log = get_logger(__name__)
    hash_inputs = {}
    for pkg in packages:
        pkgname = os.path.basename(pkg)[:-8]  # strip '.tar.bz2'
        record = {}
        hash_input = package_has_file(pkg, 'info/hash_input.json')
        if hash_input:
            hash_inputs[pkgname] = record
            record['recipe'] = json.loads(hash_input.decode())
        else:
            # BUG FIX: previously the dict was replaced by this string and
            # then record['files'] was assigned into it unconditionally,
            # raising TypeError ('str' object does not support item
            # assignment) whenever hash_input.json was missing.  Keep the
            # placeholder; file collection below uses the separate `record`.
            hash_inputs[pkgname] = "<no hash_input.json in file>"
        hash_input_files = package_has_file(pkg, 'info/hash_input_files')
        record['files'] = []
        if hash_input_files:
            for fname in hash_input_files.splitlines():
                if hasattr(fname, 'decode'):
                    fname = fname.decode()
                record['files'].append('info/recipe/{}'.format(fname))
        else:
            # log.warning, not the deprecated log.warn alias.
            log.warning('Package {} does not include recipe.  Full hash information is '
                        'not reproducible.'.format(pkgname))
    return hash_inputs
Ejemplo n.º 60
0
def test_preferred_env(testing_config):
    """info/link.json must record preferred_env metadata with per-platform
    executable paths and a package_metadata_version."""
    recipe = os.path.join(metadata_dir, "_preferred_env")
    pkg = api.build(recipe, config=testing_config)[0]
    link_info = json.loads(package_has_file(pkg, 'info/link.json').decode())
    assert 'preferred_env' in link_info
    preferred = link_info['preferred_env']
    assert 'name' in preferred
    assert 'executable_paths' in preferred
    if on_win:
        expected_paths = ['Scripts/exepath1.bat', 'Scripts/exepath2.bat']
    else:
        expected_paths = ['bin/exepath1', 'bin/exepath2']
    assert preferred['executable_paths'] == expected_paths
    assert 'package_metadata_version' in link_info