Example #1
def python_vars(metadata, prefix, escape_backslash):
    py_ver = get_py_ver(metadata.config)
    stdlib_dir = utils.get_stdlib_dir(prefix, py_ver)
    sp_dir = utils.get_site_packages(prefix, py_ver)

    if utils.on_win and escape_backslash:
        stdlib_dir = stdlib_dir.replace('\\', '\\\\')
        sp_dir = sp_dir.replace('\\', '\\\\')

    vars_ = {
            'CONDA_PY': ''.join(py_ver.split('.')[:2]),
            'PY3K': str(int(int(py_ver[0]) >= 3)),
            'PY_VER': py_ver,
            'STDLIB_DIR': stdlib_dir,
            'SP_DIR': sp_dir,
            }
    build_or_host = 'host' if metadata.is_cross else 'build'
    deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
    if 'python' in deps or metadata.name(fail_ok=True) == 'python':
        python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir)

        if utils.on_win and escape_backslash:
            python_bin = python_bin.replace('\\', '\\\\')

        vars_.update({
            # host prefix is always fine, because it is the same as build when is_cross is False
            'PYTHON': python_bin,
        })

    np_ver = metadata.config.variant.get('numpy', get_default_variant(metadata.config)['numpy'])
    vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    vars_['NPY_DISTUTILS_APPEND_FLAGS'] = '1'
    return vars_
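
For orientation, here is a minimal sketch of the string handling above, assuming a '3.10' Python variant and a '1.21.6' numpy variant (illustrative values, not from a real config):

py_ver = '3.10'
assert ''.join(py_ver.split('.')[:2]) == '310'    # CONDA_PY
assert str(int(int(py_ver[0]) >= 3)) == '1'       # PY3K
np_ver = '1.21.6'
assert '.'.join(np_ver.split('.')[:2]) == '1.21'  # NPY_VER
assert ''.join(np_ver.split('.')[:2]) == '121'    # CONDA_NPY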
Example #2
def get_npy_ver(config):
    conda_npy = ''.join(str(config.variant.get('numpy') or
                            get_default_variant(config)['numpy']).split('.'))
    # Convert int -> string, e.g.
    #   17 -> '1.7'
    #   110 -> '1.10'
    return conda_npy[0] + '.' + conda_npy[1:]
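
The helper strips any dot, then reinserts one after the leading major digit, so a '1.16' string and a bare int 110 normalize the same way. A quick illustrative check:

conda_npy = ''.join(str('1.16').split('.'))  # -> '116'
assert conda_npy[0] + '.' + conda_npy[1:] == '1.16'
conda_npy = ''.join(str(110).split('.'))     # int variant -> '110'
assert conda_npy[0] + '.' + conda_npy[1:] == '1.10'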
Example #3
def testing_metadata(request, testing_config):
    d = defaultdict(dict)
    d['package']['name'] = request.function.__name__
    d['package']['version'] = '1.0'
    d['build']['number'] = '1'
    d['build']['entry_points'] = []
    d['requirements']['build'] = []
    d['requirements']['run'] = []
    d['test']['commands'] = ['echo "A-OK"', 'exit 0']
    d['about']['home'] = "sweet home"
    d['about']['license'] = "contract in blood"
    d['about']['summary'] = "a test package"
    testing_config.variant = get_default_variant(testing_config)
    return MetaData.fromdict(d, config=testing_config)
Example #4
def testing_metadata(request, testing_config):
    d = defaultdict(dict)
    d['package']['name'] = request.function.__name__
    d['package']['version'] = '1.0'
    d['build']['number'] = '1'
    d['build']['entry_points'] = []
    d['requirements']['build'] = []
    d['requirements']['run'] = []
    d['test']['commands'] = ['echo "A-OK"', 'exit 0']
    d['about']['home'] = "sweet home"
    d['about']['license'] = "contract in blood"
    d['about']['summary'] = "a test package"
    testing_config.variant = get_default_variant(testing_config)
    testing_config.variants = [testing_config.variant]
    return MetaData.fromdict(d, config=testing_config)
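
Both variants above read as pytest fixtures (note the request and testing_config parameters). Assuming the function is registered with @pytest.fixture and that conda-build's MetaData exposes name()/version() accessors, a hypothetical consumer might look like:

def test_fixture_fields(testing_metadata):
    # the package name is derived from request.function.__name__
    assert testing_metadata.name() == 'test_fixture_fields'
    assert testing_metadata.version() == '1.0'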
Example #5
def python_vars(config, prefix):
    py_ver = get_py_ver(config)
    vars_ = {
        'CONDA_PY': ''.join(py_ver.split('.')[:2]),
        'PY3K': str(int(int(py_ver[0]) >= 3)),
        'PY_VER': py_ver,
        'STDLIB_DIR': utils.get_stdlib_dir(prefix, py_ver),
        'SP_DIR': utils.get_site_packages(prefix, py_ver),
    }
    if os.path.isfile(config.python_bin(prefix)):
        vars_.update({
            'PYTHON': config.python_bin(prefix),
        })

    np_ver = config.variant.get('numpy', get_default_variant(config)['numpy'])
    vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    return vars_
Example #6
def python_vars(config, prefix, platform):
    py_ver = get_py_ver(config)
    vars_ = {
            'CONDA_PY': ''.join(py_ver.split('.')[:2]),
            'PY3K': str(int(int(py_ver[0]) >= 3)),
            'PY_VER': py_ver,
            'STDLIB_DIR': utils.get_stdlib_dir(prefix, py_ver),
            'SP_DIR': utils.get_site_packages(prefix, py_ver),
            }
    if os.path.isfile(config.python_bin(prefix, platform)):
        vars_.update({
            'PYTHON': config.python_bin(prefix, platform),
        })

    np_ver = config.variant.get('numpy', get_default_variant(config)['numpy'])
    vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    return vars_
Example #7
def python_vars(metadata, prefix):
    py_ver = get_py_ver(metadata.config)
    vars_ = {
            'CONDA_PY': ''.join(py_ver.split('.')[:2]),
            'PY3K': str(int(int(py_ver[0]) >= 3)),
            'PY_VER': py_ver,
            'STDLIB_DIR': utils.get_stdlib_dir(prefix, py_ver),
            'SP_DIR': utils.get_site_packages(prefix, py_ver),
            }
    build_or_host = 'host' if metadata.is_cross else 'build'
    deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
    if 'python' in deps:
        vars_.update({
            # host prefix is always fine, because it is the same as build when is_cross is False
            'PYTHON': metadata.config.python_bin(prefix, metadata.config.host_subdir),
        })

    np_ver = metadata.config.variant.get('numpy', get_default_variant(metadata.config)['numpy'])
    vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    return vars_
Example #8
def get_lua_ver(config):
    return '.'.join(
        config.variant.get('lua',
                           get_default_variant(config)['lua']).split('.')[:2])
Example #9
def get_perl_ver(config):
    return '.'.join(
        config.variant.get('perl',
                           get_default_variant(config)['perl']).split('.')[:2])
Example #10
def get_py_ver(config):
    return '.'.join(
        config.variant.get(
            'python',
            get_default_variant(config)['python']).split('.')[:2])
Example #11
def get_perl_ver(config):
    return '.'.join(config.variant.get('perl', get_default_variant(config)['perl']).split('.')[:2])
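
The get_*_ver helpers above all share one pattern: read the variant key, fall back to get_default_variant, and keep only the leading 'major.minor' components. Two illustrative cases:

assert '.'.join('5.26.2'.split('.')[:2]) == '5.26'   # perl
assert '.'.join('3.10.4'.split('.')[:2]) == '3.10'   # python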
Example #12
File: cpan.py, Project: conda/conda-build
def skeletonize(packages, output_dir=".", version=None,
                meta_cpan_url="http://fastapi.metacpan.org/v1",
                recursive=False, force=False, config=None, write_core=False):
    '''
    Loops over packages, outputting conda recipes converted from CPAN metadata.
    '''

    config = get_or_merge_config(config)
    # TODO: load/use variants?

    perl_version = config.variant.get('perl', get_default_variant(config)['perl'])
    # wildcards are not valid for perl
    perl_version = perl_version.replace(".*", "")
    package_dicts = {}
    indent = '\n    - '
    indent_core = '\n    #- '
    processed_packages = set()
    orig_version = version
    while packages:
        package = packages.pop()
        # If we're passed version in the same format as `PACKAGE=VERSION`
        # update version
        if '=' in package:
            package, _, version = package.partition('=')
        else:
            version = orig_version

        # Skip duplicates
        if package in processed_packages:
            continue
        processed_packages.add(package)

        # Convert modules into distributions
        orig_package = package
        package = dist_for_module(
            meta_cpan_url, package, perl_version, config=config)
        if package == 'perl':
            print(("WARNING: {0} is a Perl core module that is not developed " +
                   "outside of Perl, so we are skipping creating a recipe " +
                   "for it.").format(orig_package))
            continue
        elif package not in {orig_package, orig_package.replace('::', '-')}:
            print(
                ("WARNING: {0} was part of the {1} distribution, so we are " +
                 "making a recipe for {1} instead.").format(orig_package,
                                                            package)
            )

        latest_release_data = get_release_info(meta_cpan_url, package,
                                               None, perl_version,
                                               config=config)
        packagename = perl_to_conda(package)

        # Skip duplicates
        if ((version is not None and ((packagename + '-' + version) in
                                      processed_packages)) or
                ((packagename + '-' + latest_release_data['version']) in
                    processed_packages)):
            continue

        d = package_dicts.setdefault(package, {'packagename': packagename,
                                               'run_depends': '',
                                               'build_depends': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usesha256': '',
                                               'useurl': '',
                                               'source_comment': '',
                                               'summary': "''",
                                               'import_tests': ''})

        # Fetch all metadata from CPAN
        if version is None:
            release_data = latest_release_data
        else:
            release_data = get_release_info(meta_cpan_url, package,
                                            parse_version(version),
                                            perl_version,
                                            config=config)

        # Check if recipe directory already exists
        dir_path = join(output_dir, packagename, release_data['version'])

        # Add Perl version to core module requirements, since these are empty
        # packages, unless we're newer than what's in core
        if metacpan_api_is_core_version(meta_cpan_url, package):

            if not write_core:
                print('We found core module %s. Skipping recipe creation.' %
                      packagename)
                continue

            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'
            empty_recipe = True
        # Add dependencies to d if not in core, or newer than what's in core
        else:
            build_deps, build_core_deps, run_deps, run_core_deps, packages_to_append = \
                deps_for_package(package, release_data=release_data, perl_version=perl_version,
                                 output_dir=output_dir, meta_cpan_url=meta_cpan_url,
                                 recursive=recursive, config=config)

            # Get which deps are in perl_core

            d['build_depends'] += indent.join([''] + list(build_deps |
                                                          run_deps))
            d['build_depends'] += indent_core.join([''] + list(build_core_deps |
                                                               run_core_deps))

            d['run_depends'] += indent.join([''] + list(run_deps))
            d['run_depends'] += indent_core.join([''] + list(run_core_deps))
            # Make sure we append any packages before continuing
            packages.extend(packages_to_append)
            empty_recipe = False

        # If we are recursively getting packages for a particular version
        # we need to make sure this is reset on the loop
        version = None
        if exists(dir_path) and not force:
            print(
                'Directory %s already exists and you have not specified --force ' % dir_path)
            continue
        elif exists(dir_path) and force:
            print('Directory %s already exists, but forcing recipe creation' % dir_path)

        # If this is something we're downloading, get the SHA256 checksum
        d['cpanurl'] = ''
        # Conda build will guess the filename
        d['filename'] = repr('')
        d['sha256'] = ''
        if release_data.get('archive'):
            d['filename'] = basename(release_data['archive'])
        if release_data.get('download_url'):
            d['cpanurl'] = release_data['download_url']
            d['sha256'], size = get_checksum_and_size(
                release_data['download_url'])
            d['filename'] = basename(release_data['download_url'])
            print("Using url %s (%s) for %s." % (d['cpanurl'], size, package))
        else:
            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'

        try:
            d['homeurl'] = release_data['resources']['homepage']
        except KeyError:
            d['homeurl'] = 'http://metacpan.org/pod/' + package
        if 'abstract' in release_data:
            # TODO this does not escape quotes in a YAML friendly manner
            summary = repr(release_data['abstract']).lstrip('u')
            d['summary'] = summary
            # d['summary'] = repr(release_data['abstract']).lstrip('u')
        try:
            d['license'] = (release_data['license'][0] if
                            isinstance(release_data['license'], list) else
                            release_data['license'])
        except KeyError:
            d['license'] = 'perl_5'
        d['version'] = release_data['version']

        processed_packages.add(packagename + '-' + d['version'])

        # Create import tests
        module_prefix = package.replace('::', '-').split('-')[0]
        if 'provides' in release_data:
            for provided_mod in sorted(set(release_data['provides'])):
                # Filter out weird modules that don't belong
                if (provided_mod.startswith(module_prefix) and
                        '::_' not in provided_mod):
                    d['import_tests'] += indent + provided_mod
        if d['import_tests']:
            d['import_comment'] = ''
        else:
            d['import_comment'] = '# '

        if not exists(dir_path):
            makedirs(dir_path)

        # Write recipe files to a directory
        # TODO def write_recipe
        print("Writing recipe for %s-%s" % (packagename, d['version']))
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(CPAN_META.format(**d))
        with open(join(dir_path, 'build.sh'), 'w') as f:
            if empty_recipe:
                f.write('#!/bin/bash\necho "Nothing to do."\n')
            else:
                f.write(CPAN_BUILD_SH.format(**d))
        with open(join(dir_path, 'bld.bat'), 'w') as f:
            if empty_recipe:
                f.write('echo "Nothing to do."\n')
            else:
                f.write(CPAN_BLD_BAT.format(**d))
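
A hypothetical direct invocation, based only on the signature and the PACKAGE=VERSION parsing shown above (package names are illustrative; leaving config as None lets get_or_merge_config supply a default):

skeletonize(['Try::Tiny=0.30', 'Moose'],
            output_dir='recipes',
            recursive=False,
            write_core=False)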
Example #13
def get_py_ver(config):
    return '.'.join(config.variant.get('python',
                                       get_default_variant(config)['python']).split('.')[:2])
Example #14
def get_dependency_variants(requirements, conda_build_config, config, features=()):
    host = requirements.get("host") or []
    build = requirements.get("build") or []
    # run = requirements.get("run") or []

    variants = {}
    default_variant = get_default_variant(config)

    variants["target_platform"] = conda_build_config.get(
        "target_platform", [default_variant["target_platform"]]
    )
    config.variant["target_platform"] = variants["target_platform"][0]

    def get_variants(env):
        specs = {}

        for s in env:
            spec = CondaBuildSpec(s)
            specs[spec.name] = spec

        for n, cb_spec in specs.items():
            if cb_spec.is_compiler:
                # This is a compiler package
                _, lang = cb_spec.raw.split()
                compiler = conda_build.jinja_context.compiler(lang, config)
                cb_spec.final = compiler
                config_key = f"{lang}_compiler"
                config_version_key = f"{lang}_compiler_version"

                if conda_build_config.get(config_key):
                    variants[config_key] = conda_build_config[config_key]
                if conda_build_config.get(config_version_key):
                    variants[config_version_key] = conda_build_config[
                        config_version_key
                    ]

            variant_key = n.replace("-", "_")
            vlist = None
            if variant_key in conda_build_config:
                vlist = conda_build_config[variant_key]
            elif variant_key in default_variant:
                vlist = [default_variant[variant_key]]
            if vlist:
                # we need to check if v matches the spec
                if cb_spec.is_simple:
                    variants[variant_key] = vlist
                elif cb_spec.is_pin:
                    # ignore variants?
                    pass
                else:
                    # check intersection of MatchSpec and variants
                    ms = MatchSpec(cb_spec.raw)
                    filtered = []
                    for var in vlist:
                        vsplit = var.split()
                        if len(vsplit) == 1:
                            p = {
                                "name": n,
                                "version": vsplit[0],
                                "build_number": 0,
                                "build": "",
                            }
                        elif len(vsplit) == 2:
                            p = {
                                "name": n,
                                "version": var.split()[0],
                                "build": var.split()[1],
                                "build_number": 0,
                            }
                        else:
                            raise RuntimeError("Check your conda_build_config")

                        if ms.match(p):
                            filtered.append(var)
                        else:
                            console.print(
                                f"Configured variant ignored because of the recipe requirement:\n  {cb_spec.raw} : {var}\n"
                            )

                    if len(filtered):
                        variants[variant_key] = filtered

        return variants

    v = get_variants(host + build)
    return v
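
The filtering step relies on MatchSpec.match() accepting a package-record dict, exactly as called in the loop above. A small illustrative check, assuming conda's MatchSpec:

from conda.models.match_spec import MatchSpec

ms = MatchSpec('numpy >=1.17')
rec = {'name': 'numpy', 'version': '1.16', 'build': '', 'build_number': 0}
assert not ms.match(rec)  # '1.16' would be dropped from the variant list
rec['version'] = '1.18'
assert ms.match(rec)      # '1.18' survives the intersection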
Example #15
def skeletonize(packages,
                output_dir=".",
                version=None,
                meta_cpan_url="http://fastapi.metacpan.org/v1",
                recursive=False,
                force=False,
                config=None,
                write_core=False):
    '''
    Loops over packages, outputting conda recipes converted from CPAN metadata.
    '''

    config = get_or_merge_config(config)
    # TODO: load/use variants?

    perl_version = config.variant.get('perl',
                                      get_default_variant(config)['perl'])
    # wildcards are not valid for perl
    perl_version = perl_version.replace(".*", "")
    package_dicts = {}
    indent = '\n    - '
    indent_core = '\n    #- '
    processed_packages = set()
    orig_version = version
    while packages:
        package = packages.pop()
        # If we're passed version in the same format as `PACKAGE=VERSION`
        # update version
        if '=' in package:
            package, _, version = package.partition('=')
        else:
            version = orig_version

        # Skip duplicates
        if package in processed_packages:
            continue
        processed_packages.add(package)

        # Convert modules into distributions
        orig_package = package
        package = dist_for_module(meta_cpan_url,
                                  package,
                                  perl_version,
                                  config=config)
        if package == 'perl':
            print(
                ("WARNING: {0} is a Perl core module that is not developed " +
                 "outside of Perl, so we are skipping creating a recipe " +
                 "for it.").format(orig_package))
            continue
        elif package not in {orig_package, orig_package.replace('::', '-')}:
            print(
                ("WARNING: {0} was part of the {1} distribution, so we are " +
                 "making a recipe for {1} instead.").format(
                     orig_package, package))

        latest_release_data = get_release_info(meta_cpan_url,
                                               package,
                                               None,
                                               perl_version,
                                               config=config)
        packagename = perl_to_conda(package)

        # Skip duplicates
        if ((version is not None and
             ((packagename + '-' + version) in processed_packages))
                or ((packagename + '-' + latest_release_data['version'])
                    in processed_packages)):
            continue

        d = package_dicts.setdefault(
            package, {
                'packagename': packagename,
                'run_depends': '',
                'build_depends': '',
                'build_comment': '# ',
                'test_commands': '',
                'usesha256': '',
                'useurl': '',
                'source_comment': '',
                'summary': "''",
                'import_tests': ''
            })

        # Fetch all metadata from CPAN
        if version is None:
            release_data = latest_release_data
        else:
            release_data = get_release_info(meta_cpan_url,
                                            package,
                                            parse_version(version),
                                            perl_version,
                                            config=config)

        # Check if recipe directory already exists
        dir_path = join(output_dir, packagename, release_data['version'])

        # Add Perl version to core module requirements, since these are empty
        # packages, unless we're newer than what's in core
        if metacpan_api_is_core_version(meta_cpan_url, package):

            if not write_core:
                print('We found core module %s. Skipping recipe creation.' %
                      packagename)
                continue

            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'
            empty_recipe = True
        # Add dependencies to d if not in core, or newer than what's in core
        else:
            build_deps, build_core_deps, run_deps, run_core_deps, packages_to_append = \
                deps_for_package(package, release_data=release_data, perl_version=perl_version,
                                 output_dir=output_dir, meta_cpan_url=meta_cpan_url,
                                 recursive=recursive, config=config)

            # Get which deps are in perl_core

            d['build_depends'] += indent.join([''] +
                                              list(build_deps | run_deps))
            d['build_depends'] += indent_core.join([''] +
                                                   list(build_core_deps
                                                        | run_core_deps))

            d['run_depends'] += indent.join([''] + list(run_deps))
            d['run_depends'] += indent_core.join([''] + list(run_core_deps))
            # Make sure we append any packages before continuing
            packages.extend(packages_to_append)
            empty_recipe = False

        # If we are recursively getting packages for a particular version
        # we need to make sure this is reset on the loop
        version = None
        if exists(dir_path) and not force:
            print(
                'Directory %s already exists and you have not specified --force '
                % dir_path)
            continue
        elif exists(dir_path) and force:
            print('Directory %s already exists, but forcing recipe creation' %
                  dir_path)

        # If this is something we're downloading, get the SHA256 checksum
        d['cpanurl'] = ''
        # Conda build will guess the filename
        d['filename'] = repr('')
        d['sha256'] = ''
        if release_data.get('archive'):
            d['filename'] = basename(release_data['archive'])
        if release_data.get('download_url'):
            d['cpanurl'] = release_data['download_url']
            d['sha256'], size = get_checksum_and_size(
                release_data['download_url'])
            d['filename'] = basename(release_data['download_url'])
            print("Using url %s (%s) for %s." % (d['cpanurl'], size, package))
        else:
            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'

        try:
            d['homeurl'] = release_data['resources']['homepage']
        except KeyError:
            d['homeurl'] = 'http://metacpan.org/pod/' + package
        if 'abstract' in release_data:
            # TODO this does not escape quotes in a YAML friendly manner
            summary = repr(release_data['abstract']).lstrip('u')
            d['summary'] = summary
            # d['summary'] = repr(release_data['abstract']).lstrip('u')
        try:
            d['license'] = (release_data['license'][0] if isinstance(
                release_data['license'], list) else release_data['license'])
        except KeyError:
            d['license'] = 'perl_5'
        d['version'] = release_data['version']

        processed_packages.add(packagename + '-' + d['version'])

        # Create import tests
        module_prefix = package.replace('::', '-').split('-')[0]
        if 'provides' in release_data:
            for provided_mod in sorted(set(release_data['provides'])):
                # Filter out weird modules that don't belong
                if (provided_mod.startswith(module_prefix)
                        and '::_' not in provided_mod):
                    d['import_tests'] += indent + provided_mod
        if d['import_tests']:
            d['import_comment'] = ''
        else:
            d['import_comment'] = '# '

        if not exists(dir_path):
            makedirs(dir_path)

        # Write recipe files to a directory
        # TODO def write_recipe
        print("Writing recipe for %s-%s" % (packagename, d['version']))
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(CPAN_META.format(**d))
        with open(join(dir_path, 'build.sh'), 'w') as f:
            if empty_recipe:
                f.write('#!/bin/bash\necho "Nothing to do."\n')
            else:
                f.write(CPAN_BUILD_SH.format(**d))
        with open(join(dir_path, 'bld.bat'), 'w') as f:
            if empty_recipe:
                f.write('echo "Nothing to do."\n')
            else:
                f.write(CPAN_BLD_BAT.format(**d))
Example #16
def msvc_env_cmd(bits, config, override=None):
    log = get_logger(__name__)
    log.warn("Using legacy MSVC compiler setup.  This will be removed in conda-build 4.0. "
             "If this recipe does not use a compiler, this message is safe to ignore.  "
             "Otherwise, use {{compiler('<language>')}} jinja2 in requirements/build.")
    # this has been an int at times.  Make sure it's a string for consistency.
    bits = str(bits)
    arch_selector = 'x86' if bits == '32' else 'amd64'

    msvc_env_lines = []

    version = None
    if override is not None:
        version = override

    # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
    # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
    # For >= 3.5 it literally just skips the validation logic.
    # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
    # for more information.
    msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
    # This is also required to hit the 'don't validate' logic on < 3.5.
    # For >= 3.5 this is ignored.
    msvc_env_lines.append('set MSSdk=1')

    if not version:
        py_ver = config.variant.get('python', get_default_variant(config)['python'])
        if int(py_ver[0]) >= 3:
            if int(py_ver.split('.')[1]) < 5:
                version = '10.0'
            else:
                version = '14.0'
        else:
            version = '9.0'

    if float(version) >= 14.0:
        # For Python 3.5+, ensure that we link with the dynamic runtime.  See
        # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
        msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
            version.replace('.', '')))

    vcvarsall_vs_path = build_vcvarsall_vs_path(version)

    def build_vcvarsall_cmd(cmd, arch=arch_selector):
        # Default argument `arch_selector` is defined above
        return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)

    msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
    msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
    msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
    msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] +
                                                            {'64': ' Win64', '32': ''}[bits]))
    # tell msys2 to ignore path conversions for issue-causing windows-style flags in build
    #   See https://github.com/conda-forge/icu-feedstock/pull/5
    msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
    msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
    if version == '10.0':
        try:
            WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
                                            'installationfolder')
            WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')

            win_sdk_arch = '/Release /x86' if bits == '32' else '/Release /x64'
            win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)

            # There are two methods of building Python 3.3 and 3.4 extensions (both
            # of which required Visual Studio 2010 - as explained in the Python wiki
            # https://wiki.python.org/moin/WindowsCompilers)
            # 1) Use the Windows SDK 7.1
            # 2) Use Visual Studio 2010 (any edition)
            # However, VS2010 never shipped with a 64-bit compiler, so in this case
            # **only** option (1) applies. For this reason, we always try and
            # activate the Windows SDK first. Unfortunately, unsuccessfully setting
            # up the environment does **not EXIT 1** and therefore we must fall
            # back to attempting to set up VS2010.
            # DelayedExpansion is required for the SetEnv.cmd
            msvc_env_lines.append('Setlocal EnableDelayedExpansion')
            msvc_env_lines.append(win_sdk_cmd)
            # If the WindowsSDKDir environment variable has not been successfully
            # set then try activating VS2010
            msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
                WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
        # sdk is not installed.  Fall back to only trying VS 2010
        except KeyError:
            msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
    elif version == '9.0':
        # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
        # and get the 'vcvars64.bat' from inside the bin (in the directory above
        # that returned by distutils_find_vcvarsall)
        try:
            VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),
                                                 'bin', 'vcvars64.bat')
        # there's an exception if VS or the VC compiler for python are not actually installed.
        except (KeyError, TypeError):
            VCVARS64_VS9_BAT_PATH = None

        error1 = 'IF %ERRORLEVEL% NEQ 0 {}'

        # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
        # The Visual Studio 2008 Express edition does not properly contain
        # the amd64 build files, so we call the vcvars64.bat manually,
        # rather than using the vcvarsall.bat which would try and call the
        # missing bat file.
        if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
            msvc_env_lines.append(error1.format(
                build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
        # Otherwise, fall back to Microsoft Visual C++ Compiler for Python 2.7
        # by using the logic provided by setuptools
        msvc_env_lines.append(error1.format(
            build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
    else:
        # Visual Studio 14 or otherwise
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))

    return '\n'.join(msvc_env_lines) + '\n'
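
For orientation, the head of the script this returns for a 64-bit build with VS 14.0 would look roughly like the sketch below (assuming a Python 3.5+ variant; the vcvarsall path and CMake generator string are placeholders):

#   set DISTUTILS_USE_SDK=1
#   set MSSdk=1
#   set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\vcruntime140.dll
#   set "VS_VERSION=14.0"
#   set "VS_MAJOR=14"
#   set "CMAKE_GENERATOR=<VS_VERSION_STRING['14.0']> Win64"
#   call "<vcvarsall_vs_path>" amd64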
Example #17
def get_lua_ver(config):
    return '.'.join(config.variant.get('lua', get_default_variant(config)['lua']).split('.')[:2])
Example #18
def get_py_ver(config):
    py = config.variant.get('python', get_default_variant(config)['python'])
    if not hasattr(py, 'split'):
        py = py[0]
    return '.'.join(py.split('.')[:2])
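
The hasattr guard covers variant values that arrive as a list rather than a string; the first entry wins. A small sketch with illustrative values:

py = ['3.8.12', '3.9.7']      # list-valued variant
if not hasattr(py, 'split'):  # lists have no .split
    py = py[0]
assert '.'.join(py.split('.')[:2]) == '3.8'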
Example #19
def skeletonize(packages,
                output_dir=".",
                version=None,
                meta_cpan_url="https://fastapi.metacpan.org/v1",
                recursive=False,
                force=False,
                config=None,
                write_core=False):
    '''
    Loops over packages, outputting conda recipes converted from CPAN metadata.
    '''
    config = get_or_merge_config(config)
    cache_dir = os.path.join(config.src_cache_root, '.conda-build',
                             'pickled.cb')

    # TODO :: Make a temp env. with perl (which we need anyway) and use whatever version
    #         got installed instead of this. Also allow the version to be specified.
    perl_version = config.variant.get('perl',
                                      get_default_variant(config)['perl'])
    core_modules = get_core_modules_for_this_perl_version(
        perl_version, cache_dir)

    # wildcards are not valid for perl
    perl_version = perl_version.replace(".*", "")
    package_dicts = {}
    indent = '\n    - '
    indent_core = '\n    #- '
    processed_packages = set()
    orig_version = version
    new_packages = []
    for package in packages:
        # TODO :: Determine whether the user asked for a module or a package here.
        #      :: if a package, then the 2nd element is None, if a module then the
        #      :: 2nd element gives the name of that module.
        #      :: I also need to take care about the differences between package
        #      :: versions and module versions.
        # module = package
        # module = dist_for_module(meta_cpan_url, cache_dir, core_modules, package)
        module = None
        new_packages.append((package, module))
    packages = new_packages
    while packages:
        package, module = packages.pop()
        # If we're passed version in the same format as `PACKAGE=VERSION`
        # update version
        if '=' in package:
            package, _, version = package.partition('=')
        else:
            version = orig_version

        # Skip duplicates
        if package in processed_packages:
            continue
        processed_packages.add(package)

        # Convert modules into distributions .. incorrectly. Instead we should just look up
        # https://fastapi.metacpan.org/v1/module/Regexp::Common which seems to contain every
        # bit of information we could want here. Versions, modules, module versions,
        # distribution name, urls. The lot. Instead we mess about with other API end-points
        # getting a load of nonsense.
        orig_package = package
        package = dist_for_module(meta_cpan_url, cache_dir, core_modules,
                                  module if module else package)
        if package == 'perl':
            print(
                ("WARNING: {0} is a Perl core module that is not developed " +
                 "outside of Perl, so we are skipping creating a recipe " +
                 "for it.").format(orig_package))
            continue
        elif package not in {orig_package, orig_package.replace('::', '-')}:
            print(
                ("WARNING: {0} was part of the {1} distribution, so we are " +
                 "making a recipe for {1} instead.").format(
                     orig_package, package))

        latest_release_data = get_release_info(
            meta_cpan_url, cache_dir, core_modules,
            module if module else orig_package, version)
        packagename = perl_to_conda(package)

        # Skip duplicates
        if ((version is not None and
             ((packagename + '-' + version) in processed_packages))
                or ((packagename + '-' + latest_release_data['version'])
                    in processed_packages)):
            continue

        d = package_dicts.setdefault(
            package, {
                'packagename': packagename,
                'build_depends': '',
                'host_depends': '',
                'run_depends': '',
                'build_comment': '# ',
                'test_commands': '',
                'usesha256': '',
                'useurl': '',
                'source_comment': '',
                'summary': "''",
                'import_tests': ''
            })

        # Fetch all metadata from CPAN
        if version is None:
            release_data = latest_release_data
        else:
            release_data = get_release_info(meta_cpan_url, cache_dir,
                                            core_modules, package,
                                            parse_version(version))

        # Check if recipe directory already exists
        dir_path = join(output_dir, packagename, release_data['version'])

        # Add Perl version to core module requirements, since these are empty
        # packages, unless we're newer than what's in core
        if metacpan_api_is_core_version(meta_cpan_url, package):

            if not write_core:
                print('We found core module %s. Skipping recipe creation.' %
                      packagename)
                continue

            d['useurl'] = '#'
            d['usesha256'] = '#'
            d['source_comment'] = '#'
            empty_recipe = True
        # Add dependencies to d if not in core, or newer than what's in core
        else:
            deps, packages_to_append = \
                deps_for_package(package, release_data=release_data,
                                 output_dir=output_dir, cache_dir=cache_dir,
                                 meta_cpan_url=meta_cpan_url, recursive=recursive, core_modules=core_modules)

            # If this is something we're downloading, get the SHA256 checksum
            d['cpanurl'] = ''
            d['sha256'] = ''
            if release_data.get('download_url'):
                d['cpanurl'] = release_data['download_url']
                d['sha256'], size = get_checksum_and_size(
                    release_data['download_url'])
                print("Using url {} ({}) for {}.".format(
                    d['cpanurl'], size, package))
                src_build_depends = get_build_dependencies_from_src_archive(
                    release_data['download_url'], d['sha256'],
                    config.src_cache)
            else:
                src_build_depends = []
                d['useurl'] = '#'
                d['usesha256'] = '#'
                d['source_comment'] = '#'

            d['build_depends'] += indent.join([''] + src_build_depends)

            #            d['build_depends'] += indent_core.join([''] + list(deps['build']['core'] |
            #                                                               deps['run']['core']))

            d['host_depends'] += indent.join([''] +
                                             list(deps['build']['noncore']
                                                  | deps['run']['noncore']))

            # run_exports will set these, but:
            # TODO :: Add ignore_run_exports for things in deps['build'] that are not also
            #         in deps['run']
            d['run_depends'] += indent_core.join([''] +
                                                 list(deps['run']['noncore']))

            # Make sure we append any packages before continuing
            for pkg in packages_to_append:
                if pkg not in packages:
                    packages.append(pkg)
                else:
                    print("INFO :: Already building package {} (module {})".
                          format(*pkg))
            empty_recipe = False

        # If we are recursively getting packages for a particular version
        # we need to make sure this is reset on the loop
        version = None
        if exists(dir_path) and not force:
            print(
                'Directory %s already exists and you have not specified --force '
                % dir_path)
            continue
        elif exists(dir_path) and force:
            print('Directory %s already exists, but forcing recipe creation' %
                  dir_path)

        try:
            d['homeurl'] = release_data['resources']['homepage']
        except KeyError:
            d['homeurl'] = 'http://metacpan.org/pod/' + package
        if 'abstract' in release_data:
            # TODO this does not escape quotes in a YAML friendly manner
            summary = repr(release_data['abstract']).lstrip('u')
            d['summary'] = summary
            # d['summary'] = repr(release_data['abstract']).lstrip('u')
        try:
            d['license'] = (release_data['license'][0] if isinstance(
                release_data['license'], list) else release_data['license'])
        except KeyError:
            d['license'] = 'perl_5'
        d['version'] = release_data['version']

        processed_packages.add(packagename + '-' + d['version'])

        # Create import tests
        module_prefix = package.replace('::', '-').split('-')[0]
        if 'provides' in release_data:
            for provided_mod in sorted(set(release_data['provides'])):
                # Filter out weird modules that don't belong
                if (provided_mod.startswith(module_prefix)
                        and '::_' not in provided_mod):
                    d['import_tests'] += indent + provided_mod
        if d['import_tests']:
            d['import_comment'] = ''
        else:
            d['import_comment'] = '# '

        if not exists(dir_path):
            makedirs(dir_path)

        # Write recipe files to a directory
        # TODO def write_recipe
        print("Writing recipe for {}-{}".format(packagename, d['version']))
        with open(join(dir_path, 'meta.yaml'), 'wb') as f:
            f.write(CPAN_META.format(**d).encode('utf-8'))
        with open(join(dir_path, 'build.sh'), 'wb') as f:
            if empty_recipe:
                f.write(b'#!/bin/bash\necho "Nothing to do."\n')
            else:
                f.write(CPAN_BUILD_SH.format(**d).encode('utf-8'))
        with open(join(dir_path, 'bld.bat'), 'w') as f:
            if empty_recipe:
                f.write('echo "Nothing to do."\n')
            else:
                f.write(CPAN_BLD_BAT.format(**d))
Example #20
def get_r_ver(config):
    return '.'.join(
        config.variant.get(
            'r_base',
            get_default_variant(config)['r_base']).split('.')[:3])
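
Unlike the Python/Perl/Lua helpers, this one keeps up to three components, so a full major.minor.patch pin survives intact (illustrative values):

assert '.'.join('3.6.1'.split('.')[:3]) == '3.6.1'
assert '.'.join('4.1'.split('.')[:3]) == '4.1'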
Example #21
def get_r_ver(config):
    return '.'.join(config.variant.get('r_base',
                                       get_default_variant(config)['r_base']).split('.')[:3])
Example #22
def msvc_env_cmd(bits, config, override=None):
    log = get_logger(__name__)
    log.warn("Using legacy MSVC compiler setup.  This will be removed in conda-build 4.0. "
             "If this recipe does not use a compiler, this message is safe to ignore.  "
             "Otherwise, use {{compiler('<language>')}} jinja2 in requirements/build.")
    if override:
        log.warn("msvc_compiler key in meta.yaml is deprecated. Use the new"
        "variant-powered compiler configuration instead. Note that msvc_compiler"
        "is incompatible with the new \{\{compiler('c')\}\} jinja scheme.")
    # this has been an int at times.  Make sure it's a string for consistency.
    bits = str(bits)
    arch_selector = 'x86' if bits == '32' else 'amd64'

    msvc_env_lines = []

    version = None
    if override is not None:
        version = override

    # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
    # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
    # For >= 3.5 it literally just skips the validation logic.
    # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
    # for more information.
    msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
    # This is also required to hit the 'don't validate' logic on < 3.5.
    # For >= 3.5 this is ignored.
    msvc_env_lines.append('set MSSdk=1')

    if not version:
        py_ver = config.variant.get('python', get_default_variant(config)['python'])
        if int(py_ver[0]) >= 3:
            if int(py_ver.split('.')[1]) < 5:
                version = '10.0'
            else:
                version = '14.0'
        else:
            version = '9.0'

    if float(version) >= 14.0:
        # For Python 3.5+, ensure that we link with the dynamic runtime.  See
        # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
        msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
            version.replace('.', '')))

    vcvarsall_vs_path = build_vcvarsall_vs_path(version)

    def build_vcvarsall_cmd(cmd, arch=arch_selector):
        # Default argument `arch_selector` is defined above
        return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)

    msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
    msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
    msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
    msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] +
                                                            {'64': ' Win64', '32': ''}[bits]))
    # tell msys2 to ignore path conversions for issue-causing windows-style flags in build
    #   See https://github.com/conda-forge/icu-feedstock/pull/5
    msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
    msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
    if version == '10.0':
        try:
            WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
                                            'installationfolder')
            WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')

            win_sdk_arch = '/Release /x86' if bits == '32' else '/Release /x64'
            win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)

            # There are two methods of building Python 3.3 and 3.4 extensions (both
            # of which required Visual Studio 2010 - as explained in the Python wiki
            # https://wiki.python.org/moin/WindowsCompilers)
            # 1) Use the Windows SDK 7.1
            # 2) Use Visual Studio 2010 (any edition)
            # However, VS2010 never shipped with a 64-bit compiler, so in this case
            # **only** option (1) applies. For this reason, we always try and
            # activate the Windows SDK first. Unfortunately, unsuccessfully setting
            # up the environment does **not EXIT 1** and therefore we must fall
            # back to attempting to set up VS2010.
            # DelayedExpansion is required for the SetEnv.cmd
            msvc_env_lines.append('Setlocal EnableDelayedExpansion')
            msvc_env_lines.append(win_sdk_cmd)
            # If the WindowsSDKDir environment variable has not been successfully
            # set then try activating VS2010
            msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
                WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
        # sdk is not installed.  Fall back to only trying VS 2010
        except KeyError:
            msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
    elif version == '9.0':
        # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
        # and get the 'vcvars64.bat' from inside the bin (in the directory above
        # that returned by distutils_find_vcvarsall)
        try:
            VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),
                                                 'bin', 'vcvars64.bat')
        # there's an exception if VS or the VC compiler for python are not actually installed.
        except (KeyError, TypeError):
            VCVARS64_VS9_BAT_PATH = None

        error1 = 'IF %ERRORLEVEL% NEQ 0 {}'

        # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
        # The Visual Studio 2008 Express edition does not properly contain
        # the amd64 build files, so we call the vcvars64.bat manually,
        # rather than using the vcvarsall.bat which would try and call the
        # missing bat file.
        if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
            msvc_env_lines.append(error1.format(
                build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
        # Otherwise, fall back to Microsoft Visual C++ Compiler for Python 2.7
        # by using the logic provided by setuptools
        msvc_env_lines.append(error1.format(
            build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
    else:
        # Visual Studio 14 or otherwise
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))

    return '\n'.join(msvc_env_lines) + '\n'
Example #23
File: run_build.py, Project: stjordanis/boa
def get_dependency_variants(requirements, conda_build_config, config):
    host = requirements.get("host") or []
    build = requirements.get("build") or []
    # run = requirements.get("run") or []

    variants = {}
    default_variant = get_default_variant(config)

    # When compiling for OS X, we should fetch the clang compilers ...
    # I think this needs a more thorough rework
    # if config.variant["target_platform"] == "osx-64":
    # default_variant.update(
    #     {
    #         "c_compiler": "clang",
    #         "cxx_compiler": "clangxx",
    #         "fortran_compiler": "gfortran",
    #     },
    # )

    variants["target_platform"] = conda_build_config.get(
        "target_platform", [default_variant["target_platform"]]
    )

    if conda_build_config["target_platform"] == [None]:
        variants["target_platform"] = [default_variant["target_platform"]]

    config.variant["target_platform"] = variants["target_platform"][0]

    sys_var_stubs = get_sys_vars_stubs(config.variant["target_platform"])

    def get_variants(env):
        specs = {}

        for var in sys_var_stubs:
            if var in conda_build_config:
                variants[var] = ensure_list(conda_build_config[var])

        for s in env:
            spec = CondaBuildSpec(s)
            specs[spec.name] = spec

        for n, cb_spec in specs.items():
            if cb_spec.is_compiler:
                # This is a compiler package
                _, lang = cb_spec.raw.split()
                compiler = conda_build.jinja_context.compiler(lang, config)
                cb_spec.final = compiler
                config_key = f"{lang}_compiler"
                config_version_key = f"{lang}_compiler_version"

                if conda_build_config.get(config_key):
                    variants[config_key] = conda_build_config[config_key]
                if conda_build_config.get(config_version_key):
                    variants[config_version_key] = conda_build_config[
                        config_version_key
                    ]

            # Note: as a historical artifact we __have to__ use underscore-replaced
            # names here!
            variant_key = n.replace("-", "_")
            vlist = None
            if variant_key in conda_build_config:
                vlist = conda_build_config[variant_key]
            elif variant_key in default_variant:
                vlist = [default_variant[variant_key]]
            if vlist:
                # we need to check if v matches the spec
                if cb_spec.is_simple:
                    variants[variant_key] = vlist
                elif cb_spec.is_pin:
                    # ignore variants?
                    pass
                else:
                    # check intersection of MatchSpec and variants
                    ms = MatchSpec(cb_spec.raw)
                    filtered = []
                    for var in vlist:
                        vsplit = var.split()
                        if len(vsplit) == 1:
                            p = {
                                "name": n,
                                "version": vsplit[0],
                                "build_number": 0,
                                "build": "",
                            }
                        elif len(vsplit) == 2:
                            p = {
                                "name": n,
                                "version": var.split()[0],
                                "build": var.split()[1],
                                "build_number": 0,
                            }
                        else:
                            raise RuntimeError("Check your conda_build_config")

                        if ms.match(p):
                            filtered.append(var)
                        else:
                            console.print(
                                f"Configured variant ignored because of the recipe requirement:\n  {cb_spec.raw} : {var}\n"
                            )

                    if len(filtered):
                        variants[variant_key] = filtered

        return variants

    v = get_variants(host + build)
    return v
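
A hypothetical call, with cbc standing in for a parsed conda_build_config.yaml and config an existing conda-build Config whose variant is populated; spec strings and versions are illustrative:

requirements = {'host': ['python', 'numpy >=1.17'], 'build': []}
cbc = {'target_platform': ['linux-64'], 'numpy': ['1.16', '1.18']}
variants = get_dependency_variants(requirements, cbc, config)
# roughly -> {'target_platform': ['linux-64'], 'numpy': ['1.18'], 'python': [...]}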