# Shared stdlib imports used across these excerpted functions; the
# conda-build internals they call (get_default_variants, utils, MetaData,
# Reg, feature_list, non_x86_linux_machines, ...) come from the
# surrounding package.
import os
from collections import defaultdict
from distutils.version import LooseVersion
from os import makedirs
from os.path import basename, exists, join


def ns_cfg(config):
    # Remember to update the docs if any of this changes
    plat = config.build_subdir
    d = dict(
        linux=plat.startswith('linux-'),
        linux32=bool(plat == 'linux-32'),
        linux64=bool(plat == 'linux-64'),
        arm=plat.startswith('linux-arm'),
        osx=plat.startswith('osx-'),
        unix=plat.startswith(('linux-', 'osx-')),
        win=plat.startswith('win-'),
        win32=bool(plat == 'win-32'),
        win64=bool(plat == 'win-64'),
        x86=plat.endswith(('-32', '-64')),
        x86_64=plat.endswith('-64'),
        os=os,
        environ=os.environ,
        nomkl=bool(int(os.environ.get('FEATURE_NOMKL', False))),
    )
    py = config.variant.get('python', get_default_variants()[0]['python'])
    # e.g. '3.6' -> 36
    py = int("".join(py.split('.')[:2]))
    d.update(dict(py=py,
                  py3k=bool(30 <= py < 40),
                  py2k=bool(20 <= py < 30),
                  py26=bool(py == 26),
                  py27=bool(py == 27),
                  py33=bool(py == 33),
                  py34=bool(py == 34),
                  py35=bool(py == 35),
                  py36=bool(py == 36)))
    np = config.variant.get('numpy', get_default_variants()[0]['numpy'])
    d['np'] = int("".join(np.split('.')[:2]))
    pl = config.variant.get('perl', get_default_variants()[0]['perl'])
    d['pl'] = pl
    lua = config.variant.get('lua', get_default_variants()[0]['lua'])
    d['lua'] = lua
    d['luajit'] = bool(lua[0] == "2")
    for machine in non_x86_linux_machines:
        d[machine] = bool(plat == 'linux-%s' % machine)
    for feature, value in feature_list:
        d[feature] = value
    d.update(os.environ)
    return d
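
# A minimal usage sketch, not from the source: ns_cfg() builds the namespace
# that meta.yaml preprocessing selectors such as `[py3k]` or `[win64]` are
# evaluated against. `_SelectorCfg` is a hypothetical stand-in for a real
# conda_build Config, and the two globals below are illustrative stubs for
# the package-level values ns_cfg reads. Note that get_default_variants()
# must still be importable: Python evaluates .get()'s default argument
# eagerly even though the variant supplies every key here.
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}  # illustrative stub
feature_list = []  # illustrative stub: no tracked features


class _SelectorCfg:
    build_subdir = 'linux-64'
    variant = {'python': '3.6', 'numpy': '1.11', 'perl': '5.22.2', 'lua': '5.2'}


ns = ns_cfg(_SelectorCfg())
assert ns['linux64'] and ns['py3k'] and ns['py'] == 36 and not ns['luajit']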
def python_vars(config):
    py_ver = config.variant.get('python', get_default_variants()[0]['python'])
    py_ver = '.'.join(py_ver.split('.')[:2])
    d = {
        'PYTHON': config.build_python,
        'PY3K': str(int(py_ver[0]) == 3),
        'STDLIB_DIR': utils.get_stdlib_dir(config.build_prefix, py_ver),
        'SP_DIR': utils.get_site_packages(config.build_prefix, py_ver),
        'PY_VER': py_ver,
        'CONDA_PY': ''.join(py_ver.split('.')[:2]),
    }
    np_ver = config.variant.get('numpy', get_default_variants()[0]['numpy'])
    d['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    d['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    return d
def get_npy_ver(config):
    conda_npy = ''.join(str(config.variant.get('numpy') or
                            get_default_variants()[0]['numpy']).split('.'))
    # Convert int -> string, e.g.
    #   17  -> '1.7'
    #   110 -> '1.10'
    return conda_npy[0] + '.' + conda_npy[1:]
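
# A quick, hedged demonstration with a hypothetical minimal config: because
# get_npy_ver() uses `config.variant.get('numpy') or ...`, the
# get_default_variants() fallback short-circuits away when a value is
# present, so this runs standalone. '1.11' survives the round trip through
# the digits-only CONDA_NPY form '111'.
class _NpyCfg:
    variant = {'numpy': '1.11'}


assert get_npy_ver(_NpyCfg()) == '1.11'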
def lua_vars(config):
    lua = config.build_lua
    lua_ver = config.variant.get('lua', get_default_variants()[0]['lua'])
    return {
        'LUA': lua,
        'LUA_INCLUDE_DIR': get_lua_include_dir(config),
        'LUA_VER': lua_ver,
    }
def testing_metadata(request, testing_config):
    d = defaultdict(dict)
    d['package']['name'] = request.function.__name__
    d['package']['version'] = '1.0'
    d['build']['number'] = '1'
    d['build']['entry_points'] = []
    d['requirements']['build'] = ['python']
    d['requirements']['run'] = ['python']
    d['test']['commands'] = ['echo "A-OK"', 'exit 0']
    d['about']['home'] = "sweet home"
    d['about']['license'] = "contract in blood"
    d['about']['summary'] = "a test package"
    testing_config.variant = get_default_variants()[0]
    return MetaData.fromdict(d, config=testing_config)
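
# A hedged usage sketch: in a test suite this function would typically be
# registered as a pytest fixture so each test receives a ready-made MetaData.
# The registration and test below are illustrative, not from the source;
# name() and version() are the MetaData accessors.
import pytest


@pytest.fixture
def metadata(request, testing_config):
    return testing_metadata(request, testing_config)


def test_fixture_defaults(metadata):
    # The package is named after the requesting test function.
    assert metadata.name() == 'test_fixture_defaults'
    assert metadata.version() == '1.0'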
def python_vars(config, prefix):
    py_ver = get_py_ver(config)
    vars_ = {
        'CONDA_PY': ''.join(py_ver.split('.')[:2]),
        'PY3K': str(int(int(py_ver[0]) >= 3)),
        'PY_VER': py_ver,
        'STDLIB_DIR': utils.get_stdlib_dir(prefix, py_ver),
        'SP_DIR': utils.get_site_packages(prefix, py_ver),
    }
    if os.path.isfile(config.python_bin(prefix)):
        vars_.update({
            'PYTHON': config.python_bin(prefix),
        })
    np_ver = config.variant.get('numpy', get_default_variants()[0]['numpy'])
    vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2])
    vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2])
    return vars_
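
# A hedged usage sketch (assumes `cfg` is a real conda_build Config whose
# variant includes {'python': '3.6', 'numpy': '1.11'} and `prefix` is an
# existing environment path):
#
#     env = python_vars(cfg, prefix)
#     assert env['CONDA_PY'] == '36' and env['PY3K'] == '1'
#     assert env['NPY_VER'] == '1.11' and env['CONDA_NPY'] == '111'
#
# PYTHON is only added when the interpreter actually exists in the prefix.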
def skeletonize(packages, output_dir=".", version=None,
                meta_cpan_url="http://fastapi.metacpan.org/v1",
                recursive=False, force=False, config=None):
    '''
    Loops over packages, outputting conda recipes converted from CPAN metadata.
    '''
    config = get_or_merge_config(config)
    # TODO: load/use variants?
    perl_version = config.variant.get('perl', get_default_variants()[0]['perl'])
    # wildcards are not valid for perl
    perl_version = perl_version.replace(".*", "")
    package_dicts = {}
    indent = '\n - '
    processed_packages = set()
    orig_version = version
    while packages:
        package = packages.pop()

        # If we're passed a version in the same format as `PACKAGE=VERSION`,
        # update the version accordingly
        if '=' in package:
            package, _, version = package.partition('=')
        else:
            version = orig_version

        # Skip duplicates
        if package in processed_packages:
            continue
        processed_packages.add(package)

        # Convert modules into distributions
        orig_package = package
        package = dist_for_module(meta_cpan_url, package, perl_version,
                                  config=config)
        if package == 'perl':
            print(("WARNING: {0} is a Perl core module that is not developed "
                   "outside of Perl, so we are skipping creating a recipe "
                   "for it.").format(orig_package))
            continue
        elif package not in {orig_package, orig_package.replace('::', '-')}:
            print(("WARNING: {0} was part of the {1} distribution, so we are "
                   "making a recipe for {1} instead.").format(orig_package,
                                                              package))

        latest_release_data = get_release_info(meta_cpan_url, package, None,
                                               perl_version, config=config)
        packagename = perl_to_conda(package)

        # Skip duplicates
        if ((version is not None and
                ((packagename + '-' + version) in processed_packages)) or
                ((packagename + '-' + latest_release_data['version']) in
                 processed_packages)):
            continue

        d = package_dicts.setdefault(package,
                                     {'packagename': packagename,
                                      'run_depends': '',
                                      'build_depends': '',
                                      'build_comment': '# ',
                                      'test_commands': '',
                                      'usemd5': '',
                                      'useurl': '',
                                      'source_comment': '',
                                      'summary': "''",
                                      'import_tests': ''})

        # Fetch all metadata from CPAN
        core_version = core_module_version(package, perl_version, config=config)
        release_data = get_release_info(meta_cpan_url, package,
                                        (LooseVersion(version)
                                         if version not in [None, '']
                                         else core_version),
                                        perl_version, config=config)

        # Check if the recipe directory already exists
        dir_path = join(output_dir, packagename, release_data['version'])

        # Add the Perl version to core module requirements, since these are
        # empty packages, unless we're newer than what's in core
        if core_version is not None and ((version in [None, '']) or
                                         (core_version >= LooseVersion(version))):
            d['useurl'] = '#'
            d['usemd5'] = '#'
            d['source_comment'] = '#'
            empty_recipe = True
        # Add dependencies to d if not in core, or newer than what's in core
        else:
            build_deps, run_deps, packages_to_append = deps_for_package(
                package, release_data=release_data, perl_version=perl_version,
                output_dir=output_dir, meta_cpan_url=meta_cpan_url,
                recursive=recursive, config=config)
            d['build_depends'] += indent.join([''] + list(build_deps | run_deps))
            d['run_depends'] += indent.join([''] + list(run_deps))
            # Make sure we append any packages before continuing
            packages.extend(packages_to_append)
            empty_recipe = False

        # If we are recursively getting packages for a particular version,
        # we need to make sure this is reset on each iteration
        version = None
        if exists(dir_path) and not force:
            print('Directory %s already exists and you have not specified --force'
                  % dir_path)
            continue
        elif exists(dir_path) and force:
            print('Directory %s already exists, but forcing recipe creation'
                  % dir_path)

        # If this is something we're downloading, get the MD5
        if release_data['download_url']:
            d['cpanurl'] = release_data['download_url']
            d['md5'], size = get_checksum_and_size(release_data['download_url'])
            d['filename'] = basename(release_data['archive'])
            print("Using url %s (%s) for %s." % (d['cpanurl'], size, package))
        else:
            d['useurl'] = '#'
            d['usemd5'] = '#'
            d['source_comment'] = '#'
            d['cpanurl'] = ''
            d['filename'] = ''
            d['md5'] = ''
        try:
            d['homeurl'] = release_data['resources']['homepage']
        except KeyError:
            d['homeurl'] = 'http://metacpan.org/pod/' + package
        if 'abstract' in release_data:
            d['summary'] = repr(release_data['abstract']).lstrip('u')

        d['license'] = (release_data['license'][0]
                        if isinstance(release_data['license'], list)
                        else release_data['license'])
        d['version'] = release_data['version']
        processed_packages.add(packagename + '-' + d['version'])

        # Create import tests
        module_prefix = package.replace('::', '-').split('-')[0]
        if 'provides' in release_data:
            for provided_mod in sorted(set(release_data['provides'])):
                # Filter out weird modules that don't belong
                if (provided_mod.startswith(module_prefix) and
                        '::_' not in provided_mod):
                    d['import_tests'] += indent + provided_mod
        if d['import_tests']:
            d['import_comment'] = ''
        else:
            d['import_comment'] = '# '

        if not exists(dir_path):
            makedirs(dir_path)

        # Write the recipe files
        print("Writing recipe for %s-%s" % (packagename, d['version']))
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(CPAN_META.format(**d))
        with open(join(dir_path, 'build.sh'), 'w') as f:
            if empty_recipe:
                f.write('#!/bin/bash\necho "Nothing to do."\n')
            else:
                f.write(CPAN_BUILD_SH.format(**d))
        with open(join(dir_path, 'bld.bat'), 'w') as f:
            if empty_recipe:
                f.write('echo "Nothing to do."\n')
            else:
                f.write(CPAN_BLD_BAT.format(**d))
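
# A hedged usage sketch: generate recipes for one CPAN distribution and,
# recursively, its dependency tree under ./recipes. The package name and
# output directory are illustrative, and network access to MetaCPAN is
# assumed. A version can be pinned via the `version` argument or inline,
# e.g. 'Moose=2.2011'.
if __name__ == '__main__':
    skeletonize(['Moose'], output_dir='./recipes', recursive=True)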
def perl_vars(config):
    return {
        'PERL_VER': config.variant.get('perl', get_default_variants()[0]['perl']),
    }
def get_lua_ver(config):
    return '.'.join(config.variant.get('lua',
                                       get_default_variants()[0]['lua']).split('.')[:2])
def get_r_ver(config):
    return '.'.join(config.variant.get('r_base',
                                       get_default_variants()[0]['r_base']).split('.')[:3])
def get_py_ver(config):
    return '.'.join(config.variant.get('python',
                                       get_default_variants()[0]['python']).split('.')[:2])
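
# A quick, hedged check of the version helpers with a hypothetical minimal
# config. The variant supplies every key, so the get_default_variants()
# fallback values go unused, though Python still evaluates the default
# argument eagerly, so that function must be importable.
class _VerCfg:
    variant = {'python': '3.6.2', 'lua': '5.2', 'r_base': '3.4.1'}


assert get_py_ver(_VerCfg()) == '3.6'    # truncated to major.minor
assert get_lua_ver(_VerCfg()) == '5.2'
assert get_r_ver(_VerCfg()) == '3.4.1'   # r_base keeps three components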
def msvc_env_cmd(bits, config, override=None):
    log = get_logger(__name__)
    log.warn("Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. "
             "Use {{compiler('c')}} jinja2 in requirements/build, or explicitly list a "
             "compiler package as a build dependency instead.")
    arch_selector = 'x86' if bits == 32 else 'amd64'

    msvc_env_lines = []

    version = None
    if override is not None:
        version = override

    # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
    # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
    # For > 3.5 it literally just skips the validation logic.
    # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
    # for more information.
    msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
    # This is also required to hit the 'don't validate' logic on < 3.5.
    # For > 3.5 this is ignored.
    msvc_env_lines.append('set MSSdk=1')

    if not version:
        py_ver = config.variant.get('python', get_default_variants()[0]['python'])
        if int(py_ver[0]) >= 3:
            # VS 2010 for Python 3.0-3.4, VS 2015 for 3.5+
            # (see https://wiki.python.org/moin/WindowsCompilers)
            if int(py_ver.split('.')[1]) < 5:
                version = '10.0'
            else:
                version = '14.0'
        else:
            version = '9.0'

    if float(version) >= 14.0:
        # For Python 3.5+, ensure that we link with the dynamic runtime. See
        # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
        msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
            version.replace('.', '')))

    vcvarsall_vs_path = build_vcvarsall_vs_path(version)

    def build_vcvarsall_cmd(cmd, arch=arch_selector):
        # Default argument `arch_selector` is defined above
        return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)

    msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
    msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
    msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
    msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(
        VS_VERSION_STRING[version] + {64: ' Win64', 32: ''}[bits]))
    # Tell msys2 to ignore path conversions for issue-causing windows-style flags in build.
    # See https://github.com/conda-forge/icu-feedstock/pull/5
    msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
    msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
    if version == '10.0':
        try:
            WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
                                            'installationfolder')
            WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')

            win_sdk_arch = '/Release /x86' if bits == 32 else '/Release /x64'
            win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)

            # There are two methods of building Python 3.3 and 3.4 extensions (both
            # of which required Visual Studio 2010 - as explained in the Python wiki
            # https://wiki.python.org/moin/WindowsCompilers)
            # 1) Use the Windows SDK 7.1
            # 2) Use Visual Studio 2010 (any edition)
            # However, VS2010 never shipped with a 64-bit compiler, so in this case
            # **only** option (1) applies. For this reason, we always try and
            # activate the Windows SDK first. Unfortunately, unsuccessfully setting
            # up the environment does **not EXIT 1** and therefore we must fall
            # back to attempting to set up VS2010.
            # DelayedExpansion is required for the SetEnv.cmd
            msvc_env_lines.append('Setlocal EnableDelayedExpansion')
            msvc_env_lines.append(win_sdk_cmd)
            # If the WindowsSDKDir environment variable has not been successfully
            # set, then try activating VS2010
            msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
                WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
        # The SDK is not installed. Fall back to only trying VS 2010.
        except KeyError:
            msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
    elif version == '9.0':
        # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
        # and get the 'vcvars64.bat' from inside the bin (in the directory above
        # that returned by distutils_find_vcvarsall)
        try:
            VCVARS64_VS9_BAT_PATH = os.path.join(
                os.path.dirname(distutils_find_vcvarsall(9)),
                'bin', 'vcvars64.bat')
        # There's an exception if VS or the VC compiler for Python are not actually installed.
        except (KeyError, TypeError):
            VCVARS64_VS9_BAT_PATH = None

        error1 = 'if errorlevel 1 {}'

        # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
        # The Visual Studio 2008 Express edition does not properly contain
        # the amd64 build files, so we call the vcvars64.bat manually,
        # rather than using the vcvarsall.bat which would try and call the
        # missing bat file.
        if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
            msvc_env_lines.append(error1.format(
                build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
        # Otherwise, fall back to Microsoft Visual C++ Compiler for Python 2.7
        # by using the logic provided by setuptools
        msvc_env_lines.append(error1.format(
            build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
    else:
        # Visual Studio 14 or otherwise
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))

    return '\n'.join(msvc_env_lines) + '\n'
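
# A hedged usage sketch: the returned string is a batch-script fragment meant
# to be prepended to bld.bat. With a hypothetical Config carrying a
# {'python': '2.7'} variant, `print(msvc_env_cmd(64, cfg))` on a Windows
# machine with Visual Studio installed would emit lines such as (paths
# abbreviated):
#
#     set DISTUTILS_USE_SDK=1
#     set MSSdk=1
#     set "VS_VERSION=9.0"
#     call "C:\...\vcvarsall.bat" amd64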