def validate_lambda_function(dist, attr, value):
    """Setup-keyword validator: *value* must look like an entry-point
    reference of the form ``my_package.some_module:some_function``.

    Raises DistutilsSetupError when the string does not match.
    """
    entry_point_pattern = r"^([a-zA-Z0-9_]+\.)*[a-zA-Z0-9_]+:[a-zA-Z0-9_]+$"
    if re.match(entry_point_pattern, value) is None:
        raise DistutilsSetupError(
            f"{attr} must be in the form of 'my_package.some_module:some_function'"
        )
def check_test_suite(dist, attr, value):
    """Setup-keyword validator: the 'test_suite' keyword must be a string."""
    if isinstance(value, str):
        return
    raise DistutilsSetupError("test_suite must be a string")
def _verify_tag(self):
    """Abort the release if a git tag named ``self.fullname`` already exists.

    Raises DistutilsSetupError when the tag is present in the repository.
    """
    import subprocess
    # Fix: the original used os.system('git tag | grep -q "^%s\\$"' % ...),
    # which interpolates self.fullname into a shell pipeline (shell-injection
    # prone, regex-interprets the name, and requires grep).  Ask git directly
    # with an argument list instead: 'git tag --list <name>' prints the tag
    # name iff it exists.
    result = subprocess.run(
        ['git', 'tag', '--list', self.fullname],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
        check=False,
    )
    if result.stdout.strip():
        raise DistutilsSetupError("Tag '%s' already exists!" % self.fullname)
def _check_extra(extra, reqs):
    """Validate one ``extras_require`` entry.

    *extra* may carry an environment marker after a colon
    (``name:marker``); the marker and the requirement strings are both
    checked, raising DistutilsSetupError on a bad marker.
    """
    marker = extra.partition(':')[2]
    if marker and pkg_resources.invalid_marker(marker):
        raise DistutilsSetupError("Invalid environment marker: " + marker)
    # Force the lazy requirement parser to run so syntax errors surface here.
    list(pkg_resources.parse_requirements(reqs))
def invalid_unless_false(dist, attr, value):
    """Setup-keyword validator for keywords that are only tolerated when
    falsy: a truthy value is an error, a falsy one merely warns that the
    keyword is ignored.
    """
    if value:
        raise DistutilsSetupError(f"{attr} is invalid.")
    warnings.warn(f"{attr} is ignored.", DistDeprecationWarning)
def swig_sources(self, sources, extension):
    """Run SWIG over any '.i' interface files in *sources*.

    Each interface file is replaced in the returned list by its generated
    wrapper ('<name>_wrap.c' or '.cpp'); SWIG's generated '.py' proxy
    modules are appended.  Non-interface sources pass through untouched.

    Raises DistutilsSetupError when an interface's module name does not
    match the extension name, or when a pre-generated wrapper that was
    expected to exist is missing.
    """
    # Assuming SWIG 1.3.14 or later. See compatibility note in
    # http://www.swig.org/Doc1.3/Python.html#Python_nn6
    new_sources = []
    swig_sources = []
    swig_targets = {}
    target_dirs = []
    py_files = []  # swig generated .py files
    target_ext = '.c'
    if self.swig_cpp:
        typ = 'c++'
        is_cpp = True
    else:
        typ = None
        is_cpp = False
    skip_swig = 0
    ext_name = extension.name.split('.')[-1]

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.i':  # SWIG interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
                py_target_dir = self.ext_target_dir
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
                py_target_dir = target_dir
            if os.path.isfile(source):
                name = get_swig_modulename(source)
                # Interface files are expected to be named '_<module>.i',
                # hence ext_name[1:] to drop the leading underscore.
                if name != ext_name[1:]:
                    raise DistutilsSetupError(
                        'mismatch of extension names: %s provides %r'
                        ' but expected %r' % (source, name, ext_name[1:]))
                if typ is None:
                    typ = get_swig_target(source)
                    is_cpp = typ == 'c++'
                    if is_cpp:
                        target_ext = '.cpp'
                else:
                    # A target type was already chosen (by --swig-cpp or an
                    # earlier source); warn on any mismatch.
                    typ2 = get_swig_target(source)
                    if typ != typ2:
                        log.warn('expected %r but source %r defines %r swig target'
                                 % (typ, source, typ2))
                        if typ2 == 'c++':
                            log.warn('resetting swig target to c++ (some targets may have .c extension)')
                            is_cpp = True
                            target_ext = '.cpp'
                        else:
                            log.warn('assuming that %r has c++ swig target' % (source))
                target_file = os.path.join(target_dir, '%s_wrap%s'
                                           % (name, target_ext))
            else:
                # Interface file missing: assume the wrapper was generated
                # previously with "build_src --inplace" and reuse it.
                log.warn(' source %s does not exist: skipping swig\'ing.'
                         % (source))
                name = ext_name[1:]
                skip_swig = 1
                target_file = _find_swig_target(target_dir, name)
                if not os.path.isfile(target_file):
                    log.warn(' target %s does not exist:\n '
                             'Assuming %s_wrap.{c,cpp} was generated with '
                             '"build_src --inplace" command.'
                             % (target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = _find_swig_target(target_dir, name)
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" % (target_file, ))
                    log.warn(' Yes! Using %r as up-to-date target.'
                             % (target_file))
            target_dirs.append(target_dir)
            new_sources.append(target_file)
            py_files.append(os.path.join(py_target_dir, name + '.py'))
            swig_sources.append(source)
            swig_targets[source] = new_sources[-1]
        else:
            new_sources.append(source)

    if not swig_sources:
        return new_sources

    if skip_swig:
        return new_sources + py_files

    # Fix: the original used ``map(self.mkpath, target_dirs)``, which is a
    # lazy iterator on Python 3 and therefore never created the directories.
    for d in target_dirs:
        self.mkpath(d)

    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"]
    if is_cpp:
        swig_cmd.append('-c++')
    for d in extension.include_dirs:
        swig_cmd.append('-I' + d)
    for source in swig_sources:
        target = swig_targets[source]
        depends = [source] + extension.depends
        if self.force or newer_group(depends, target, 'newer'):
            log.info("%s: %s" % (os.path.basename(swig)
                                 + (is_cpp and '++' or ''), source))
            # NOTE(review): '-outdir' reuses py_target_dir from the last
            # iteration of the scanning loop above, i.e. all interfaces are
            # assumed to share one output directory -- preserved as-is.
            self.spawn(swig_cmd + self.swig_opts
                       + ["-o", target, '-outdir', py_target_dir, source])
        else:
            log.debug(" skipping '%s' swig interface (up-to-date)"
                      % (source))

    return new_sources + py_files
def abort(message):
    """Stop the build immediately, reporting *message* to the user."""
    from distutils.errors import DistutilsSetupError as _SetupError
    raise _SetupError(message)
def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable."""
    try:
        _entry_points.load(value)
    except Exception as parse_error:
        # Re-wrap any parse failure as a setup error, keeping the cause.
        raise DistutilsSetupError(parse_error) from parse_error
def build_interp(self, ext):
    """Compile and link one 'interpreters' entry.

    The target is an .exe when ext.target_desc == "executable",
    otherwise a .dll.  Skips the build when the target is newer than
    all sources and dependencies (unless self.force).

    Raises DistutilsSetupError when ext.sources is missing or is not a
    list/tuple.
    """
    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
            "in 'interpreters' option (extension '%s'), "
            "'sources' must be present and must be "
            "a list of source filenames" % ext.name)
    sources = list(sources)

    ext_path = self.get_ext_fullpath(ext.name)
    # Windows-style artefact suffixes selected by target_desc.
    if ext.target_desc == "executable":
        ext_path += ".exe"
    else:
        ext_path += ".dll"
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    # First, compile the source code to object files.

    # XXX not honouring 'define_macros' or 'undef_macros' -- the
    # CCompiler API needs to change to accommodate this, and I
    # want to do one thing at a time!

    # Two possible sources for extra compiler arguments:
    #   - 'extra_compile_args' in Extension object
    #   - CFLAGS environment variable (not particularly
    #     elegant, but people seem to expect it and I
    #     guess it's useful)
    # The environment variable should take precedence, and
    # any sensible compiler will give precedence to later
    # command line args.  Hence we combine them in order:
    extra_args = ext.extra_compile_args or []

    macros = ext.define_macros[:]
    # undef_macros are passed as 1-tuples, which CCompiler treats as
    # undefinitions.
    for undef in ext.undef_macros:
        macros.append((undef, ))

    objects = self.compiler.compile(sources,
                                    output_dir=self.build_temp,
                                    macros=macros,
                                    include_dirs=ext.include_dirs,
                                    debug=self.debug,
                                    extra_postargs=extra_args,
                                    depends=ext.depends)

    # XXX -- this is a Vile HACK!
    #
    # The setup.py script for Python on Unix needs to be able to
    # get this list so it can perform all the clean up needed to
    # avoid keeping object files around when cleaning out a failed
    # build of an extension module.  Since Distutils does not
    # track dependencies, we have to get rid of intermediates to
    # ensure all the intermediates will be properly re-built.
    #
    # self._built_objects = objects[:]

    # Now link the object files together into a "shared object" --
    # of course, first we have to figure out all the other things
    # that go into the mix.
    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []

    # Detect target language, if not provided
    ## language = ext.language or self.compiler.detect_language(sources)
    ## self.compiler.link_shared_object(
    ##     objects, ext_path,
    ##     libraries=self.get_libraries(ext),
    ##     library_dirs=ext.library_dirs,
    ##     runtime_library_dirs=ext.runtime_library_dirs,
    ##     extra_postargs=extra_args,
    ##     export_symbols=self.get_export_symbols(ext),
    ##     debug=self.debug,
    ##     build_temp=self.build_temp,
    ##     target_lang=language)

    # Hm, for Python 3.5 to link a shared library (instead of exe
    # or pyd) we need to add /DLL to the linker arguments.
    # Currently this is done in the setup script; should we do it
    # here?
    self.compiler.link(ext.target_desc, objects, ext_path,
                       libraries=self.get_libraries(ext),
                       library_dirs=ext.library_dirs,
                       runtime_library_dirs=ext.runtime_library_dirs,
                       export_symbols=ext.export_symbols,
                       extra_postargs=extra_args,
                       debug=self.debug)
# Determine the Charm4py version: prefer 'git describe' (strip a leading
# 'v' and any '-<n>-g<hash>' suffix) and cache it in charm4py/_version.py;
# if git is unavailable, fall back to reading the cached _version module.
try:
    charm4py_version = subprocess.check_output(
        ['git', 'describe']).rstrip().decode().split('-')[0]
    if charm4py_version.startswith('v'):
        charm4py_version = charm4py_version[1:]
    with open(os.path.join('charm4py', '_version.py'), 'w') as f:
        f.write("version='" + charm4py_version + "'\n")
except:  # NOTE(review): bare except deliberately catches any git failure
    try:
        # CHARM_NOLOAD presumably stops charm4py from loading the native
        # runtime during this import -- TODO confirm against charm4py.
        os.environ['PYTHONPATH'] = os.getcwd()
        os.environ['CHARM_NOLOAD'] = '1'
        from charm4py import _version
        charm4py_version = _version.version
    except:
        raise DistutilsSetupError('Could not determine Charm4py version')


def charm_built(charm_src_dir):
    # True iff a previous charm++ build left both the shared library and
    # the charmrun binary in place under charm_src_dir/charm/.
    library_path = os.path.join(charm_src_dir, 'charm', 'lib',
                                libcharm_filename)
    if not os.path.exists(library_path):
        return False
    charmrun_path = os.path.join(charm_src_dir, 'charm', 'bin',
                                 charmrun_filename)
    if not os.path.exists(charmrun_path):
        return False
    return True


# NOTE(review): the body of this function is not visible in this chunk of
# the file; the header is preserved as-is.
def check_libcharm_version(charm_src_dir):
def build_libcharm(charm_src_dir, build_dir):
    """Build the charm++ runtime (if not already built) and copy the
    resulting libcharm and charmrun into the build tree and the source
    tree.

    Raises DistutilsSetupError when the sources are missing, the platform
    is unsupported (Windows/cygwin), or any build step fails.
    """
    lib_output_dirs = []
    charmrun_output_dirs = []
    # Artefacts are copied both into the wheel build dir and into the
    # working tree (for in-place use).
    lib_output_dirs.append(os.path.join(build_dir, 'charm4py', '.libs'))
    lib_output_dirs.append(os.path.join(os.getcwd(), 'charm4py', '.libs'))
    charmrun_output_dirs.append(os.path.join(build_dir, 'charmrun'))
    charmrun_output_dirs.append(os.path.join(os.getcwd(), 'charmrun'))
    for output_dir in (lib_output_dirs + charmrun_output_dirs):
        distutils.dir_util.mkpath(output_dir)

    if not os.path.exists(charm_src_dir) or not os.path.isdir(charm_src_dir):
        raise DistutilsSetupError('charm sources dir ' + charm_src_dir +
                                  ' not found')

    if not charm_built(charm_src_dir):

        if system == 'Windows' or system.lower().startswith('cygwin'):
            raise DistutilsSetupError(
                'Building charm++ from setup.py not currently supported on Windows.'
                ' Please download a Charm4py binary wheel (64-bit Python required)'
            )

        # Unpack bundled charm sources if only the tarball is present.
        if os.path.exists(os.path.join(charm_src_dir, 'charm.tar.gz')):
            log.info('Uncompressing charm.tar.gz...')
            cmd = ['tar', 'xf', 'charm.tar.gz']
            p = subprocess.Popen(cmd, cwd=charm_src_dir, shell=False)
            rc = p.wait()
            if rc != 0:
                raise DistutilsSetupError(
                    'An error occured while building charm library')

        # divide by 2 to not hog the system. On systems with hyperthreading,
        # this will likely result in using same # cores as physical cores
        # (therefore not all the logical cores)
        import multiprocessing
        build_num_cores = max(
            int(
                os.environ.get('CHARM_BUILD_PROCESSES',
                               multiprocessing.cpu_count() // 2)), 1)
        extra_build_opts = os.environ.get('CHARM_EXTRA_BUILD_OPTS', '')

        # Select the charm++ build target for the current OS/arch.
        if system == 'Darwin':
            if build_mpi:
                cmd = './build charm4py mpi-darwin-x86_64 -j' + str(
                    build_num_cores) + ' --with-production ' + extra_build_opts
            else:
                cmd = './build charm4py netlrts-darwin-x86_64 tcp -j' + str(
                    build_num_cores) + ' --with-production ' + extra_build_opts
        else:
            try:
                arch = os.uname()[4]
            except:
                arch = None
            if arch is not None and arch.startswith('arm'):
                import re
                # NOTE(review): '\d' in a non-raw string is a deprecated
                # escape; preserved as-is.
                regexp = re.compile("armv(\d+).*")
                m = regexp.match(arch)
                if m:
                    version = int(m.group(1))
                    if version < 8:
                        cmd = './build charm4py netlrts-linux-arm7 tcp -j' + str(
                            build_num_cores
                        ) + ' --with-production ' + extra_build_opts
                    else:
                        cmd = './build charm4py netlrts-linux-arm8 tcp -j' + str(
                            build_num_cores
                        ) + ' --with-production ' + extra_build_opts
                else:
                    cmd = './build charm4py netlrts-linux-arm7 tcp -j' + str(
                        build_num_cores
                    ) + ' --with-production ' + extra_build_opts
            elif arch == "ppc64le":
                if build_mpi:
                    cmd = './build charm4py mpi-linux-ppc64le -j' + str(
                        build_num_cores
                    ) + ' --with-production ' + extra_build_opts
                else:
                    cmd = './build charm4py netlrts-linux-ppc64le tcp -j' + str(
                        build_num_cores
                    ) + ' --with-production ' + extra_build_opts
            else:
                if build_mpi:
                    cmd = './build charm4py mpi-linux-x86_64 -j' + str(
                        build_num_cores
                    ) + ' --with-production ' + extra_build_opts
                else:
                    cmd = './build charm4py netlrts-linux-x86_64 tcp -j' + str(
                        build_num_cores
                    ) + ' --with-production ' + extra_build_opts

        p = subprocess.Popen(cmd.rstrip().split(' '),
                             cwd=os.path.join(charm_src_dir, 'charm'),
                             shell=False)
        rc = p.wait()
        if rc != 0:
            raise DistutilsSetupError(
                'An error occured while building charm library')

        if system == 'Darwin':
            # macOS: rename the built .so to the platform library name and
            # rewrite its install name so @rpath lookups find it.
            old_file_path = os.path.join(charm_src_dir, 'charm', 'lib',
                                         'libcharm.so')
            new_file_path = os.path.join(charm_src_dir, 'charm', 'lib',
                                         libcharm_filename)
            shutil.move(old_file_path, new_file_path)
            cmd = [
                'install_name_tool', '-id',
                '@rpath/../.libs/' + libcharm_filename, new_file_path
            ]
            p = subprocess.Popen(cmd, shell=False)
            rc = p.wait()
            if rc != 0:
                raise DistutilsSetupError('install_name_tool error')

    # verify that the version of charm++ that was built is same or greater than the
    # one required by charm4py
    check_libcharm_version(charm_src_dir)

    # ---- copy libcharm ----
    lib_src_path = os.path.join(charm_src_dir, 'charm', 'lib',
                                libcharm_filename)
    for output_dir in lib_output_dirs:
        log.info('copying ' + os.path.relpath(lib_src_path) + ' to ' +
                 os.path.relpath(output_dir))
        shutil.copy(lib_src_path, output_dir)
    if libcharm_filename2 is not None:
        lib_src_path = os.path.join(charm_src_dir, 'charm', 'lib',
                                    libcharm_filename2)
        for output_dir in lib_output_dirs:
            log.info('copying ' + os.path.relpath(lib_src_path) + ' to ' +
                     os.path.relpath(output_dir))
            shutil.copy(lib_src_path, output_dir)

    # ---- copy charmrun ----
    charmrun_src_path = os.path.join(charm_src_dir, 'charm', 'bin',
                                     charmrun_filename)
    for output_dir in charmrun_output_dirs:
        log.info('copying ' + os.path.relpath(charmrun_src_path) + ' to ' +
                 os.path.relpath(output_dir))
        shutil.copy(charmrun_src_path, output_dir)
def ensure_list(value, attr):
    """Validate that setup keyword *attr* was given a list, else raise."""
    from distutils.errors import DistutilsSetupError  # pylint: disable=import-error,no-name-in-module
    if isinstance(value, list):
        return
    raise DistutilsSetupError('{} must be a list, got {}'.format(
        attr, value.__class__))
def run(self):
    """Build command: run the stock build, then configure and compile
    cppyy-cling with cmake in the out-of-source build directory.

    Raises DistutilsSetupError when cmake configuration or the build
    itself fails; exits the process when the source tree cannot be
    created or STDCXX is invalid.
    """
    # base run
    _build.run(self)

    # custom run
    log.info('Now building cppyy-cling')
    builddir = get_builddir()
    prefix = get_prefix()
    srcdir = get_srcdir()
    if not os.path.exists(srcdir):
        log.info('No src directory ... creating with "python create_src_directory.py"')
        if subprocess.call(['python', 'create_src_directory.py']) != 0:
            log.error('ERROR: the source directory "%s" does not exist' % srcdir)
            log.error('Please run "python create_src_directory.py" first.')
            sys.exit(1)

    if not os.path.exists(builddir):
        log.info('Creating build directory %s ...' % builddir)
        os.makedirs(builddir)

    # get C++ standard to use, if set
    try:
        stdcxx = os.environ['STDCXX']
    except KeyError:
        if is_manylinux():
            stdcxx = '11'
        elif 'win32' in sys.platform:
            # current cmake claims MSVC'17 does not support C++17 yet
            stdcxx = '14'
        else:
            stdcxx = '17'

    if not stdcxx in ['11', '14', '17']:
        log.fatal('FATAL: envar STDCXX should be one of 11, 14, or 17')
        sys.exit(1)

    stdcxx = '-DCMAKE_CXX_STANDARD=' + stdcxx

    # extra optimization flags for Cling
    if not 'EXTRA_CLING_ARGS' in os.environ:
        has_avx = False
        if not is_manylinux():
            # Probe for AVX: /proc/cpuinfo on Linux, sysctl on macOS.
            try:
                for line in open('/proc/cpuinfo', 'r'):
                    if 'avx' in line:
                        has_avx = True
                        break
            except Exception:
                try:
                    cli_arg = subprocess.check_output(
                        ['sysctl', 'machdep.cpu.features'])
                    has_avx = 'avx' in cli_arg.decode("utf-8").strip().lower()
                except Exception:
                    pass
        extra_args = '-O2'
        if has_avx:
            extra_args += ' -mavx'
        os.putenv('EXTRA_CLING_ARGS', extra_args)

    CMAKE_COMMAND = ['cmake', srcdir, stdcxx,
                     '-DLLVM_ENABLE_TERMINFO=0', '-Dminimal=ON',
                     '-Dasimage=OFF', '-Droot7=OFF', '-Dhttp=OFF',
                     '-Dbuiltin_pcre=ON', '-Dbuiltin_freetype=ON',
                     '-Dbuiltin_zlib=ON', '-Dbuiltin_xxhash=ON']
    if 'darwin' in sys.platform:
        CMAKE_COMMAND.append('-Dlibcxx=ON')
    CMAKE_COMMAND.append('-DCMAKE_BUILD_TYPE=' + get_build_type())
    if 'win32' in sys.platform:
        import platform
        if '64' in platform.architecture()[0]:
            CMAKE_COMMAND += ['-Thost=x64',
                              '-DCMAKE_GENERATOR_PLATFORM=x64',
                              '-Dall=OFF', '-Dmathmore=OFF',
                              '-Dbuiltin_ftgl=OFF', '-Droofit=OFF',
                              '-Dgfal=OFF', '-Dfftw3=OFF']
        # Optional FFTW, taken from the environment when both vars are set.
        FFTW_INC = os.environ.get("FFTW_INC", None)
        FFTW_LIB = os.environ.get("FFTW_LIB", None)
        if FFTW_INC and FFTW_LIB:
            CMAKE_COMMAND += ["-DFFTW_INCLUDE_DIR={}".format(FFTW_INC),
                              "-DFFTW_LIBRARY={}".format(FFTW_LIB)]
    else:
        CMAKE_COMMAND += ['-Dbuiltin_freetype=OFF']
    CMAKE_COMMAND.append('-DCMAKE_INSTALL_PREFIX=' + prefix)

    log.info('Running cmake for cppyy-cling: %s', ' '.join(CMAKE_COMMAND))
    if subprocess.call(CMAKE_COMMAND, cwd=builddir) != 0:
        raise DistutilsSetupError('Failed to configure cppyy-cling')

    # use $MAKE to build if it is defined
    env_make = os.getenv('MAKE')
    if not env_make:
        build_cmd = 'cmake'
        # default to using all available cores (x2 if hyperthreading enabled)
        nprocs = os.getenv("MAKE_NPROCS") or '0'
        try:
            nprocs = int(nprocs)
        except ValueError:
            log.warn("Integer expected for MAKE_NPROCS, but got %s (ignored)",
                     nprocs)
            nprocs = 0
        if nprocs < 1:
            nprocs = multiprocessing.cpu_count()
        build_args = ['--build', '.', '--config', get_build_type(), '--']
        if 'win32' in sys.platform:
            build_args.append('/maxcpucount:' + str(nprocs))
        else:
            build_args.append('-j' + str(nprocs))
    else:
        build_args = env_make.split()
        build_cmd, build_args = build_args[0], build_args[1:]
    log.info('Now building cppyy-cling and dependencies ...')
    # Unset MAKE while delegating to cmake --build so recursive makes don't
    # pick it up, then restore it afterwards.
    if env_make:
        os.unsetenv("MAKE")
    if subprocess.call([build_cmd] + build_args, cwd=builddir) != 0:
        raise DistutilsSetupError('Failed to build cppyy-cling')
    if env_make:
        os.putenv('MAKE', env_make)

    log.info('Build finished')
def run(self):
    """Install command: run the stock install, then drive the cmake
    'install' target and copy the cppyy-cling installation into place.

    Raises DistutilsSetupError when the build dir is missing or the
    install step fails.
    """
    # base install
    _install.run(self)

    # custom install of backend
    log.info('Now installing cppyy-cling into cppyy_backend')
    builddir = get_builddir()
    if not os.path.exists(builddir):
        raise DistutilsSetupError('Failed to find build dir!')

    # use $MAKE to install if it is defined
    env_make = os.getenv("MAKE")
    if not env_make:
        install_cmd = 'cmake'
        install_args = ['--build', '.', '--config', get_build_type(),
                        '--target', 'install']
    else:
        install_args = env_make.split()
        install_cmd, install_args = install_args[0], install_args[1:] + ['install']

    prefix = get_prefix()
    log.info('Now creating installation under %s ...', prefix)
    # Unset MAKE around the delegated install so recursive makes don't pick
    # it up, then restore it.
    if env_make:
        os.unsetenv("MAKE")
    if subprocess.call([install_cmd] + install_args, cwd=builddir) != 0:
        raise DistutilsSetupError('Failed to install cppyy-cling')
    if env_make:
        os.putenv("MAKE", env_make)

    # remove allDict.cxx.pch as it's not portable (rebuild on first run, see cppyy)
    log.info('removing allDict.cxx.pch')
    os.remove(os.path.join(get_prefix(), 'etc', 'allDict.cxx.pch'))

    # for manylinux, reset the default cxxversion to 17 if no user override
    if not 'STDCXX' in os.environ and is_manylinux():
        # Rewrite root-config: force the C++17 version flag and feature list.
        log.info('updating root-config to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'bin', 'root-config')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            if line.find('cxxversionflag=', 0, 15) == 0:
                line = 'cxxversionflag="-std=c++1z "\n'
            elif line.find('features=', 0, 9) == 0:
                line = line.replace('cxx11', 'cxx17')
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)
        # root-config must stay executable after the rewrite.
        os.chmod(inp, stat.S_IMODE(os.lstat(inp).st_mode) |
                 stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

        log.info('updating allCppflags.txt to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'etc', 'dictpch', 'allCppflags.txt')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            if '-std=' == line[:5]:
                line = '-std=c++1z\n'
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)

        log.info('updating compiledata.h to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'include', 'compiledata.h')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            line = line.replace('-std=c++11', '-std=c++1z')
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)

    install_path = self._get_install_path()
    log.info('Copying installation to: %s ...', install_path)
    self.copy_tree(os.path.join(get_prefix(), os.path.pardir), install_path)

    log.info('Install finished')
def build_libraries(self, libraries):
    """Compile each (lib_name, build_info) entry and archive it into a
    static library under self.build_clib, recompiling only when any
    object is older than its declared dependencies.

    Raises DistutilsSetupError when 'sources' is missing/not a list, or
    when 'obj_deps' is malformed.
    """
    for (lib_name, build_info) in libraries:
        sources = build_info.get('sources')
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % lib_name)
        sources = list(sources)

        log.info("building '%s' library", lib_name)

        # Make sure everything is the correct type.
        # obj_deps should be a dictionary of keys as sources
        # and a list/tuple of files that are its dependencies.
        obj_deps = build_info.get('obj_deps', dict())
        if not isinstance(obj_deps, dict):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'obj_deps' must be a dictionary of "
                "type 'source: list'" % lib_name)
        dependencies = []

        # Get the global dependencies that are specified by the '' key.
        # These will go into every source's dependency list.
        global_deps = obj_deps.get('', list())
        if not isinstance(global_deps, (list, tuple)):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'obj_deps' must be a dictionary of "
                "type 'source: list'" % lib_name)

        # Build the list to be used by newer_pairwise_group
        # each source will be auto-added to its dependencies.
        for source in sources:
            src_deps = [source]
            src_deps.extend(global_deps)
            extra_deps = obj_deps.get(source, list())
            if not isinstance(extra_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)
            src_deps.extend(extra_deps)
            dependencies.append(src_deps)

        expected_objects = self.compiler.object_filenames(
            sources,
            output_dir=self.build_temp
        )

        if newer_pairwise_group(dependencies, expected_objects) != ([], []):
            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            cflags = build_info.get('cflags')
            objects = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                extra_postargs=cflags,
                debug=self.debug
            )

        # Now "link" the object files together into a static library.
        # (On Unix at least, this isn't really linking -- it just
        # builds an archive.  Whatever.)
        self.compiler.create_static_lib(
            expected_objects,
            lib_name,
            output_dir=self.build_clib,
            debug=self.debug
        )
def build_extension(self, ext):
    """Build a single extension module, dispatching its C, C++, Fortran
    and Fortran-90-module sources to the appropriate compilers and
    linking the result (numpy.distutils style).

    Raises DistutilsSetupError for a malformed 'sources' option and
    DistutilsError when a required compiler is unavailable.
    """
    sources = ext.sources
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'ext_modules' option (extension '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % ext.name)
    sources = list(sources)

    if not sources:
        return

    fullname = self.get_ext_fullname(ext.name)
    if self.inplace:
        # In-place builds put the artefact next to its package sources.
        modpath = fullname.split('.')
        package = '.'.join(modpath[0:-1])
        base = modpath[-1]
        build_py = self.get_finalized_command('build_py')
        package_dir = build_py.get_package_dir(package)
        ext_filename = os.path.join(package_dir,
                                    self.get_ext_filename(base))
    else:
        ext_filename = os.path.join(self.build_lib,
                                    self.get_ext_filename(fullname))
    depends = sources + ext.depends

    if not (self.force or newer_group(depends, ext_filename, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    # undef_macros become 1-tuples, which CCompiler treats as undefinitions.
    for undef in ext.undef_macros:
        macros.append((undef, ))

    c_sources, cxx_sources, f_sources, fmodule_sources = \
        filter_sources(ext.sources)

    if self.compiler.compiler_type == 'msvc':
        if cxx_sources:
            # Needed to compile kiva.agg._agg extension.
            extra_args.append('/Zm1000')
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    # Set Fortran/C++ compilers for compilation and linking.
    if ext.language == 'f90':
        fcompiler = self._f90_compiler
    elif ext.language == 'f77':
        fcompiler = self._f77_compiler
    else:  # in case ext.language is c++, for instance
        fcompiler = self._f90_compiler or self._f77_compiler
    if fcompiler is not None:
        fcompiler.extra_f77_compile_args = (
            ext.extra_f77_compile_args or []) if hasattr(
                ext, 'extra_f77_compile_args') else []
        fcompiler.extra_f90_compile_args = (
            ext.extra_f90_compile_args or []) if hasattr(
                ext, 'extra_f90_compile_args') else []
    cxx_compiler = self._cxx_compiler

    # check for the availability of required compilers
    # NOTE(review): the adjacent string literals below concatenate with no
    # separating space ("...sourcesbut no...") -- message typo preserved.
    if cxx_sources and cxx_compiler is None:
        raise DistutilsError("extension %r has C++ sources"
                             "but no C++ compiler found" % (ext.name))
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("extension %r has Fortran sources "
                             "but no Fortran compiler found" % (ext.name))
    if ext.language in ['f77', 'f90'] and fcompiler is None:
        self.warn("extension %r has Fortran libraries "
                  "but no Fortran linker found, using default linker" % (ext.name))
    if ext.language == 'c++' and cxx_compiler is None:
        self.warn("extension %r has C++ libraries "
                  "but no C++ linker found, using default linker" % (ext.name))

    kws = {'depends': ext.depends}
    output_dir = self.build_temp

    include_dirs = ext.include_dirs + get_numpy_include_dirs()

    c_objects = []
    if c_sources:
        log.info("compiling C sources")
        c_objects = self.compiler.compile(c_sources,
                                          output_dir=output_dir,
                                          macros=macros,
                                          include_dirs=include_dirs,
                                          debug=self.debug,
                                          extra_postargs=extra_args,
                                          **kws)
    if cxx_sources:
        log.info("compiling C++ sources")
        c_objects += cxx_compiler.compile(cxx_sources,
                                          output_dir=output_dir,
                                          macros=macros,
                                          include_dirs=include_dirs,
                                          debug=self.debug,
                                          extra_postargs=extra_args,
                                          **kws)

    extra_postargs = []
    f_objects = []
    if fmodule_sources:
        log.info("compiling Fortran 90 module sources")
        module_dirs = ext.module_dirs[:]
        module_build_dir = os.path.join(
            self.build_temp,
            os.path.dirname(self.get_ext_filename(fullname)))

        self.mkpath(module_build_dir)
        # When the compiler has no module-dir switch, .mod files land in
        # the CWD; snapshot pre-existing ones so only new files get moved.
        if fcompiler.module_dir_switch is None:
            existing_modules = glob('*.mod')
        extra_postargs += fcompiler.module_options(module_dirs,
                                                   module_build_dir)
        f_objects += fcompiler.compile(fmodule_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

        if fcompiler.module_dir_switch is None:
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))

    if f_sources:
        log.info("compiling Fortran sources")
        f_objects += fcompiler.compile(f_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

    # Fortran objects the C compiler cannot link are handled separately.
    if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
        unlinkable_fobjects = f_objects
        objects = c_objects
    else:
        unlinkable_fobjects = []
        objects = c_objects + f_objects

    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []
    libraries = self.get_libraries(ext)[:]
    library_dirs = ext.library_dirs[:]

    linker = self.compiler.link_shared_object
    # Always use system linker when using MSVC compiler.
    if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
        # expand libraries with fcompiler libraries as we are
        # not using fcompiler linker
        self._libs_with_msvc_and_fortran(fcompiler, libraries,
                                         library_dirs)
    elif ext.language in ['f77', 'f90'] and fcompiler is not None:
        linker = fcompiler.link_shared_object
    if ext.language == 'c++' and cxx_compiler is not None:
        linker = cxx_compiler.link_shared_object

    if fcompiler is not None:
        objects, libraries = self._process_unlinkable_fobjects(
            objects, libraries, fcompiler, library_dirs,
            unlinkable_fobjects)

    linker(objects, ext_filename,
           libraries=libraries,
           library_dirs=library_dirs,
           runtime_library_dirs=ext.runtime_library_dirs,
           extra_postargs=extra_args,
           export_symbols=self.get_export_symbols(ext),
           debug=self.debug,
           build_temp=self.build_temp,
           target_lang=ext.language)
def f2py_sources(self, sources, extension):
    """Run f2py over '.pyf' interface files (or plain Fortran sources)
    in *sources*, returning the source list with interfaces replaced by
    the generated '<name>module.c' plus fortranobject.c and any f2py
    wrapper files.

    Raises DistutilsSetupError on extension-name mismatch, multiple
    '.pyf' files, or missing pre-generated targets.
    """
    new_sources = []
    f2py_sources = []
    f_sources = []
    f2py_targets = {}
    target_dirs = []
    ext_name = extension.name.split('.')[-1]
    skip_f2py = 0

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.pyf':  # F2PY interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
            if os.path.isfile(source):
                name = get_f2py_modulename(source)
                if name != ext_name:
                    raise DistutilsSetupError(
                        'mismatch of extension names: %s '
                        'provides %r but expected %r' % (
                            source, name, ext_name))
                target_file = os.path.join(target_dir, name + 'module.c')
            else:
                # Interface file missing: assume the module wrapper was
                # generated previously with "build_src --inplace".
                log.debug(' source %s does not exist: skipping f2py\'ing.'
                          % (source))
                name = ext_name
                skip_f2py = 1
                target_file = os.path.join(target_dir, name + 'module.c')
                if not os.path.isfile(target_file):
                    log.warn(' target %s does not exist:\n '
                             'Assuming %smodule.c was generated with '
                             '"build_src --inplace" command.'
                             % (target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = os.path.join(target_dir, name + 'module.c')
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" % (target_file, ))
                    log.info(' Yes! Using %r as up-to-date target.'
                             % (target_file))
            target_dirs.append(target_dir)
            f2py_sources.append(source)
            f2py_targets[source] = target_file
            new_sources.append(target_file)
        elif fortran_ext_match(ext):
            f_sources.append(source)
        else:
            new_sources.append(source)

    if not (f2py_sources or f_sources):
        return new_sources

    # Fix: the original used ``map(self.mkpath, target_dirs)``, which is a
    # lazy iterator on Python 3 and therefore never created the directories.
    for d in target_dirs:
        self.mkpath(d)

    f2py_options = extension.f2py_options + self.f2py_opts

    if self.distribution.libraries:
        for name, build_info in self.distribution.libraries:
            if name in extension.libraries:
                f2py_options.extend(build_info.get('f2py_options', []))

    log.info("f2py options: %s" % (f2py_options))

    if f2py_sources:
        if len(f2py_sources) != 1:
            raise DistutilsSetupError(
                'only one .pyf file is allowed per extension module but got'
                ' more: %r' % (f2py_sources,))
        source = f2py_sources[0]
        target_file = f2py_targets[source]
        target_dir = os.path.dirname(target_file) or '.'
        depends = [source] + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            log.info("f2py: %s" % (source))
            import numpy.f2py as f2py2e
            f2py2e.run_main(f2py_options
                            + ['--build-dir', target_dir, source])
        else:
            log.debug(" skipping '%s' f2py interface (up-to-date)"
                      % (source))
    else:
        # No interface file: run f2py directly on the Fortran sources.
        #XXX TODO: --inplace support for sdist command
        if is_sequence(extension):
            name = extension[0]
        else:
            name = extension.name
        target_dir = os.path.join(*([self.build_src]
                                    + name.split('.')[:-1]))
        target_file = os.path.join(target_dir, ext_name + 'module.c')
        new_sources.append(target_file)
        depends = f_sources + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            import numpy.f2py as f2py2e
            log.info("f2py:> %s" % (target_file))
            self.mkpath(target_dir)
            f2py2e.run_main(f2py_options
                            + ['--lower', '--build-dir', target_dir]
                            + ['-m', ext_name] + f_sources)
        else:
            log.debug(" skipping f2py fortran files for '%s' (up-to-date)"
                      % (target_file))

    if not os.path.isfile(target_file):
        raise DistutilsError("f2py target file %r not generated"
                             % (target_file, ))

    # Every f2py extension also needs fortranobject.{c,h}; copy them from
    # the installed numpy.f2py package when (re)generation is allowed.
    target_c = os.path.join(self.build_src, 'fortranobject.c')
    target_h = os.path.join(self.build_src, 'fortranobject.h')
    log.info(" adding '%s' to sources." % (target_c))
    new_sources.append(target_c)
    if self.build_src not in extension.include_dirs:
        log.info(" adding '%s' to include_dirs." % (self.build_src))
        extension.include_dirs.append(self.build_src)

    if not skip_f2py:
        import numpy.f2py as f2py2e
        d = os.path.dirname(f2py2e.__file__)
        source_c = os.path.join(d, 'src', 'fortranobject.c')
        source_h = os.path.join(d, 'src', 'fortranobject.h')
        if newer(source_c, target_c) or newer(source_h, target_h):
            self.mkpath(os.path.dirname(target_c))
            self.copy_file(source_c, target_c)
            self.copy_file(source_h, target_h)
    else:
        if not os.path.isfile(target_c):
            raise DistutilsSetupError("f2py target_c file %r not found"
                                      % (target_c, ))
        if not os.path.isfile(target_h):
            raise DistutilsSetupError("f2py target_h file %r not found"
                                      % (target_h, ))

    # Pick up any generated wrapper files.
    for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
        filename = os.path.join(target_dir, ext_name + name_ext)
        if os.path.isfile(filename):
            log.info(" adding '%s' to sources." % (filename))
            f_sources.append(filename)

    return new_sources + f_sources
def error(msg):
    """Report a fatal setup problem by raising DistutilsSetupError."""
    from distutils.errors import DistutilsSetupError as _SetupError
    raise _SetupError(msg)
def run(self):
    """Configure and build the native extension with CMake and make.

    Side effects: mutates the module globals ``BUILD_DIR``,
    ``BUILT_EXTENSIONS`` and ``EIGEN3_INCLUDE_DIR``, changes the working
    directory to ``BUILD_DIR``, downloads and unpacks Eigen when it is not
    already present, and exports ``CC``/``CXX`` into ``os.environ``.

    Raises:
        DistutilsSetupError: when a required tool is missing, the Eigen
            download fails, or any cmake/make step returns non-zero.
        RuntimeError: in skip-build (manual-install) mode when Eigen
            cannot be located.
    """
    global BUILD_DIR, BUILT_EXTENSIONS, EIGEN3_INCLUDE_DIR
    BUILD_DIR = os.path.abspath(self.build_dir)
    if EIGEN3_INCLUDE_DIR is None:
        # Default to an "eigen" checkout inside the build directory.
        EIGEN3_INCLUDE_DIR = os.path.join(BUILD_DIR, "eigen")
    EIGEN3_INCLUDE_DIR = os.path.abspath(EIGEN3_INCLUDE_DIR)
    log.info("CMAKE_PATH=%r" % CMAKE_PATH)
    log.info("MAKE_PATH=%r" % MAKE_PATH)
    log.info("MAKE_FLAGS=%r" % " ".join(MAKE_FLAGS))
    log.info("EIGEN3_INCLUDE_DIR=%r" % EIGEN3_INCLUDE_DIR)
    log.info("EIGEN3_DOWNLOAD_URL=%r" % EIGEN3_DOWNLOAD_URL)
    log.info("CC_PATH=%r" % CC_PATH)
    log.info("CXX_PATH=%r" % CXX_PATH)
    log.info("SCRIPT_DIR=%r" % SCRIPT_DIR)
    log.info("BUILD_DIR=%r" % BUILD_DIR)
    log.info("INSTALL_PREFIX=%r" % INSTALL_PREFIX)
    log.info("PYTHON=%r" % PYTHON)
    # Fix: identity comparison with None instead of `!= None`.
    if CMAKE_PATH is not None:
        run_process([CMAKE_PATH, "--version"])
    if CXX_PATH is not None:
        run_process([CXX_PATH, "--version"])
    # This will generally be called by the pip install
    if not self.skip_build:
        if CMAKE_PATH is None:
            raise DistutilsSetupError("`cmake` not found, and `CMAKE` is not set.")
        if MAKE_PATH is None:
            raise DistutilsSetupError("`make` not found, and `MAKE` is not set.")
        if CC_PATH is None:
            raise DistutilsSetupError("`gcc` not found, and `CC` is not set.")
        if CXX_PATH is None:
            raise DistutilsSetupError("`g++` not found, and `CXX` is not set.")
        # Prepare folders
        if not os.path.isdir(BUILD_DIR):
            log.info("Creating build directory " + BUILD_DIR)
            os.makedirs(BUILD_DIR)
        os.chdir(BUILD_DIR)
        if os.path.isdir(EIGEN3_INCLUDE_DIR):
            log.info("Found eigen in " + EIGEN3_INCLUDE_DIR)
        else:
            try:
                log.info("Fetching Eigen...")
                urlretrieve(EIGEN3_DOWNLOAD_URL, "eigen.zip")
                log.info("Unpacking Eigen...")
                # Fix: close the archive deterministically via `with`.
                with zipfile.ZipFile("eigen.zip", 'r') as zfile:
                    zfile.extractall('eigen')
                # BitBucket packages everything in a tarball with a changing
                # root directory, so grab the only child.
                EIGEN3_INCLUDE_DIR = os.path.join(
                    BUILD_DIR, "eigen", os.listdir('eigen')[0])
            except Exception as exc:
                # Fix: no bare except (it also caught KeyboardInterrupt) and
                # chain the cause so the download failure is diagnosable.
                raise DistutilsSetupError(
                    "Could not download Eigen from " + EIGEN3_DOWNLOAD_URL
                ) from exc
        os.environ["CXX"] = CXX_PATH
        os.environ["CC"] = CC_PATH
        # Build module
        cmake_cmd = [
            CMAKE_PATH,
            SCRIPT_DIR,
            "-DCMAKE_INSTALL_PREFIX=%r" % INSTALL_PREFIX,
            "-DEIGEN3_INCLUDE_DIR=%r" % EIGEN3_INCLUDE_DIR,
            "-DPYTHON=%r" % PYTHON,
        ]
        for env_var in ("BACKEND",):
            value = ENV.get(env_var)
            if value is not None:
                cmake_cmd.append("-D" + env_var + "=%r" % value)
        log.info("Configuring...")
        if run_process(cmake_cmd) != 0:
            raise DistutilsSetupError(" ".join(cmake_cmd))
        make_cmd = [MAKE_PATH] + MAKE_FLAGS
        log.info("Compiling...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))
        make_cmd = [MAKE_PATH, "install"]
        log.info("Installing...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))
    # This will generally be called by the manual install
    else:
        if not os.path.isdir(EIGEN3_INCLUDE_DIR):
            raise RuntimeError("Could not find Eigen in EIGEN3_INCLUDE_DIR={}. If doing manual install, please set the EIGEN3_INCLUDE_DIR variable with the absolute path to Eigen manually. If doing install via pip, please file an issue at the github site.".format(EIGEN3_INCLUDE_DIR))
    BUILT_EXTENSIONS = True  # because make calls build_ext
    _build.run(self)
fileName = os.path.join(dirName, subDir, baseFileName) if os.path.exists(fileName): oracleHome = dirName oracleLibDir = os.path.join(dirName, subDir) oracleVersion = version return True oracleHome = oracleVersion = oracleLibDir = None return False # try to determine the Oracle home userOracleHome = os.environ.get("ORACLE_HOME") if userOracleHome is not None: if not CheckOracleHome(userOracleHome): messageFormat = "Oracle home (%s) does not refer to an " \ "9i, 10g or 11g installation." raise DistutilsSetupError(messageFormat % userOracleHome) else: for path in os.environ["PATH"].split(os.pathsep): if CheckOracleHome(path): break if oracleHome is None: raise DistutilsSetupError("cannot locate an Oracle software " \ "installation") # define some variables if sys.platform == "win32": libDirs = [os.path.join(oracleHome, "bin"), oracleHome, os.path.join(oracleHome, "oci", "lib", "msvc"), os.path.join(oracleHome, "sdk", "lib", "msvc")] possibleIncludeDirs = ["oci/include", "rdbms/demo", "sdk/include"] includeDirs = []
def _exclude_packages(self, packages):
    """Exclude every package named in *packages* from the distribution.

    Raises DistutilsSetupError if *packages* is not a list or tuple.
    """
    if not isinstance(packages, sequence):
        raise DistutilsSetupError(
            "packages: setting must be a list or tuple (%r)" % (packages, ))
    for package in packages:
        self.exclude_package(package)
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    # A genuine boolean (or 0/1) round-trips through bool() unchanged.
    if bool(value) == value:
        return
    raise DistutilsSetupError("%r must be a boolean value (got %r)" % (attr, value))
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        message = "{attr!r} must be a boolean value (got {value!r})".format(
            attr=attr, value=value)
        raise DistutilsSetupError(message)
def build_extensions(self):
    """Build the ps-lite dependency, the server module, and whichever
    framework plugins (TensorFlow / PyTorch / MXNet) are enabled through
    the BYTEPS_WITHOUT_* / BYTEPS_WITH_* environment variables.

    Raises DistutilsSetupError when the ps-lite Makefile or the server
    build fails, and DistutilsError when every plugin build fails.
    """
    pre_setup.setup()
    make_option = ""
    # To resolve tf-gcc incompatibility: detect which
    # _GLIBCXX_USE_CXX11_ABI value TensorFlow was compiled with so the
    # rest of the build can match it.
    has_cxx_flag = False
    glibcxx_flag = False
    if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
        try:
            import tensorflow as tf
            make_option += 'ADD_CFLAGS="'
            for flag in tf.sysconfig.get_compile_flags():
                if 'D_GLIBCXX_USE_CXX11_ABI' in flag:
                    has_cxx_flag = True
                    # The flag's last character is '0' or '1'.
                    glibcxx_flag = False if (flag[-1] == '0') else True
                    make_option += flag + ' '
                    break
            make_option += '" '
        except:
            # Best-effort: TensorFlow may simply not be importable.
            pass
    # To resolve torch-gcc incompatibility
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        try:
            import torch
            torch_flag = torch.compiled_with_cxx11_abi()
            if has_cxx_flag:
                if glibcxx_flag != torch_flag:
                    # NOTE(review): this raise is caught by the bare
                    # `except` below and silently discarded — the ABI
                    # mismatch never reaches the user. Likely unintended.
                    raise DistutilsError(
                        '-D_GLIBCXX_USE_CXX11_ABI is not consistent between TensorFlow and PyTorch, '
                        'consider install them separately.')
                else:
                    pass
            else:
                make_option += 'ADD_CFLAGS=-D_GLIBCXX_USE_CXX11_ABI=' + \
                    str(int(torch_flag)) + ' '
                has_cxx_flag = True
                glibcxx_flag = torch_flag
        except:
            # Best-effort: PyTorch may simply not be importable.
            pass
    # Build ps-lite only when its artifacts are missing.
    if not os.path.exists("3rdparty/ps-lite/build/libps.a") or \
            not os.path.exists("3rdparty/ps-lite/deps/lib"):
        if os.environ.get('CI', 'false') == 'false':
            # Parallel make locally; serial on CI.
            make_option += "-j "
        if has_rdma_header():
            make_option += "USE_RDMA=1 "
        make_option += pre_setup.extra_make_option()
        make_process = subprocess.Popen('make ' + make_option,
                                        cwd='3rdparty/ps-lite',
                                        stdout=sys.stdout,
                                        stderr=sys.stderr,
                                        shell=True)
        make_process.communicate()
        if make_process.returncode:
            raise DistutilsSetupError('An ERROR occured while running the '
                                      'Makefile for the ps-lite library. '
                                      'Exit code: {0}'.format(
                                          make_process.returncode))
    options = get_common_options(self)
    if has_cxx_flag:
        # Propagate the detected ABI flag to every extension build.
        options['COMPILE_FLAGS'] += [
            '-D_GLIBCXX_USE_CXX11_ABI=' + str(int(glibcxx_flag))
        ]
    built_plugins = []
    try:
        build_server(self, options)
    except:
        raise DistutilsSetupError(
            'An ERROR occured while building the server module.\n\n'
            '%s' % traceback.format_exc())
    # If PyTorch is installed, it must be imported before others, otherwise
    # we may get an error: dlopen: cannot load any more object with static TLS
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        dummy_import_torch()
    if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
        try:
            build_tf_extension(self, options)
            built_plugins.append(True)
            print('INFO: Tensorflow extension is built successfully.')
        except:
            # Optional unless BYTEPS_WITH_TENSORFLOW explicitly demands it.
            if not int(os.environ.get('BYTEPS_WITH_TENSORFLOW', 0)):
                print(
                    'INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
                    '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        try:
            torch_version = check_torch_version()
            build_torch_extension(self, options, torch_version)
            built_plugins.append(True)
            print('INFO: PyTorch extension is built successfully.')
        except:
            # Optional unless BYTEPS_WITH_PYTORCH explicitly demands it.
            if not int(os.environ.get('BYTEPS_WITH_PYTORCH', 0)):
                print(
                    'INFO: Unable to build PyTorch plugin, will skip it.\n\n'
                    '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise
    if not int(os.environ.get('BYTEPS_WITHOUT_MXNET', 0)):
        # fix "libcuda.so.1 not found" issue
        cuda_home = os.environ.get('BYTEPS_CUDA_HOME', '/usr/local/cuda')
        cuda_stub_path = cuda_home + '/lib64/stubs'
        # Temporary symlink so the linker can resolve libcuda.so.1
        # without a real driver; removed in the finally block below.
        ln_command = "cd " + cuda_stub_path + "; ln -sf libcuda.so libcuda.so.1"
        os.system(ln_command)
        try:
            build_mx_extension(self, options)
            built_plugins.append(True)
            print('INFO: MXNet extension is built successfully.')
        except:
            # Optional unless BYTEPS_WITH_MXNET explicitly demands it.
            if not int(os.environ.get('BYTEPS_WITH_MXNET', 0)):
                print(
                    'INFO: Unable to build MXNet plugin, will skip it.\n\n'
                    '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise
        finally:
            os.system("rm -rf " + cuda_stub_path + "/libcuda.so.1")
    if not built_plugins:
        # No frameworks were even attempted; the server build is enough.
        print('INFO: Only server module is built.')
        return
    if not any(built_plugins):
        raise DistutilsError(
            'None of TensorFlow, MXNet, PyTorch plugins were built. See errors above.'
        )
def check_entry_points(dist, attr, value):
    """Verify that the ``entry_points`` mapping is parseable."""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as err:
        raise DistutilsSetupError(err) from err
def get_version_string(filename="VERSION"):
    """Return the stripped contents of *filename*.

    Raises:
        DistutilsSetupError: if the file cannot be opened or read; the
            original OSError is chained so the root cause is preserved.
    """
    try:
        with open(filename) as f:
            return f.read().strip()
    except OSError as exc:  # OSError is the modern spelling of IOError
        # Fix: chain the cause instead of silently discarding it.
        raise DistutilsSetupError("failed to read version info") from exc
def build_a_library(self, build_info, lib_name, libraries):
    """Compile one entry of the ``libraries`` option into a static library.

    *build_info* is the per-library dict (sources, macros, include_dirs,
    config_fc, ...); *lib_name* names the output; *libraries* is the full
    list of (name, build_info) pairs, used at the end to propagate
    dependent libraries. Raises DistutilsSetupError for a bad ``sources``
    value and DistutilsError when Fortran sources exist but no Fortran
    compiler is available.
    """
    # default compilers
    compiler = self.compiler
    fcompiler = self._f_compiler
    sources = build_info.get('sources')
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(("in 'libraries' option (library '%s'), " +
                                   "'sources' must be present and must be " +
                                   "a list of source filenames") % lib_name)
    sources = list(sources)
    # Partition the sources by language.
    c_sources, cxx_sources, f_sources, fmodule_sources \
        = filter_sources(sources)
    # F90 is required when there are module sources or it is requested
    # explicitly via the 'language' key.
    requiref90 = not not fmodule_sources or \
        build_info.get('language', 'c') == 'f90'

    # save source type information so that build_ext can use it.
    source_languages = []
    if c_sources:
        source_languages.append('c')
    if cxx_sources:
        source_languages.append('c++')
    if requiref90:
        source_languages.append('f90')
    elif f_sources:
        source_languages.append('f77')
    build_info['source_languages'] = source_languages

    lib_file = compiler.library_filename(lib_name,
                                         output_dir=self.build_clib)
    depends = sources + build_info.get('depends', [])
    # Skip the build entirely when nothing is newer than the library file.
    if not (self.force or newer_group(depends, lib_file, 'newer')):
        log.debug("skipping '%s' library (up-to-date)", lib_name)
        return
    else:
        log.info("building '%s' library", lib_name)

    config_fc = build_info.get('config_fc', {})
    if fcompiler is not None and config_fc:
        # Re-instantiate the Fortran compiler so per-library config_fc
        # overrides can be applied on top of the global config_fc.
        log.info('using additional config_fc from setup script '
                 'for fortran compiler: %s' % (config_fc, ))
        from numpy.distutils.fcompiler import new_fcompiler
        fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                  verbose=self.verbose,
                                  dry_run=self.dry_run,
                                  force=self.force,
                                  requiref90=requiref90,
                                  c_compiler=self.compiler)
        if fcompiler is not None:
            dist = self.distribution
            base_config_fc = dist.get_option_dict('config_fc').copy()
            base_config_fc.update(config_fc)
            fcompiler.customize(base_config_fc)

    # check availability of Fortran compilers
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("library %s has Fortran sources"
                             " but no Fortran compiler found" % (lib_name))

    if fcompiler is not None:
        fcompiler.extra_f77_compile_args = build_info.get(
            'extra_f77_compile_args') or []
        fcompiler.extra_f90_compile_args = build_info.get(
            'extra_f90_compile_args') or []

    macros = build_info.get('macros')
    include_dirs = build_info.get('include_dirs')
    if include_dirs is None:
        include_dirs = []
    extra_postargs = build_info.get('extra_compiler_args') or []

    include_dirs.extend(get_numpy_include_dirs())
    # where compiled F90 module files are:
    module_dirs = build_info.get('module_dirs') or []
    module_build_dir = os.path.dirname(lib_file)
    if requiref90:
        self.mkpath(module_build_dir)

    if compiler.compiler_type == 'msvc':
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    objects = []
    if c_sources:
        log.info("compiling C sources")
        objects = compiler.compile(c_sources,
                                   output_dir=self.build_temp,
                                   macros=macros,
                                   include_dirs=include_dirs,
                                   debug=self.debug,
                                   extra_postargs=extra_postargs)

    if cxx_sources:
        log.info("compiling C++ sources")
        cxx_compiler = compiler.cxx_compiler()
        cxx_objects = cxx_compiler.compile(cxx_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
        objects.extend(cxx_objects)

    if f_sources or fmodule_sources:
        extra_postargs = []
        f_objects = []

        if requiref90:
            if fcompiler.module_dir_switch is None:
                # Compiler drops .mod files in the CWD; snapshot what is
                # already there so only new ones get moved afterwards.
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(
                module_dirs, module_build_dir)

        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)

        if requiref90 and self._f_compiler.module_dir_switch is None:
            # move new compiled F90 module files to module_build_dir
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r'
                             % (f, module_build_dir))

        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
    else:
        f_objects = []

    if f_objects and not fcompiler.can_ccompiler_link(compiler):
        # Default linker cannot link Fortran object files, and results
        # need to be wrapped later. Instead of creating a real static
        # library, just keep track of the object files.
        listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

        listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in objects))

        # create empty "library" file for dependency tracking
        lib_fname = os.path.join(self.build_clib,
                                 lib_name + compiler.static_lib_extension)
        with open(lib_fname, 'wb') as f:
            pass
    else:
        # assume that default linker is suitable for
        # linking Fortran object files
        objects.extend(f_objects)
        compiler.create_static_lib(objects, lib_name,
                                   output_dir=self.build_clib,
                                   debug=self.debug)

    # fix library dependencies: inherit the dependencies of any listed
    # library that this one links against.
    clib_libraries = build_info.get('libraries', [])
    for lname, binfo in libraries:
        if lname in clib_libraries:
            clib_libraries.extend(binfo.get('libraries', []))
    if clib_libraries:
        build_info['libraries'] = clib_libraries
def _get_version(): try: with open('VERSION') as f: return f.read().strip() except IOError: raise DistutilsSetupError("failed to read version info")
def _verify_not_dirty(self):
    """Refuse to proceed while the git working tree has uncommitted changes.

    Relies on `git diff --shortstat` printing anything at all when the
    tree is dirty; `grep -q "."` then exits 0 in that case.
    """
    worktree_dirty = os.system('git diff --shortstat | grep -q "."') == 0
    if worktree_dirty:
        raise DistutilsSetupError("Git has uncommitted changes!")
def winsdk_setenv(platform_arch, build_type):
    """Locate a Windows SDK matching the active MSVC version and import
    its SetEnv.cmd environment into ``os.environ``.

    *platform_arch* selects /x86 vs /x64; *build_type* selects /Debug vs
    /Release. Raises DistutilsSetupError when no matching SDK is found.
    """
    from distutils.msvc9compiler import VERSION as MSVC_VERSION
    from distutils.msvc9compiler import Reg
    from distutils.msvc9compiler import HKEYS
    from distutils.msvc9compiler import WINSDK_BASE

    # Map SDK release -> the MSVC version it ships with.
    sdk_version_map = {
        "v6.0a": 9.0,
        "v6.1": 9.0,
        "v7.0": 9.0,
        "v7.0a": 10.0,
        "v7.1": 10.0
    }

    log.info("Searching Windows SDK with MSVC compiler version %s"
             % MSVC_VERSION)
    setenv_paths = []
    for base in HKEYS:
        sdk_versions = Reg.read_keys(base, WINSDK_BASE)
        if sdk_versions:
            for sdk_version in sdk_versions:
                installationfolder = Reg.get_value(
                    WINSDK_BASE + "\\" + sdk_version, "installationfolder")
                # NOTE(review): productversion is read but never used.
                productversion = Reg.get_value(
                    WINSDK_BASE + "\\" + sdk_version, "productversion")
                setenv_path = os.path.join(installationfolder,
                                           os.path.join('bin', 'SetEnv.cmd'))
                if not os.path.exists(setenv_path):
                    continue
                # Only keep SDKs whose bundled MSVC matches ours.
                if not sdk_version in sdk_version_map:
                    continue
                if sdk_version_map[sdk_version] != MSVC_VERSION:
                    continue
                setenv_paths.append(setenv_path)
    if len(setenv_paths) == 0:
        raise DistutilsSetupError(
            "Failed to find the Windows SDK with MSVC compiler version %s"
            % MSVC_VERSION)
    for setenv_path in setenv_paths:
        log.info("Found %s" % setenv_path)

    # Get SDK env (use latest SDK version installed on system)
    setenv_path = setenv_paths[-1]
    log.info("Using %s " % setenv_path)
    build_arch = "/x86" if platform_arch.startswith("32") else "/x64"
    build_type = "/Debug" if build_type.lower() == "debug" else "/Release"
    setenv_cmd = [setenv_path, build_arch, build_type]
    # Run SetEnv.cmd in a subshell and capture the environment it produces.
    setenv_env = get_environment_from_batch_command(setenv_cmd)
    # Split the env into PATH entries vs everything else (PATH is merged,
    # the rest is assigned directly).
    setenv_env_paths = os.pathsep.join(
        [setenv_env[k] for k in setenv_env if k.upper() == 'PATH']
    ).split(os.pathsep)
    setenv_env_without_paths = dict(
        [(k, setenv_env[k]) for k in setenv_env if k.upper() != 'PATH'])

    # Extend os.environ with SDK env
    log.info("Initializing Windows SDK env...")
    update_env_path(setenv_env_paths)
    for k in sorted(setenv_env_without_paths):
        v = setenv_env_without_paths[k]
        log.info("Inserting \"%s = %s\" to environment" % (k, v))
        os.environ[k] = v
    log.info("Done initializing Windows SDK env")