def generate_a_cython_source(self, base, ext_name, source, extension):
    if self.inplace or not have_cython():
        target_dir = os.path.dirname(base)
    else:
        target_dir = appendpath(self.build_src, os.path.dirname(base))
    target_file = os.path.join(target_dir, ext_name + '.c')
    depends = [source] + extension.depends
    if self.force or newer_group(depends, target_file, 'newer'):
        if have_cython():
            import Cython.Compiler.Main
            log.info("cythonc:> %s: %s " % (target_dir, target_file))
            log.info("cwd %s " % (os.getcwd()))
            self.mkpath(target_dir)
            options = Cython.Compiler.Main.CompilationOptions(
                defaults=Cython.Compiler.Main.default_options,
                include_path=extension.include_dirs,
                output_file=target_file)
            #log.info('\n'.join([s + ' ' + str(getattr(options, s)) for s in dir(options)]))
            # avoid calling compile_single, because it will give wrong module names.
            cython_result = Cython.Compiler.Main.compile([source], options=options)
            if cython_result.num_errors != 0:
                raise DistutilsError("%d errors while compiling %r with Cython"
                                     % (cython_result.num_errors, source))
        elif os.path.isfile(target_file):
            log.warn("Cython required for compiling %r but not available,"
                     " using old target %r" % (source, target_file))
        else:
            raise DistutilsError("Cython required for compiling %r"
                                 " but not available" % (source,))
    return target_file
def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries, c_library_dirs): if fcompiler is None: return for libname in c_libraries: if libname.startswith('msvc'): continue fileexists = False for libdir in c_library_dirs or []: libfile = os.path.join(libdir, '%s.lib' % (libname)) if os.path.isfile(libfile): fileexists = True break if fileexists: continue # make g77-compiled static libs available to MSVC fileexists = False for libdir in c_library_dirs: libfile = os.path.join(libdir, 'lib%s.a' % (libname)) if os.path.isfile(libfile): # copy libname.a file to name.lib so that MSVC linker # can find it libfile2 = os.path.join(self.build_temp, libname + '.lib') copy_file(libfile, libfile2) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp) fileexists = True break if fileexists: continue log.warn('could not find library %r in directories %s' % (libname, c_library_dirs)) # Always use system linker when using MSVC compiler. f_lib_dirs = [] for dir in fcompiler.library_dirs: # correct path when compiling in Cygwin but with normal Win # Python if dir.startswith('/usr/lib'): try: dir = subprocess.check_output(['cygpath', '-w', dir]) except (OSError, subprocess.CalledProcessError): pass else: dir = filepath_from_subprocess_output(dir) f_lib_dirs.append(dir) c_library_dirs.extend(f_lib_dirs) # make g77-compiled static libs available to MSVC for lib in fcompiler.libraries: if not lib.startswith('msvc'): c_libraries.append(lib) p = combine_paths(f_lib_dirs, 'lib' + lib + '.a') if p: dst_name = os.path.join(self.build_temp, lib + '.lib') if not os.path.isfile(dst_name): copy_file(p[0], dst_name) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp)
def _find_existing_fcompiler(compiler_types, osname=None, platform=None, requiref90=False, c_compiler=None): from numpy.distutils.core import get_distribution dist = get_distribution(always=True) for compiler_type in compiler_types: v = None try: c = new_fcompiler(plat=platform, compiler=compiler_type, c_compiler=c_compiler) c.customize(dist) v = c.get_version() if requiref90 and c.compiler_f90 is None: v = None new_compiler = c.suggested_f90_compiler if new_compiler: log.warn( "Trying %r compiler as suggested by %r " "compiler for f90 support." % (compiler_type, new_compiler) ) c = new_fcompiler(plat=platform, compiler=new_compiler, c_compiler=c_compiler) c.customize(dist) v = c.get_version() if v is not None: compiler_type = new_compiler if requiref90 and c.compiler_f90 is None: raise ValueError("%s does not support compiling f90 codes, " "skipping." % (c.__class__.__name__)) except DistutilsModuleError: log.debug("_find_existing_fcompiler: compiler_type='%s' raised DistutilsModuleError", compiler_type) except CompilerNotFound: log.debug("_find_existing_fcompiler: compiler_type='%s' not found", compiler_type) if v is not None: return compiler_type return None
def finalize_options(self): log.info("unifing config_fc, config, build_clib, build_ext, build commands --fcompiler options") build_clib = self.get_finalized_command("build_clib") build_ext = self.get_finalized_command("build_ext") config = self.get_finalized_command("config") build = self.get_finalized_command("build") cmd_list = [self, config, build_clib, build_ext, build] for a in ["fcompiler"]: l = [] for c in cmd_list: v = getattr(c, a) if v is not None: if not isinstance(v, str): v = v.compiler_type if v not in l: l.append(v) if not l: v1 = None else: v1 = l[0] if len(l) > 1: log.warn(" commands have different --%s options: %s" ", using first in list as default" % (a, l)) if v1: for c in cmd_list: if getattr(c, a) is None: setattr(c, a, v1)
def CCompiler_customize(self, dist, need_cxx=0):
    # See FCompiler.customize for suggested usage.
    log.info('customize %s' % (self.__class__.__name__))
    customize_compiler(self)
    if need_cxx:
        # In general, distutils uses -Wstrict-prototypes, but this option is
        # not valid for C++ code, only for C. Remove it if it's there to
        # avoid a spurious warning on every compilation. All the default
        # options used by distutils can be extracted with:
        # from distutils import sysconfig
        # sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
        # 'CCSHARED', 'LDSHARED', 'SO')
        print("compiler options1:", self.compiler_so)
        try:
            self.compiler_so.remove('-Wstrict-prototypes')
        except (AttributeError, ValueError):
            pass
        print("compiler options2:", self.compiler_so)
        if hasattr(self, 'compiler') and self.compiler[0].find('cc') >= 0:
            if not self.compiler_cxx:
                if self.compiler[0].startswith('gcc'):
                    a, b = 'gcc', 'g++'
                else:
                    a, b = 'cc', 'c++'
                self.compiler_cxx = [self.compiler[0].replace(a, b)] \
                                    + self.compiler[1:]
        else:
            if hasattr(self, 'compiler'):
                log.warn("#### %s #######" % (self.compiler,))
            log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__)
    return
def generate_a_pyrex_source(self, base, ext_name, source, extension):
    if self.inplace or not have_pyrex():
        target_dir = os.path.dirname(base)
    else:
        target_dir = appendpath(self.build_src, os.path.dirname(base))
    target_file = os.path.join(target_dir, ext_name + ".c")
    depends = [source] + extension.depends
    if self.force or newer_group(depends, target_file, "newer"):
        if have_pyrex():
            import Pyrex.Compiler.Main
            log.info("pyrexc:> %s" % (target_file))
            self.mkpath(target_dir)
            options = Pyrex.Compiler.Main.CompilationOptions(
                defaults=Pyrex.Compiler.Main.default_options,
                include_path=extension.include_dirs,
                output_file=target_file,
            )
            pyrex_result = Pyrex.Compiler.Main.compile(source, options=options)
            if pyrex_result.num_errors != 0:
                raise DistutilsError("%d errors while compiling %r with Pyrex"
                                     % (pyrex_result.num_errors, source))
        elif os.path.isfile(target_file):
            log.warn("Pyrex required for compiling %r but not available,"
                     " using old target %r" % (source, target_file))
        else:
            raise DistutilsError("Pyrex required for compiling %r"
                                 " but not available" % (source,))
    return target_file
def new_fcompiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0,
                  requiref90=False, c_compiler=None):
    """Generate an instance of some FCompiler subclass for the supplied
    platform/compiler combination.
    """
    load_all_fcompiler_classes()
    if plat is None:
        plat = os.name
    if compiler is None:
        compiler = get_default_fcompiler(plat, requiref90=requiref90,
                                         c_compiler=c_compiler)
    if compiler in fcompiler_class:
        module_name, klass, long_description = fcompiler_class[compiler]
    elif compiler in fcompiler_aliases:
        module_name, klass, long_description = fcompiler_aliases[compiler]
    else:
        msg = "don't know how to compile Fortran code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler." % compiler
        msg = msg + " Supported compilers are: %s" \
              % (','.join(fcompiler_class.keys()))
        log.warn(msg)
        return None
    compiler = klass(verbose=verbose, dry_run=dry_run, force=force)
    compiler.c_compiler = c_compiler
    return compiler
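# Illustrative sketch (not part of the original module) of driving new_fcompiler
# directly, using only calls that appear elsewhere in this file; 'gnu95' is an
# assumed example compiler name, and the helper itself is hypothetical.
def _example_new_fcompiler_usage():
    c = new_fcompiler(plat=os.name, compiler='gnu95', requiref90=True)
    if c is None:
        return None         # no FCompiler class known for this platform/compiler
    c.customize()           # same call pattern as the _find_existing_fcompiler helpers
    return c.get_version()  # None if the compiler is not actually installed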
def CCompiler_customize(self, dist, need_cxx=0): """ Do any platform-specific customization of a compiler instance. This method calls `distutils.sysconfig.customize_compiler` for platform-specific customization, as well as optionally remove a flag to suppress spurious warnings in case C++ code is being compiled. Parameters ---------- dist : object This parameter is not used for anything. need_cxx : bool, optional Whether or not C++ has to be compiled. If so (True), the ``"-Wstrict-prototypes"`` option is removed to prevent spurious warnings. Default is False. Returns ------- None Notes ----- All the default options used by distutils can be extracted with:: from distutils import sysconfig sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS', 'CCSHARED', 'LDSHARED', 'SO') """ # See FCompiler.customize for suggested usage. log.info('customize %s' % (self.__class__.__name__)) customize_compiler(self) if need_cxx: # In general, distutils uses -Wstrict-prototypes, but this option is # not valid for C++ code, only for C. Remove it if it's there to # avoid a spurious warning on every compilation. All the default # options used by distutils can be extracted with: # from distutils import sysconfig # sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS', # 'CCSHARED', 'LDSHARED', 'SO') try: self.compiler_so.remove('-Wstrict-prototypes') except (AttributeError, ValueError): pass if hasattr(self,'compiler') and 'cc' in self.compiler[0]: if not self.compiler_cxx: if self.compiler[0].startswith('gcc'): a, b = 'gcc', 'g++' else: a, b = 'cc', 'c++' self.compiler_cxx = [self.compiler[0].replace(a,b)]\ + self.compiler[1:] else: if hasattr(self,'compiler'): log.warn("#### %s #######" % (self.compiler,)) log.warn('Missing compiler_cxx fix for '+self.__class__.__name__) return
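# Small companion snippet: the docstring above notes that the default distutils
# compiler settings can be inspected directly; this just spells that out.
from distutils import sysconfig
print(sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                'CCSHARED', 'LDSHARED', 'SO'))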
def CCompiler_get_version(self, force=0, ok_status=[0]): """ Compiler version. Returns None if compiler is not available. """ if not force and hasattr(self,'version'): return self.version try: version_cmd = self.version_cmd except AttributeError: return None cmd = ' '.join(version_cmd) try: matcher = self.version_match except AttributeError: try: pat = self.version_pattern except AttributeError: return None def matcher(version_string): m = re.match(pat, version_string) if not m: return None version = m.group('version') return version status, output = exec_command(cmd,use_tee=0) version = None if status in ok_status: version = matcher(output) if not version: log.warn("Couldn't match compiler version for %r" % (output,)) else: version = LooseVersion(version) self.version = version return version
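# Sketch of the two hooks CCompiler_get_version relies on, shown as assumed
# example values on a hypothetical compiler class: version_cmd is the command
# whose output is scanned, and version_pattern must expose a 'version' group.
class _ExampleCompilerVersionHooks:
    version_cmd = ['gcc', '-dumpversion']           # hypothetical version command
    version_pattern = r'(?P<version>\d+(\.\d+)+)'   # must define group 'version'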
def generate_def(dll, dfile): """Given a dll file location, get all its exported symbols and dump them into the given def file. The .def file will be overwritten""" dump = dump_table(dll) for i in range(len(dump)): if _START.match(dump[i].decode()): break else: raise ValueError("Symbol table not found") syms = [] for j in range(i+1, len(dump)): m = _TABLE.match(dump[j].decode()) if m: syms.append((int(m.group(1).strip()), m.group(2))) else: break if len(syms) == 0: log.warn('No symbols found in %s' % dll) d = open(dfile, 'w') d.write('LIBRARY %s\n' % os.path.basename(dll)) d.write(';CODE PRELOAD MOVEABLE DISCARDABLE\n') d.write(';DATA PRELOAD SINGLE\n') d.write('\nEXPORTS\n') for s in syms: #d.write('@%d %s\n' % (s[0], s[1])) d.write('%s\n' % s[1]) d.close()
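# For reference, a .def file written by generate_def has this shape (the dll
# name and symbol names below are placeholders, not real exports):
#
#   LIBRARY example.dll
#   ;CODE PRELOAD MOVEABLE DISCARDABLE
#   ;DATA PRELOAD SINGLE
#
#   EXPORTS
#   some_symbol
#   another_symbol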
def _build_import_library_x86(): """ Build the import libraries for Mingw32-gcc on Windows """ lib_name = "python%d%d.lib" % tuple(sys.version_info[:2]) lib_file = os.path.join(sys.prefix, 'libs', lib_name) out_name = "libpython%d%d.a" % tuple(sys.version_info[:2]) out_file = os.path.join(sys.prefix, 'libs', out_name) if not os.path.isfile(lib_file): log.warn('Cannot build import library: "%s" not found' % (lib_file)) return if os.path.isfile(out_file): log.debug('Skip building import library: "%s" exists' % (out_file)) return log.info('Building import library (ARCH=x86): "%s"' % (out_file)) from numpy.distutils import lib2def def_name = "python%d%d.def" % tuple(sys.version_info[:2]) def_file = os.path.join(sys.prefix, 'libs', def_name) nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file) nm_output = lib2def.getnm(nm_cmd) dlist, flist = lib2def.parse_nm(nm_output) lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, open(def_file, 'w')) dll_name = "python%d%d.dll" % tuple(sys.version_info[:2]) args = (dll_name, def_file, out_file) cmd = 'dlltool --dllname %s --def %s --output-lib %s' % args status = os.system(cmd) # for now, fail silently if status: log.warn('Failed to build import library for gcc. Linking will fail.') return
def package_check(pkg_name, version=None, optional=False, checker=LooseVersion, version_getter=None, ): ''' Check if package `pkg_name` is present, and correct version Parameters ---------- pkg_name : str name of package as imported into python version : {None, str}, optional minimum version of the package that we require. If None, we don't check the version. Default is None optional : {False, True}, optional If False, raise error for absent package or wrong version; otherwise warn checker : callable, optional callable with which to return comparable thing from version string. Default is ``distutils.version.LooseVersion`` version_getter : {None, callable}: Callable that takes `pkg_name` as argument, and returns the package version string - as in:: ``version = version_getter(pkg_name)`` If None, equivalent to:: mod = __import__(pkg_name); version = mod.__version__`` ''' if version_getter is None: def version_getter(pkg_name): mod = __import__(pkg_name) return mod.__version__ try: mod = __import__(pkg_name) except ImportError: if not optional: raise RuntimeError('Cannot import package "%s" ' '- is it installed?' % pkg_name) log.warn('Missing optional package "%s"; ' 'you may get run-time errors' % pkg_name) return if not version: return try: have_version = version_getter(pkg_name) except AttributeError: raise RuntimeError('Cannot find version for %s' % pkg_name) if checker(have_version) < checker(version): v_msg = 'You have version %s of package "%s"' \ ' but we need version >= %s' % ( have_version, pkg_name, version, ) if optional: log.warn(v_msg + '; you may get run-time errors') else: raise RuntimeError(v_msg)
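# Hedged usage sketch for package_check as documented above; the package names
# and minimum versions are arbitrary examples.
package_check('numpy', version='1.0')                   # raises RuntimeError if missing or too old
package_check('scipy', version='0.6', optional=True)    # only warns via log.warn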
def _find_existing_fcompiler(compilers, osname=None, platform=None,
                             requiref90=None):
    for compiler in compilers:
        v = None
        try:
            c = new_fcompiler(plat=platform, compiler=compiler)
            c.customize()
            v = c.get_version()
            if requiref90 and c.compiler_f90 is None:
                v = None
                new_compiler = c.suggested_f90_compiler
                if new_compiler:
                    log.warn('Trying %r compiler as suggested by %r compiler '
                             'for f90 support.' % (compiler, new_compiler))
                    c = new_fcompiler(plat=platform, compiler=new_compiler)
                    c.customize()
                    v = c.get_version()
                    if v is not None:
                        compiler = new_compiler
            if requiref90 and c.compiler_f90 is None:
                raise ValueError('%s does not support compiling f90 codes, '
                                 'skipping.' % (c.__class__.__name__))
        except DistutilsModuleError:
            pass
        except Exception as msg:
            log.warn(msg)
        if v is not None:
            return compiler
def build_msvcr_library(debug=False): if os.name != 'nt': return False # If the version number is None, then we couldn't find the MSVC runtime at # all, because we are running on a Python distribution which is customed # compiled; trust that the compiler is the same as the one available to us # now, and that it is capable of linking with the correct runtime without # any extra options. msvcr_ver = msvc_runtime_major() if msvcr_ver is None: log.debug('Skip building import library: ' 'Runtime is not compiled with MSVC') return False # Skip using a custom library for versions < MSVC 8.0 if msvcr_ver < 80: log.debug('Skip building msvcr library:' ' custom functionality not present') return False msvcr_name = msvc_runtime_library() if debug: msvcr_name += 'd' # Skip if custom library already exists out_name = "lib%s.a" % msvcr_name out_file = os.path.join(sys.prefix, 'libs', out_name) if os.path.isfile(out_file): log.debug('Skip building msvcr library: "%s" exists' % (out_file,)) return True # Find the msvcr dll msvcr_dll_name = msvcr_name + '.dll' dll_file = find_dll(msvcr_dll_name) if not dll_file: log.warn('Cannot build msvcr library: "%s" not found' % msvcr_dll_name) return False def_name = "lib%s.def" % msvcr_name def_file = os.path.join(sys.prefix, 'libs', def_name) log.info('Building msvcr library: "%s" (from %s)' \ % (out_file, dll_file)) # Generate a symbol definition file from the msvcr dll generate_def(dll_file, def_file) # Create a custom mingw library for the given symbol definitions cmd = ['dlltool', '-d', def_file, '-l', out_file] retcode = subprocess.call(cmd) # Clean up symbol definitions os.remove(def_file) return (not retcode)
def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries, c_library_dirs): if fcompiler is None: return for libname in c_libraries: if libname.startswith("msvc"): continue fileexists = False for libdir in c_library_dirs or []: libfile = os.path.join(libdir, "%s.lib" % (libname)) if os.path.isfile(libfile): fileexists = True break if fileexists: continue # make g77-compiled static libs available to MSVC fileexists = False for libdir in c_library_dirs: libfile = os.path.join(libdir, "lib%s.a" % (libname)) if os.path.isfile(libfile): # copy libname.a file to name.lib so that MSVC linker # can find it libfile2 = os.path.join(self.build_temp, libname + ".lib") copy_file(libfile, libfile2) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp) fileexists = True break if fileexists: continue log.warn("could not find library %r in directories %s" % (libname, c_library_dirs)) # Always use system linker when using MSVC compiler. f_lib_dirs = [] for dir in fcompiler.library_dirs: # correct path when compiling in Cygwin but with normal Win # Python if dir.startswith("/usr/lib"): s, o = exec_command(["cygpath", "-w", dir], use_tee=False) if not s: dir = o f_lib_dirs.append(dir) c_library_dirs.extend(f_lib_dirs) # make g77-compiled static libs available to MSVC for lib in fcompiler.libraries: if not lib.startswith("msvc"): c_libraries.append(lib) p = combine_paths(f_lib_dirs, "lib" + lib + ".a") if p: dst_name = os.path.join(self.build_temp, lib + ".lib") if not os.path.isfile(dst_name): copy_file(p[0], dst_name) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp)
def _exec_command_posix( command, use_shell = None, use_tee = None, **env ): log.debug('_exec_command_posix(...)') if is_sequence(command): command_str = ' '.join(list(command)) else: command_str = command tmpfile = temp_file_name() stsfile = None if use_tee: stsfile = temp_file_name() filter = '' if use_tee == 2: filter = r'| tr -cd "\n" | tr "\n" "."; echo' command_posix = '( %s ; echo $? > %s ) 2>&1 | tee %s %s'\ % (command_str, stsfile, tmpfile, filter) else: stsfile = temp_file_name() command_posix = '( %s ; echo $? > %s ) > %s 2>&1'\ % (command_str, stsfile, tmpfile) #command_posix = '( %s ) > %s 2>&1' % (command_str,tmpfile) log.debug('Running os.system(%r)' % (command_posix)) status = os.system(command_posix) if use_tee: if status: # if command_tee fails then fall back to robust exec_command log.warn('_exec_command_posix failed (status=%s)' % status) return _exec_command(command, use_shell=use_shell, **env) if stsfile is not None: f = open_latin1(stsfile, 'r') status_text = f.read() status = int(status_text) f.close() os.remove(stsfile) f = open_latin1(tmpfile, 'r') text = f.read() f.close() os.remove(tmpfile) if text[-1:]=='\n': text = text[:-1] return status, text
def build_msvcr_library(debug=False): if os.name != 'nt': return False msvcr_name = msvc_runtime_library() # Skip using a custom library for versions < MSVC 8.0 msvcr_ver = msvc_runtime_major() if msvcr_ver and msvcr_ver < 80: log.debug('Skip building msvcr library:' ' custom functionality not present') return False if debug: msvcr_name += 'd' # Skip if custom library already exists out_name = "lib%s.a" % msvcr_name out_file = os.path.join(sys.prefix, 'libs', out_name) if os.path.isfile(out_file): log.debug('Skip building msvcr library: "%s" exists' % (out_file,)) return True # Find the msvcr dll msvcr_dll_name = msvcr_name + '.dll' dll_file = find_dll(msvcr_dll_name) if not dll_file: log.warn('Cannot build msvcr library: "%s" not found' % msvcr_dll_name) return False def_name = "lib%s.def" % msvcr_name def_file = os.path.join(sys.prefix, 'libs', def_name) log.info('Building msvcr library: "%s" (from %s)' \ % (out_file, dll_file)) # Generate a symbol definition file from the msvcr dll generate_def(dll_file, def_file) # Create a custom mingw library for the given symbol definitions cmd = ['dlltool', '-d', def_file, '-l', out_file] retcode = subprocess.call(cmd) # Clean up symbol definitions os.remove(def_file) return (not retcode)
def generate_a_script(build_dir, script=script, config=config): dist = config.get_distribution() install_lib = dist.get_command_obj('install_lib') if not install_lib.finalized: install_lib.finalize_options() script_replace_text = '' install_lib = install_lib.install_dir if install_lib is not None: script_replace_text = ''' import sys if %(d)r not in sys.path: sys.path.insert(0, %(d)r) ''' % dict(d=install_lib) if multiprocessing is not None: mp_install_lib = dirname(dirname(multiprocessing.__file__)) script_replace_text += ''' if %(d)r not in sys.path: sys.path.insert(0, %(d)r) ''' % dict(d=mp_install_lib) start_mark = '### START UPDATE SYS.PATH ###' end_mark = '### END UPDATE SYS.PATH ###' name = basename(script) if name.startswith (script_prefix): target_name = name elif wininst: target_name = script_prefix + '_' + name else: target_name = script_prefix + '.' + splitext(name)[0] target = join(build_dir, target_name) if newer(script, target) or 1: log.info('Creating %r', target) f = open (script, 'r') text = f.read() f.close() i = text.find(start_mark) if i != -1: j = text.find (end_mark) if j == -1: log.warn ("%r missing %r line", script, start_mark) new_text = text[:i+len (start_mark)] + script_replace_text + text[j:] else: new_text = text f = open(target, 'w') f.write(new_text) f.close()
def find_executable(exe, path=None, _cache={}):
    """Return full path of an executable or None.

    Symbolic links are not followed.
    """
    key = exe, path
    try:
        return _cache[key]
    except KeyError:
        pass
    log.debug('find_executable(%r)' % exe)
    orig_exe = exe
    if path is None:
        path = os.environ.get('PATH', os.defpath)
    if os.name == 'posix':
        realpath = os.path.realpath
    else:
        realpath = lambda a: a
    if exe.startswith('"'):
        exe = exe[1:-1]
    suffixes = ['']
    if os.name in ['nt', 'dos', 'os2']:
        fn, ext = os.path.splitext(exe)
        extra_suffixes = ['.exe', '.com', '.bat']
        if ext.lower() not in extra_suffixes:
            suffixes = extra_suffixes
    if os.path.isabs(exe):
        paths = ['']
    else:
        paths = [os.path.abspath(p) for p in path.split(os.pathsep)]
    for path in paths:
        fn = os.path.join(path, exe)
        for s in suffixes:
            f_ext = fn + s
            if not os.path.islink(f_ext):
                f_ext = realpath(f_ext)
            if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK):
                log.info('Found executable %s' % f_ext)
                _cache[key] = f_ext
                return f_ext
    log.warn('Could not locate executable %s' % orig_exe)
    return None
def find_executable(exe, path=None, _cache={}):
    """Return full path of an executable or None.

    Symbolic links are not followed.
    """
    key = exe, path
    try:
        return _cache[key]
    except KeyError:
        pass
    log.debug("find_executable(%r)" % exe)
    orig_exe = exe
    if path is None:
        path = os.environ.get("PATH", os.defpath)
    if os.name == "posix":
        realpath = os.path.realpath
    else:
        realpath = lambda a: a
    if exe.startswith('"'):
        exe = exe[1:-1]
    suffixes = [""]
    if os.name in ["nt", "dos", "os2"]:
        fn, ext = os.path.splitext(exe)
        extra_suffixes = [".exe", ".com", ".bat"]
        if ext.lower() not in extra_suffixes:
            suffixes = extra_suffixes
    if os.path.isabs(exe):
        paths = [""]
    else:
        paths = [os.path.abspath(p) for p in path.split(os.pathsep)]
    for path in paths:
        fn = os.path.join(path, exe)
        for s in suffixes:
            f_ext = fn + s
            if not os.path.islink(f_ext):
                f_ext = realpath(f_ext)
            if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK):
                log.good("Found executable %s" % f_ext)
                _cache[key] = f_ext
                return f_ext
    log.warn("Could not locate executable %s" % orig_exe)
    return None
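# Minimal usage sketch for find_executable; 'gfortran' and 'swig' are just
# example program names.
gfortran_exe = find_executable('gfortran')
swig_exe = find_executable('swig', path=os.environ.get('PATH', os.defpath))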
def _build_import_library_x86(): """ Build the import libraries for Mingw32-gcc on Windows """ out_exists, out_file = _check_for_import_lib() if out_exists: log.debug('Skip building import library: "%s" exists', out_file) return lib_name = "python%d%d.lib" % tuple(sys.version_info[:2]) lib_file = os.path.join(sys.prefix, 'libs', lib_name) if not os.path.isfile(lib_file): # didn't find library file in virtualenv, try base distribution, too, # and use that instead if found there. for Python 2.7 venvs, the base # directory is in attribute real_prefix instead of base_prefix. if hasattr(sys, 'base_prefix'): base_lib = os.path.join(sys.base_prefix, 'libs', lib_name) elif hasattr(sys, 'real_prefix'): base_lib = os.path.join(sys.real_prefix, 'libs', lib_name) else: base_lib = '' # os.path.isfile('') == False if os.path.isfile(base_lib): lib_file = base_lib else: log.warn('Cannot build import library: "%s" not found', lib_file) return log.info('Building import library (ARCH=x86): "%s"', out_file) from numpy.distutils import lib2def def_name = "python%d%d.def" % tuple(sys.version_info[:2]) def_file = os.path.join(sys.prefix, 'libs', def_name) nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file) nm_output = lib2def.getnm(nm_cmd) dlist, flist = lib2def.parse_nm(nm_output) lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, open(def_file, 'w')) dll_name = find_python_dll () args = (dll_name, def_file, out_file) cmd = 'dlltool --dllname "%s" --def "%s" --output-lib "%s"' % args status = os.system(cmd) # for now, fail silently if status: log.warn('Failed to build import library for gcc. Linking will fail.') return
def finalize_options(self): """ Perhaps not necessary? (potential OSX problem) from sysdevel.distutils.prerequisites import gcc_is_64bit if ((self.f77exec is None and self.f90exec is None) or \ 'gfortran' in self.f77exec or 'gfortran' in self.f90exec) and \ 'darwin' in platform.system().lower(): ## Unify GCC and GFortran default outputs if gcc_is_64bit(): os.environ['FFLAGS'] = '-arch x86_64' os.environ['FCFLAGS'] = '-arch x86_64' else: os.environ['FFLAGS'] = '-arch i686' os.environ['FCFLAGS'] = '-arch i686' """ # the rest is *nearly* identical to that in the numpy original log.info('unifing config_fc, config, build_clib, build_shlib, ' + 'build_ext, build commands --fcompiler options') build_clib = self.get_finalized_command('build_clib') build_shlib = self.get_finalized_command('build_shlib') build_ext = self.get_finalized_command('build_ext') config = self.get_finalized_command('config') build = self.get_finalized_command('build') cmd_list = [self, config, build_clib, build_shlib, build_ext, build] for a in ['fcompiler']: l = [] for c in cmd_list: v = getattr(c,a) if v is not None: if not is_string(v): v = v.compiler_type if v not in l: l.append(v) if not l: v1 = None else: v1 = l[0] if len(l)>1: log.warn(' commands have different --%s options: %s'\ ', using first in list as default' % (a, l)) if v1: for c in cmd_list: if getattr(c,a) is None: setattr(c, a, v1)
def pyrex_sources(self, sources, extension):
    have_pyrex = False
    try:
        import Pyrex
        have_pyrex = True
    except ImportError:
        pass
    new_sources = []
    ext_name = extension.name.split('.')[-1]
    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.pyx':
            if self.inplace or not have_pyrex:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
            target_file = os.path.join(target_dir, ext_name + '.c')
            depends = [source] + extension.depends
            if (self.force or newer_group(depends, target_file, 'newer')):
                if have_pyrex:
                    log.info("pyrexc:> %s" % (target_file))
                    self.mkpath(target_dir)
                    from Pyrex.Compiler import Main
                    options = Main.CompilationOptions(
                        defaults=Main.default_options,
                        output_file=target_file)
                    pyrex_result = Main.compile(source, options=options)
                    if pyrex_result.num_errors != 0:
                        raise DistutilsError("%d errors while compiling %r with Pyrex"
                                             % (pyrex_result.num_errors, source))
                elif os.path.isfile(target_file):
                    log.warn("Pyrex required for compiling %r but not available,"
                             " using old target %r" % (source, target_file))
                else:
                    raise DistutilsError("Pyrex required for compiling %r"
                                         " but not available" % (source))
            new_sources.append(target_file)
        else:
            new_sources.append(source)
    return new_sources
def CCompiler_customize(self, *args, **kw): need_cxx = kw.get('need_cxx', 0) # list unwanted flags (e.g. '-g') here. unwanted = [] # call the original method. numpy_CCompiler_customize(self, *args, **kw) # update arguments. ccshared = ' '.join(set(self.compiler_so) - set(self.compiler)) compiler = ' '.join(it for it in self.compiler if it not in unwanted) old_compiler = self.compiler self.set_executables( compiler=compiler, compiler_so=compiler + ' ' + ccshared, ) modified = self.compiler != old_compiler if modified and need_cxx and hasattr(self, 'compiler'): log.warn("#### %s ####### %s removed" % (self.compiler, unwanted)) return
def finalize_options(self): self.set_undefined_options('build', ('build_base', 'build_base'), ('build_lib', 'build_lib'), ('force', 'force')) if self.package is None: self.package = self.distribution.ext_package self.extensions = self.distribution.ext_modules self.libraries = self.distribution.libraries or [] self.shared_libraries = self.distribution.shared_libraries or [] self.py_modules = self.distribution.py_modules or [] self.data_files = self.distribution.data_files or [] if self.build_src is None: plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3]) self.build_src = os.path.join(self.build_base, 'src'+plat_specifier) # py_modules_dict is used in build_py.find_package_modules self.py_modules_dict = {} if self.f2pyflags: if self.f2py_opts: log.warn('ignoring --f2pyflags as --f2py-opts already used') else: self.f2py_opts = self.f2pyflags self.f2pyflags = None if self.f2py_opts is None: self.f2py_opts = [] else: self.f2py_opts = shlex.split(self.f2py_opts) if self.swigflags: if self.swig_opts: log.warn('ignoring --swigflags as --swig-opts already used') else: self.swig_opts = self.swigflags self.swigflags = None if self.swig_opts is None: self.swig_opts = [] else: self.swig_opts = shlex.split(self.swig_opts) # use options from build_ext command build_ext = self.get_finalized_command('build_ext') if self.inplace is None: self.inplace = build_ext.inplace if self.swig_cpp is None: self.swig_cpp = build_ext.swig_cpp for c in ['swig', 'swig_opt']: o = '--'+c.replace('_', '-') v = getattr(build_ext, c, None) if v: if getattr(self, c): log.warn('both build_src and build_ext define %s option' % (o)) else: log.info('using "%s=%s" option from build_ext command' % (o, v)) setattr(self, c, v)
def finalize_options(self):
    log.info('unifying config_cc, config, build_clib, build_ext, build '
             'commands --compiler options')
    build_clib = self.get_finalized_command('build_clib')
    build_ext = self.get_finalized_command('build_ext')
    config = self.get_finalized_command('config')
    build = self.get_finalized_command('build')
    cmd_list = [self, config, build_clib, build_ext, build]
    for a in ['compiler']:
        l = []
        for c in cmd_list:
            v = getattr(c, a)
            if v is not None:
                if not isinstance(v, str):
                    v = v.compiler_type
                if v not in l:
                    l.append(v)
        if not l:
            v1 = None
        else:
            v1 = l[0]
            if len(l) > 1:
                log.warn(' commands have different --%s options: %s'
                         ', using first in list as default' % (a, l))
        if v1:
            for c in cmd_list:
                if getattr(c, a) is None:
                    setattr(c, a, v1)
    return
def finalize_options(self): self.set_undefined_options( "build", ("build_base", "build_base"), ("build_lib", "build_lib"), ("force", "force") ) if self.package is None: self.package = self.distribution.ext_package self.extensions = self.distribution.ext_modules self.libraries = self.distribution.libraries or [] self.py_modules = self.distribution.py_modules or [] self.data_files = self.distribution.data_files or [] if self.build_src is None: plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3]) self.build_src = os.path.join(self.build_base, "src" + plat_specifier) # py_modules_dict is used in build_py.find_package_modules self.py_modules_dict = {} if self.f2pyflags: if self.f2py_opts: log.warn("ignoring --f2pyflags as --f2py-opts already used") else: self.f2py_opts = self.f2pyflags self.f2pyflags = None if self.f2py_opts is None: self.f2py_opts = [] else: self.f2py_opts = shlex.split(self.f2py_opts) if self.swigflags: if self.swig_opts: log.warn("ignoring --swigflags as --swig-opts already used") else: self.swig_opts = self.swigflags self.swigflags = None if self.swig_opts is None: self.swig_opts = [] else: self.swig_opts = shlex.split(self.swig_opts) # use options from build_ext command build_ext = self.get_finalized_command("build_ext") if self.inplace is None: self.inplace = build_ext.inplace if self.swig_cpp is None: self.swig_cpp = build_ext.swig_cpp for c in ["swig", "swig_opt"]: o = "--" + c.replace("_", "-") v = getattr(build_ext, c, None) if v: if getattr(self, c): log.warn("both build_src and build_ext define %s option" % (o)) else: log.info('using "%s=%s" option from build_ext command' % (o, v)) setattr(self, c, v)
def show_fcompilers(dist=None):
    """Print list of available compilers (used by the "--help-fcompiler"
    option to "config_fc").
    """
    if dist is None:
        from distutils.dist import Distribution
        dist = Distribution()
        dist.script_name = os.path.basename(sys.argv[0])
        dist.script_args = ['config_fc'] + sys.argv[1:]
        dist.cmdclass['config_fc'] = config_fc
        dist.parse_config_files()
        dist.parse_command_line()
    compilers = []
    compilers_na = []
    compilers_ni = []
    for compiler in fcompiler_class.keys():
        v = 'N/A'
        log.set_verbosity(-2)
        try:
            c = new_fcompiler(compiler=compiler)
            c.customize(dist)
            v = c.get_version()
        except DistutilsModuleError:
            pass
        except Exception as msg:
            log.warn(msg)
        if v is None:
            compilers_na.append(("fcompiler=" + compiler, None,
                                 fcompiler_class[compiler][2]))
        elif v == 'N/A':
            compilers_ni.append(("fcompiler=" + compiler, None,
                                 fcompiler_class[compiler][2]))
        else:
            compilers.append(("fcompiler=" + compiler, None,
                              fcompiler_class[compiler][2] + ' (%s)' % v))
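# For reference: show_fcompilers is normally reached through the config_fc
# command, i.e. "python setup.py config_fc --help-fcompiler". Calling it with
# no Distribution is also supported, as the dist-is-None branch above shows
# (a sketch; it rebuilds the Distribution from sys.argv):
show_fcompilers()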
def package_check(pkg_name, version=None, optional=False, checker=LooseVersion, version_getter=None, messages=None, show_only=False): """ Check if package `pkg_name` is present, and in correct version. Parameters ---------- pkg_name : str or sequence of str The name of the package as imported into python. Alternative names (e.g. for different versions) may be given in a list. version : str, optional The minimum version of the package that is required. If not given, the version is not checked. optional : bool, optional If False, raise error for absent package or wrong version; otherwise warn checker : callable, optional If given, the callable with which to return a comparable thing from a version string. The default is ``distutils.version.LooseVersion``. version_getter : callable, optional: If given, the callable that takes `pkg_name` as argument, and returns the package version string - as in:: ``version = version_getter(pkg_name)`` The default is equivalent to:: mod = __import__(pkg_name); version = mod.__version__`` messages : dict, optional If given, the dictionary providing (some of) output messages. show_only : bool If True, do not raise exceptions, only show the package name and version information. """ if version_getter is None: def version_getter(pkg_name): mod = __import__(pkg_name) return mod.__version__ if messages is None: messages = {} msgs = { 'available': '%s is available', 'missing': '%s is missing', 'opt suffix': '; you may get run-time errors', 'version': '%s is available in version %s', 'version old': '%s is available in version %s, but >= %s is needed', 'no version': '%s is available, cannot determine version', } msgs.update(messages) if isinstance(pkg_name, str): names = [pkg_name] else: names = pkg_name import_ok = False for pkg_name in names: try: __import__(pkg_name) except ImportError: pass else: import_ok = True pkg_info = pkg_name + (' (optional)' if optional else '') if not import_ok: if not (optional or show_only): raise RuntimeError(msgs['missing'] % pkg_name) log.warn(msgs['missing'] % pkg_info + msgs['opt suffix']) return if not version: if show_only: log.info(msgs['available'] % pkg_info) return try: have_version = version_getter(pkg_name) except AttributeError: raise RuntimeError(msgs['no version'] % pkg_info) if not have_version: if optional or show_only: log.warn(msgs['no version'] % pkg_info) else: raise RuntimeError(msgs['no version'] % pkg_info) elif checker(have_version) < checker(version): if optional or show_only: log.warn(msgs['version old'] % (pkg_info, have_version, version) + msgs['opt suffix']) else: raise RuntimeError(msgs['version old'] % (pkg_info, have_version, version)) elif show_only: log.info(msgs['version'] % (pkg_info, have_version))
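# Hedged example of the messages override described in the docstring above; the
# key must match one defined in the msgs dictionary, and the package name and
# version are arbitrary examples.
package_check(['scipy'], version='0.6', optional=True,
              messages={'missing': '%s was not found (example wording)'})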
def f2py_sources(self, sources, extension): new_sources = [] f2py_sources = [] f_sources = [] f2py_targets = {} target_dirs = [] ext_name = extension.name.split('.')[-1] skip_f2py = 0 for source in sources: (base, ext) = os.path.splitext(source) if ext == '.pyf': # F2PY interface file if self.inplace: target_dir = os.path.dirname(base) else: target_dir = appendpath(self.build_src, os.path.dirname(base)) if os.path.isfile(source): name = get_f2py_modulename(source) if name != ext_name: raise DistutilsSetupError( 'mismatch of extension names: %s ' 'provides %r but expected %r' % (source, name, ext_name)) target_file = os.path.join(target_dir, name + 'module.c') else: log.debug(' source %s does not exist: skipping f2py\'ing.' \ % (source)) name = ext_name skip_f2py = 1 target_file = os.path.join(target_dir, name + 'module.c') if not os.path.isfile(target_file): log.warn(' target %s does not exist:\n ' \ 'Assuming %smodule.c was generated with ' \ '"build_src --inplace" command.' \ % (target_file, name)) target_dir = os.path.dirname(base) target_file = os.path.join(target_dir, name + 'module.c') if not os.path.isfile(target_file): raise DistutilsSetupError("%r missing" % (target_file, )) log.info(' Yes! Using %r as up-to-date target.' \ % (target_file)) target_dirs.append(target_dir) f2py_sources.append(source) f2py_targets[source] = target_file new_sources.append(target_file) elif fortran_ext_match(ext): f_sources.append(source) else: new_sources.append(source) if not (f2py_sources or f_sources): return new_sources for d in target_dirs: self.mkpath(d) f2py_options = extension.f2py_options + self.f2py_opts if self.distribution.libraries: for name, build_info in self.distribution.libraries: if name in extension.libraries: f2py_options.extend(build_info.get('f2py_options', [])) log.info("f2py options: %s" % (f2py_options)) if f2py_sources: if len(f2py_sources) != 1: raise DistutilsSetupError( 'only one .pyf file is allowed per extension module but got' \ ' more: %r' % (f2py_sources,)) source = f2py_sources[0] target_file = f2py_targets[source] target_dir = os.path.dirname(target_file) or '.' depends = [source] + extension.depends if (self.force or newer_group(depends, target_file, 'newer')) \ and not skip_f2py: log.info("f2py: %s" % (source)) import numpy.f2py numpy.f2py.run_main(f2py_options + ['--build-dir', target_dir, source]) else: log.debug(" skipping '%s' f2py interface (up-to-date)" % (source)) else: #XXX TODO: --inplace support for sdist command if is_sequence(extension): name = extension[0] else: name = extension.name target_dir = os.path.join(*([self.build_src] + name.split('.')[:-1])) target_file = os.path.join(target_dir, ext_name + 'module.c') new_sources.append(target_file) depends = f_sources + extension.depends if (self.force or newer_group(depends, target_file, 'newer')) \ and not skip_f2py: log.info("f2py:> %s" % (target_file)) self.mkpath(target_dir) import numpy.f2py numpy.f2py.run_main(f2py_options + ['--lower', '--build-dir', target_dir] + \ ['-m', ext_name] + f_sources) else: log.debug(" skipping f2py fortran files for '%s' (up-to-date)" \ % (target_file)) if not os.path.isfile(target_file): raise DistutilsError("f2py target file %r not generated" % (target_file, )) build_dir = os.path.join(self.build_src, target_dir) target_c = os.path.join(build_dir, 'fortranobject.c') target_h = os.path.join(build_dir, 'fortranobject.h') log.info(" adding '%s' to sources." 
% (target_c)) new_sources.append(target_c) if build_dir not in extension.include_dirs: log.info(" adding '%s' to include_dirs." % (build_dir)) extension.include_dirs.append(build_dir) if not skip_f2py: import numpy.f2py d = os.path.dirname(numpy.f2py.__file__) source_c = os.path.join(d, 'src', 'fortranobject.c') source_h = os.path.join(d, 'src', 'fortranobject.h') if newer(source_c, target_c) or newer(source_h, target_h): self.mkpath(os.path.dirname(target_c)) self.copy_file(source_c, target_c) self.copy_file(source_h, target_h) else: if not os.path.isfile(target_c): raise DistutilsSetupError("f2py target_c file %r not found" % (target_c, )) if not os.path.isfile(target_h): raise DistutilsSetupError("f2py target_h file %r not found" % (target_h, )) for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']: filename = os.path.join(target_dir, ext_name + name_ext) if os.path.isfile(filename): log.info(" adding '%s' to sources." % (filename)) f_sources.append(filename) return new_sources + f_sources
def swig_sources(self, sources, extension): # Assuming SWIG 1.3.14 or later. See compatibility note in # http://www.swig.org/Doc1.3/Python.html#Python_nn6 new_sources = [] swig_sources = [] swig_targets = {} target_dirs = [] py_files = [] # swig generated .py files target_ext = '.c' if '-c++' in extension.swig_opts: typ = 'c++' is_cpp = True extension.swig_opts.remove('-c++') elif self.swig_cpp: typ = 'c++' is_cpp = True else: typ = None is_cpp = False skip_swig = 0 ext_name = extension.name.split('.')[-1] for source in sources: (base, ext) = os.path.splitext(source) if ext == '.i': # SWIG interface file # the code below assumes that the sources list # contains not more than one .i SWIG interface file if self.inplace: target_dir = os.path.dirname(base) py_target_dir = self.ext_target_dir else: target_dir = appendpath(self.build_src, os.path.dirname(base)) py_target_dir = target_dir if os.path.isfile(source): name = get_swig_modulename(source) if name != ext_name[1:]: raise DistutilsSetupError( 'mismatch of extension names: %s provides %r' ' but expected %r' % (source, name, ext_name[1:])) if typ is None: typ = get_swig_target(source) is_cpp = typ == 'c++' else: typ2 = get_swig_target(source) if typ2 is None: log.warn('source %r does not define swig target, assuming %s swig target' \ % (source, typ)) elif typ != typ2: log.warn('expected %r but source %r defines %r swig target' \ % (typ, source, typ2)) if typ2 == 'c++': log.warn( 'resetting swig target to c++ (some targets may have .c extension)' ) is_cpp = True else: log.warn( 'assuming that %r has c++ swig target' % (source)) if is_cpp: target_ext = '.cpp' target_file = os.path.join(target_dir, '%s_wrap%s' \ % (name, target_ext)) else: log.warn(' source %s does not exist: skipping swig\'ing.' \ % (source)) name = ext_name[1:] skip_swig = 1 target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): log.warn(' target %s does not exist:\n ' \ 'Assuming %s_wrap.{c,cpp} was generated with ' \ '"build_src --inplace" command.' \ % (target_file, name)) target_dir = os.path.dirname(base) target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): raise DistutilsSetupError("%r missing" % (target_file, )) log.warn(' Yes! Using %r as up-to-date target.' \ % (target_file)) target_dirs.append(target_dir) new_sources.append(target_file) py_files.append(os.path.join(py_target_dir, name + '.py')) swig_sources.append(source) swig_targets[source] = new_sources[-1] else: new_sources.append(source) if not swig_sources: return new_sources if skip_swig: return new_sources + py_files for d in target_dirs: self.mkpath(d) swig = self.swig or self.find_swig() swig_cmd = [swig, "-python"] + extension.swig_opts if is_cpp: swig_cmd.append('-c++') for d in extension.include_dirs: swig_cmd.append('-I' + d) for source in swig_sources: target = swig_targets[source] depends = [source] + extension.depends if self.force or newer_group(depends, target, 'newer'): log.info("%s: %s" % (os.path.basename(swig) \ + (is_cpp and '++' or ''), source)) self.spawn(swig_cmd + self.swig_opts \ + ["-o", target, '-outdir', py_target_dir, source]) else: log.debug(" skipping '%s' swig interface (up-to-date)" \ % (source)) return new_sources + py_files
def build_extension(self, ext): sources = ext.sources if sources is None or not is_sequence(sources): raise DistutilsSetupError( ("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % ext.name) sources = list(sources) if not sources: return fullname = self.get_ext_fullname(ext.name) if self.inplace: modpath = fullname.split(".") package = ".".join(modpath[0:-1]) base = modpath[-1] build_py = self.get_finalized_command("build_py") package_dir = build_py.get_package_dir(package) ext_filename = os.path.join(package_dir, self.get_ext_filename(base)) else: ext_filename = os.path.join(self.build_lib, self.get_ext_filename(fullname)) depends = sources + ext.depends if not (self.force or newer_group(depends, ext_filename, "newer")): log.debug("skipping '%s' extension (up-to-date)", ext.name) return else: log.info("building '%s' extension", ext.name) extra_args = ext.extra_compile_args or [] macros = ext.define_macros[:] for undef in ext.undef_macros: macros.append((undef, )) c_sources, cxx_sources, f_sources, fmodule_sources = filter_sources( ext.sources) if self.compiler.compiler_type == "msvc": if cxx_sources: # Needed to compile kiva.agg._agg extension. extra_args.append("/Zm1000") # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] # Set Fortran/C++ compilers for compilation and linking. if ext.language == "f90": fcompiler = self._f90_compiler elif ext.language == "f77": fcompiler = self._f77_compiler else: # in case ext.language is c++, for instance fcompiler = self._f90_compiler or self._f77_compiler if fcompiler is not None: fcompiler.extra_f77_compile_args = (( ext.extra_f77_compile_args or []) if hasattr( ext, "extra_f77_compile_args") else []) fcompiler.extra_f90_compile_args = (( ext.extra_f90_compile_args or []) if hasattr( ext, "extra_f90_compile_args") else []) cxx_compiler = self._cxx_compiler # check for the availability of required compilers if cxx_sources and cxx_compiler is None: raise DistutilsError( "extension %r has C++ sourcesbut no C++ compiler found" % (ext.name)) if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError( "extension %r has Fortran sources but no Fortran compiler found" % (ext.name)) if ext.language in ["f77", "f90"] and fcompiler is None: self.warn("extension %r has Fortran libraries " "but no Fortran linker found, using default linker" % (ext.name)) if ext.language == "c++" and cxx_compiler is None: self.warn("extension %r has C++ libraries " "but no C++ linker found, using default linker" % (ext.name)) kws = {"depends": ext.depends} output_dir = self.build_temp include_dirs = ext.include_dirs + get_numpy_include_dirs() c_objects = [] if c_sources: log.info("compiling C sources") c_objects = self.compiler.compile(c_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) if cxx_sources: log.info("compiling C++ sources") c_objects += cxx_compiler.compile(cxx_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) extra_postargs = [] f_objects = [] if fmodule_sources: log.info("compiling Fortran 90 module sources") module_dirs = ext.module_dirs[:] module_build_dir = os.path.join( self.build_temp, os.path.dirname(self.get_ext_filename(fullname))) self.mkpath(module_build_dir) if fcompiler.module_dir_switch is None: existing_modules = glob("*.mod") 
extra_postargs += fcompiler.module_options(module_dirs, module_build_dir) f_objects += fcompiler.compile( fmodule_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends, ) if fcompiler.module_dir_switch is None: for f in glob("*.mod"): if f in existing_modules: continue t = os.path.join(module_build_dir, f) if os.path.abspath(f) == os.path.abspath(t): continue if os.path.isfile(t): os.remove(t) try: self.move_file(f, module_build_dir) except DistutilsFileError: log.warn("failed to move %r to %r" % (f, module_build_dir)) if f_sources: log.info("compiling Fortran sources") f_objects += fcompiler.compile( f_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends, ) if f_objects and not fcompiler.can_ccompiler_link(self.compiler): unlinkable_fobjects = f_objects objects = c_objects else: unlinkable_fobjects = [] objects = c_objects + f_objects if ext.extra_objects: objects.extend(ext.extra_objects) extra_args = ext.extra_link_args or [] libraries = self.get_libraries(ext)[:] library_dirs = ext.library_dirs[:] linker = self.compiler.link_shared_object # Always use system linker when using MSVC compiler. if self.compiler.compiler_type in ("msvc", "intelw", "intelemw"): # expand libraries with fcompiler libraries as we are # not using fcompiler linker self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs) elif ext.language in ["f77", "f90"] and fcompiler is not None: linker = fcompiler.link_shared_object if ext.language == "c++" and cxx_compiler is not None: linker = cxx_compiler.link_shared_object if fcompiler is not None: objects, libraries = self._process_unlinkable_fobjects( objects, libraries, fcompiler, library_dirs, unlinkable_fobjects) linker( objects, ext_filename, libraries=libraries, library_dirs=library_dirs, runtime_library_dirs=ext.runtime_library_dirs, extra_postargs=extra_args, export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, target_lang=ext.language, )
def package_check(pkg_name, version=None, optional=False, checker=LooseVersion, version_getter=None, messages=None): """ Check if package `pkg_name` is present, and correct version Parameters ---------- pkg_name : str or sequence of str name of package as imported into python. Alternative names (e.g. for different versions) may be given in a list. version : {None, str}, optional minimum version of the package that we require. If None, we don't check the version. Default is None optional : {False, True}, optional If False, raise error for absent package or wrong version; otherwise warn checker : callable, optional callable with which to return comparable thing from version string. Default is ``distutils.version.LooseVersion`` version_getter : {None, callable}: Callable that takes `pkg_name` as argument, and returns the package version string - as in:: ``version = version_getter(pkg_name)`` If None, equivalent to:: mod = __import__(pkg_name); version = mod.__version__`` messages : None or dict, optional dictionary giving output messages """ if version_getter is None: def version_getter(pkg_name): mod = __import__(pkg_name) return mod.__version__ if messages is None: messages = {} msgs = { 'missing': 'Cannot import package "%s" - is it installed?', 'missing opt': 'Missing optional package "%s"', 'opt suffix': '; you may get run-time errors', 'version too old': 'You have version %s of package "%s"' ' but we need version >= %s', 'no version': 'cannot determine version of %s!', } msgs.update(messages) if isinstance(pkg_name, str): names = [pkg_name] else: names = pkg_name import_ok = False for pkg_name in names: try: __import__(pkg_name) except ImportError: pass else: import_ok = True if not import_ok: if not optional: raise RuntimeError(msgs['missing'] % pkg_name) log.warn(msgs['missing opt'] % pkg_name + msgs['opt suffix']) return if not version: return try: have_version = version_getter(pkg_name) except AttributeError: raise RuntimeError('Cannot find version for %s' % pkg_name) if not have_version: if optional: log.warn(msgs['no version'] % pkg_name) else: raise RuntimeError(msgs['no version'] % pkg_name) elif checker(have_version) < checker(version): if optional: log.warn(msgs['version too old'] % (have_version, pkg_name, version) + msgs['opt suffix']) else: raise RuntimeError(msgs['version too old'] % (have_version, pkg_name, version))
def _exec_command(command, use_shell=None, use_tee=None, **env): log.debug('_exec_command(...)') if use_shell is None: use_shell = os.name == 'posix' if use_tee is None: use_tee = os.name == 'posix' using_command = 0 if use_shell: # We use shell (unless use_shell==0) so that wildcards can be # used. sh = os.environ.get('SHELL', '/bin/sh') if is_sequence(command): argv = [sh, '-c', ' '.join(list(command))] else: argv = [sh, '-c', command] else: # On NT, DOS we avoid using command.com as it's exit status is # not related to the exit status of a command. if is_sequence(command): argv = command[:] else: argv = shlex.split(command) if hasattr(os, 'spawnvpe'): spawn_command = os.spawnvpe else: spawn_command = os.spawnve argv[0] = find_executable(argv[0]) or argv[0] if not os.path.isfile(argv[0]): log.warn('Executable %s does not exist' % (argv[0])) if os.name in ['nt', 'dos']: # argv[0] might be internal command argv = [os.environ['COMSPEC'], '/C'] + argv using_command = 1 _so_has_fileno = _supports_fileno(sys.stdout) _se_has_fileno = _supports_fileno(sys.stderr) so_flush = sys.stdout.flush se_flush = sys.stderr.flush if _so_has_fileno: so_fileno = sys.stdout.fileno() so_dup = os.dup(so_fileno) if _se_has_fileno: se_fileno = sys.stderr.fileno() se_dup = os.dup(se_fileno) outfile = temp_file_name() fout = open(outfile, 'w') if using_command: errfile = temp_file_name() ferr = open(errfile, 'w') log.debug('Running %s(%s,%r,%r,os.environ)' \ % (spawn_command.__name__, os.P_WAIT, argv[0], argv)) argv0 = argv[0] if not using_command: argv[0] = quote_arg(argv0) so_flush() se_flush() if _so_has_fileno: os.dup2(fout.fileno(), so_fileno) if _se_has_fileno: if using_command: #XXX: disabled for now as it does not work from cmd under win32. # Tests fail on msys os.dup2(ferr.fileno(), se_fileno) else: os.dup2(fout.fileno(), se_fileno) try: status = spawn_command(os.P_WAIT, argv0, argv, os.environ) except OSError: errmess = str(get_exception()) status = 999 sys.stderr.write('%s: %s' % (errmess, argv[0])) so_flush() se_flush() if _so_has_fileno: os.dup2(so_dup, so_fileno) if _se_has_fileno: os.dup2(se_dup, se_fileno) fout.close() fout = open_latin1(outfile, 'r') text = fout.read() fout.close() os.remove(outfile) if using_command: ferr.close() ferr = open_latin1(errfile, 'r') errmess = ferr.read() ferr.close() os.remove(errfile) if errmess and not status: # Not sure how to handle the case where errmess # contains only warning messages and that should # not be treated as errors. #status = 998 if text: text = text + '\n' #text = '%sCOMMAND %r FAILED: %s' %(text,command,errmess) text = text + errmess print(errmess) if text[-1:] == '\n': text = text[:-1] if status is None: status = 0 if use_tee: print(text) return status, text
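# Companion sketch: the public exec_command wrapper around _exec_command (used
# e.g. in CCompiler_get_version above) returns a (status, output-text) pair;
# the command shown is an arbitrary example.
status, text = exec_command('gcc --version', use_tee=0)
if status != 0:
    log.warn('command failed with status %s: %s' % (status, text))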
def build_extension(self, ext): sources = ext.sources if sources is None or not is_sequence(sources): raise DistutilsSetupError( ("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % ext.name) sources = list(sources) if not sources: return fullname = self.get_ext_fullname(ext.name) if self.inplace: modpath = fullname.split('.') package = '.'.join(modpath[0:-1]) base = modpath[-1] build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(package) ext_filename = os.path.join(package_dir, self.get_ext_filename(base)) else: ext_filename = os.path.join(self.build_lib, self.get_ext_filename(fullname)) depends = sources + ext.depends if not (self.force or newer_group(depends, ext_filename, 'newer')): log.debug("skipping '%s' extension (up-to-date)", ext.name) return else: log.info("building '%s' extension", ext.name) extra_args = ext.extra_compile_args or [] cxx_extra_args = ext.extra_compile_args or [] extra_link_args = ext.extra_link_args or [] c = os.path.basename(self.compiler.compiler[0]) cxx = os.path.basename(self.compiler.compiler_cxx[0]) if None in copt: extra_args += copt[None] if c in copt: extra_args += copt[c] if None in cxxopt: cxx_extra_args += cxxopt[None] if cxx in cxxopt: cxx_extra_args += cxxopt[cxx] if None in lopt: extra_link_args += lopt[None] if c in lopt: extra_link_args += lopt[c] if cxx in lopt: extra_link_args += lopt[cxx] macros = ext.define_macros[:] for undef in ext.undef_macros: macros.append((undef, )) c_sources, cxx_sources, f_sources, fmodule_sources = \ filter_sources(ext.sources) if self.compiler.compiler_type == 'msvc': if cxx_sources: # Needed to compile kiva.agg._agg extension. cxx_extra_args.append('/Zm1000') # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] # Set Fortran/C++ compilers for compilation and linking. 
if ext.language == 'f90': fcompiler = self._f90_compiler elif ext.language == 'f77': fcompiler = self._f77_compiler else: # in case ext.language is c++, for instance fcompiler = self._f90_compiler or self._f77_compiler if fcompiler is not None: fcompiler.extra_f77_compile_args = ( ext.extra_f77_compile_args or []) if hasattr( ext, 'extra_f77_compile_args') else [] fcompiler.extra_f90_compile_args = ( ext.extra_f90_compile_args or []) if hasattr( ext, 'extra_f90_compile_args') else [] cxx_compiler = self._cxx_compiler # check for the availability of required compilers if cxx_sources and cxx_compiler is None: raise DistutilsError("extension %r has C++ sources" \ "but no C++ compiler found" % (ext.name)) if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError("extension %r has Fortran sources " \ "but no Fortran compiler found" % (ext.name)) if ext.language in ['f77', 'f90'] and fcompiler is None: self.warn("extension %r has Fortran libraries " \ "but no Fortran linker found, using default linker" % (ext.name)) if ext.language == 'c++' and cxx_compiler is None: self.warn("extension %r has C++ libraries " \ "but no C++ linker found, using default linker" % (ext.name)) kws = {'depends': ext.depends} output_dir = self.build_temp include_dirs = ext.include_dirs + get_numpy_include_dirs() c_objects = [] if c_sources: log.info("compiling C sources with arguments %r", extra_args) c_objects = self.compiler.compile(c_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) if cxx_sources: log.info("compiling C++ sources with arguments %r", cxx_extra_args) c_objects += cxx_compiler.compile(cxx_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=cxx_extra_args, **kws) extra_postargs = [] f_objects = [] if fmodule_sources: log.info("compiling Fortran 90 module sources") module_dirs = ext.module_dirs[:] module_build_dir = os.path.join( self.build_temp, os.path.dirname(self.get_ext_filename(fullname))) self.mkpath(module_build_dir) if fcompiler.module_dir_switch is None: existing_modules = glob('*.mod') extra_postargs += fcompiler.module_options(module_dirs, module_build_dir) f_objects += fcompiler.compile(fmodule_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) if fcompiler.module_dir_switch is None: for f in glob('*.mod'): if f in existing_modules: continue t = os.path.join(module_build_dir, f) if os.path.abspath(f) == os.path.abspath(t): continue if os.path.isfile(t): os.remove(t) try: self.move_file(f, module_build_dir) except DistutilsFileError: log.warn('failed to move %r to %r' % (f, module_build_dir)) if f_sources: log.info("compiling Fortran sources") f_objects += fcompiler.compile(f_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) objects = c_objects + f_objects if ext.extra_objects: objects.extend(ext.extra_objects) libraries = self.get_libraries(ext)[:] library_dirs = ext.library_dirs[:] linker = self.compiler.link_shared_object # Always use system linker when using MSVC compiler. 
    if self.compiler.compiler_type == 'msvc':
        # expand libraries with fcompiler libraries as we are
        # not using fcompiler linker
        self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs)
    elif ext.language in ['f77', 'f90'] and fcompiler is not None:
        linker = fcompiler.link_shared_object
    if ext.language == 'c++' and cxx_compiler is not None:
        linker = cxx_compiler.link_shared_object

    if sys.version[:3] >= '2.3':
        kws = {'target_lang': ext.language}
    else:
        kws = {}

    linker(objects, ext_filename,
           libraries=libraries,
           library_dirs=library_dirs,
           runtime_library_dirs=ext.runtime_library_dirs,
           extra_postargs=extra_link_args,
           export_symbols=self.get_export_symbols(ext),
           debug=self.debug,
           build_temp=self.build_temp, **kws)
    # I took one version in my SxS directory: no idea if it is the good
    # one, and we can't retrieve it from python
    _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
    _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
    # Value from msvcrt.CRT_ASSEMBLY_VERSION under Python 3.3.0
    # on Windows XP:
    _MSVCRVER_TO_FULLVER['100'] = "10.0.30319.460"
    if hasattr(msvcrt, "CRT_ASSEMBLY_VERSION"):
        major, minor, rest = msvcrt.CRT_ASSEMBLY_VERSION.split(".", 2)
        _MSVCRVER_TO_FULLVER[major + minor] = msvcrt.CRT_ASSEMBLY_VERSION
        del major, minor, rest
except ImportError:
    # If we are here, it means python was not built with MSVC. Not sure what
    # to do in that case: manifest building will fail, but it should not be
    # used in that case anyway
    log.warn('Cannot import msvcrt: using manifest will not be possible')

def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file."""
    try:
        fullver = _MSVCRVER_TO_FULLVER[str(maj * 10 + min)]
    except KeyError:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" %
                         (maj, min))
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignment constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
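# Illustration: a small worked example of the lookup msvc_manifest_xml performs.
# The major and minor MSVCR versions are collapsed into the string key used by
# _MSVCRVER_TO_FULLVER, assuming the default table entries filled in above.
maj, minor = 9, 0
key = str(maj * 10 + minor)          # -> '90'
fullver = _MSVCRVER_TO_FULLVER[key]  # -> "9.0.21022.8"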
def build_a_library(self, build_info, lib_name, libraries): # default compilers compiler = self.compiler fcompiler = self._f_compiler sources = build_info.get('sources') if sources is None or not is_sequence(sources): raise DistutilsSetupError( ("in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % lib_name) sources = list(sources) c_sources, cxx_sources, f_sources, fmodule_sources \ = filter_sources(sources) requiref90 = not not fmodule_sources or \ build_info.get('language', 'c') == 'f90' # save source type information so that build_ext can use it. source_languages = [] if c_sources: source_languages.append('c') if cxx_sources: source_languages.append('c++') if requiref90: source_languages.append('f90') elif f_sources: source_languages.append('f77') build_info['source_languages'] = source_languages lib_file = compiler.library_filename(lib_name, output_dir=self.build_clib) depends = sources + build_info.get('depends', []) force_rebuild = self.force if not self.disable_optimization and not self.compiler_opt.is_cached(): log.debug("Detected changes on compiler optimizations") force_rebuild = True if not (force_rebuild or newer_group(depends, lib_file, 'newer')): log.debug("skipping '%s' library (up-to-date)", lib_name) return else: log.info("building '%s' library", lib_name) config_fc = build_info.get('config_fc', {}) if fcompiler is not None and config_fc: log.info('using additional config_fc from setup script ' 'for fortran compiler: %s' % (config_fc, )) from numpy.distutils.fcompiler import new_fcompiler fcompiler = new_fcompiler(compiler=fcompiler.compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=requiref90, c_compiler=self.compiler) if fcompiler is not None: dist = self.distribution base_config_fc = dist.get_option_dict('config_fc').copy() base_config_fc.update(config_fc) fcompiler.customize(base_config_fc) # check availability of Fortran compilers if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError("library %s has Fortran sources" " but no Fortran compiler found" % (lib_name)) if fcompiler is not None: fcompiler.extra_f77_compile_args = build_info.get( 'extra_f77_compile_args') or [] fcompiler.extra_f90_compile_args = build_info.get( 'extra_f90_compile_args') or [] macros = build_info.get('macros') if macros is None: macros = [] include_dirs = build_info.get('include_dirs') if include_dirs is None: include_dirs = [] extra_postargs = build_info.get('extra_compiler_args') or [] include_dirs.extend(get_numpy_include_dirs()) # where compiled F90 module files are: module_dirs = build_info.get('module_dirs') or [] module_build_dir = os.path.dirname(lib_file) if requiref90: self.mkpath(module_build_dir) if compiler.compiler_type == 'msvc': # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] # filtering C dispatch-table sources when optimization is not disabled, # otherwise treated as normal sources. 
    copt_c_sources = []
    copt_cxx_sources = []
    copt_baseline_flags = []
    copt_macros = []
    if not self.disable_optimization:
        bsrc_dir = self.get_finalized_command("build_src").build_src
        dispatch_hpath = os.path.join("numpy", "distutils", "include")
        dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
        include_dirs.append(dispatch_hpath)
        copt_build_src = None if self.inplace else bsrc_dir
        for _srcs, _dst, _ext in (
                ((c_sources, ), copt_c_sources, ('.dispatch.c', )),
                ((c_sources, cxx_sources), copt_cxx_sources,
                 ('.dispatch.cpp', '.dispatch.cxx'))):
            for _src in _srcs:
                _dst += [
                    _src.pop(_src.index(s))
                    for s in _src[:] if s.endswith(_ext)
                ]
        copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
    else:
        copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))

    objects = []
    if copt_cxx_sources:
        log.info("compiling C++ dispatch-able sources")
        # dispatch the C++ sources (not the C ones) through the C++ compiler
        objects += self.compiler_opt.try_dispatch(
            copt_cxx_sources,
            output_dir=self.build_temp,
            src_dir=copt_build_src,
            macros=macros + copt_macros,
            include_dirs=include_dirs,
            debug=self.debug,
            extra_postargs=extra_postargs,
            ccompiler=cxx_compiler)
    if copt_c_sources:
        log.info("compiling C dispatch-able sources")
        objects += self.compiler_opt.try_dispatch(
            copt_c_sources,
            output_dir=self.build_temp,
            src_dir=copt_build_src,
            macros=macros + copt_macros,
            include_dirs=include_dirs,
            debug=self.debug,
            extra_postargs=extra_postargs)
    if c_sources:
        log.info("compiling C sources")
        objects += compiler.compile(c_sources,
                                    output_dir=self.build_temp,
                                    macros=macros + copt_macros,
                                    include_dirs=include_dirs,
                                    debug=self.debug,
                                    extra_postargs=extra_postargs + copt_baseline_flags)
    if cxx_sources:
        log.info("compiling C++ sources")
        cxx_compiler = compiler.cxx_compiler()
        cxx_objects = cxx_compiler.compile(cxx_sources,
                                           output_dir=self.build_temp,
                                           macros=macros + copt_macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs + copt_baseline_flags)
        objects.extend(cxx_objects)
    if f_sources or fmodule_sources:
        extra_postargs = []
        f_objects = []
        if requiref90:
            if fcompiler.module_dir_switch is None:
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(
                module_dirs, module_build_dir)
        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
        if requiref90 and self._f_compiler.module_dir_switch is None:
            # move new compiled F90 module files to module_build_dir
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))
        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
    else:
        f_objects = []

    if f_objects and not fcompiler.can_ccompiler_link(compiler):
        # Default linker cannot link Fortran object files, and results
        # need to be wrapped later. Instead of creating a real static
        # library, just keep track of the object files.
        listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

        listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in objects))

        # create empty "library" file for dependency tracking
        lib_fname = os.path.join(self.build_clib,
                                 lib_name + compiler.static_lib_extension)
        with open(lib_fname, 'wb') as f:
            pass
    else:
        # assume that default linker is suitable for
        # linking Fortran object files
        objects.extend(f_objects)
        compiler.create_static_lib(objects, lib_name,
                                   output_dir=self.build_clib,
                                   debug=self.debug)

    # fix library dependencies
    clib_libraries = build_info.get('libraries', [])
    for lname, binfo in libraries:
        if lname in clib_libraries:
            clib_libraries.extend(binfo.get('libraries', []))
    if clib_libraries:
        build_info['libraries'] = clib_libraries
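# Illustration (not part of numpy.distutils): a hypothetical helper showing how
# the '.fobjects'/'.cobjects' lists written above could be read back later by a
# consumer; read_object_list and its arguments are illustrative names only.
import os

def read_object_list(build_clib_dir, lib_name, suffix='.fobjects'):
    listfn = os.path.join(build_clib_dir, lib_name + suffix)
    with open(listfn) as f:
        # one absolute object path per line; skip any blank lines
        return [line for line in f.read().splitlines() if line]

# e.g. fobjects = read_object_list(self.build_clib, lib_name, '.fobjects')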
def CCompiler_customize(self, dist, need_cxx=0): """ Do any platform-specific customization of a compiler instance. This method calls `distutils.sysconfig.customize_compiler` for platform-specific customization, as well as optionally remove a flag to suppress spurious warnings in case C++ code is being compiled. Parameters ---------- dist : object This parameter is not used for anything. need_cxx : bool, optional Whether or not C++ has to be compiled. If so (True), the ``"-Wstrict-prototypes"`` option is removed to prevent spurious warnings. Default is False. Returns ------- None Notes ----- All the default options used by distutils can be extracted with:: from distutils import sysconfig sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS', 'CCSHARED', 'LDSHARED', 'SO') """ # See FCompiler.customize for suggested usage. log.info('customize %s' % (self.__class__.__name__)) customize_compiler(self) if need_cxx: # In general, distutils uses -Wstrict-prototypes, but this option is # not valid for C++ code, only for C. Remove it if it's there to # avoid a spurious warning on every compilation. try: self.compiler_so.remove('-Wstrict-prototypes') except (AttributeError, ValueError): pass if hasattr(self, 'compiler') and 'cc' in self.compiler[0]: if not self.compiler_cxx: if self.compiler[0].startswith('gcc'): a, b = 'gcc', 'g++' else: a, b = 'cc', 'c++' self.compiler_cxx = [self.compiler[0].replace(a, b)]\ + self.compiler[1:] else: if hasattr(self, 'compiler'): log.warn("#### %s #######" % (self.compiler,)) if not hasattr(self, 'compiler_cxx'): log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__) # check if compiler supports gcc style automatic dependencies # run on every extension so skip for known good compilers if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or 'g++' in self.compiler[0] or 'clang' in self.compiler[0]): self._auto_depends = True elif os.name == 'posix': import tempfile import shutil tmpdir = tempfile.mkdtemp() try: fn = os.path.join(tmpdir, "file.c") with open(fn, "w") as f: f.write("int a;\n") self.compile([fn], output_dir=tmpdir, extra_preargs=['-MMD', '-MF', fn + '.d']) self._auto_depends = True except CompileError: self._auto_depends = False finally: shutil.rmtree(tmpdir) return
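# Illustration: the sysconfig snippet quoted in the docstring above can be run
# as-is to inspect the defaults that CCompiler_customize starts from.
from distutils import sysconfig

for name, value in zip(
        ('CC', 'CXX', 'OPT', 'BASECFLAGS', 'CCSHARED', 'LDSHARED', 'SO'),
        sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                  'CCSHARED', 'LDSHARED', 'SO')):
    print(name, '=', value)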
def _exec_command(command, use_shell=None, use_tee=None, **env): log.debug('_exec_command(...)') if use_shell is None: use_shell = os.name == 'posix' if use_tee is None: use_tee = os.name == 'posix' using_command = 0 if use_shell: # We use shell (unless use_shell==0) so that wildcards can be # used. sh = os.environ.get('SHELL', '/bin/sh') if is_sequence(command): argv = [sh, '-c', ' '.join(list(command))] else: argv = [sh, '-c', command] else: # On NT, DOS we avoid using command.com as it's exit status is # not related to the exit status of a command. if is_sequence(command): argv = command[:] else: argv = shlex.split(command) if hasattr(os, 'spawnvpe'): spawn_command = os.spawnvpe else: spawn_command = os.spawnve argv[0] = find_executable(argv[0]) or argv[0] if not os.path.isfile(argv[0]): log.warn('Executable %s does not exist' % (argv[0])) if os.name in ['nt', 'dos']: # argv[0] might be internal command argv = [os.environ['COMSPEC'], '/C'] + argv using_command = 1 _so_has_fileno = _supports_fileno(sys.stdout) _se_has_fileno = _supports_fileno(sys.stderr) so_flush = sys.stdout.flush se_flush = sys.stderr.flush if _so_has_fileno: so_fileno = sys.stdout.fileno() so_dup = os.dup(so_fileno) if _se_has_fileno: se_fileno = sys.stderr.fileno() se_dup = os.dup(se_fileno) outfile = temp_file_name() fout = open(outfile, 'w') if using_command: errfile = temp_file_name() ferr = open(errfile, 'w') log.debug('Running %s(%s,%r,%r,os.environ)' \ % (spawn_command.__name__, os.P_WAIT, argv[0], argv)) if sys.version_info[0] >= 3 and os.name == 'nt': # Pre-encode os.environ, discarding un-encodable entries, # to avoid it failing during encoding as part of spawn. Failure # is possible if the environment contains entries that are not # encoded using the system codepage as windows expects. # # This is not necessary on unix, where os.environ is encoded # using the surrogateescape error handler and decoded using # it as part of spawn. encoded_environ = {} for k, v in os.environ.items(): try: encoded_environ[k.encode( sys.getfilesystemencoding())] = v.encode( sys.getfilesystemencoding()) except UnicodeEncodeError: log.debug("ignoring un-encodable env entry %s", k) else: encoded_environ = os.environ argv0 = argv[0] if not using_command: argv[0] = quote_arg(argv0) so_flush() se_flush() if _so_has_fileno: os.dup2(fout.fileno(), so_fileno) if _se_has_fileno: if using_command: #XXX: disabled for now as it does not work from cmd under win32. # Tests fail on msys os.dup2(ferr.fileno(), se_fileno) else: os.dup2(fout.fileno(), se_fileno) try: status = spawn_command(os.P_WAIT, argv0, argv, encoded_environ) except Exception: errmess = str(get_exception()) status = 999 sys.stderr.write('%s: %s' % (errmess, argv[0])) so_flush() se_flush() if _so_has_fileno: os.dup2(so_dup, so_fileno) os.close(so_dup) if _se_has_fileno: os.dup2(se_dup, se_fileno) os.close(se_dup) fout.close() fout = open_latin1(outfile, 'r') text = fout.read() fout.close() os.remove(outfile) if using_command: ferr.close() ferr = open_latin1(errfile, 'r') errmess = ferr.read() ferr.close() os.remove(errfile) if errmess and not status: # Not sure how to handle the case where errmess # contains only warning messages and that should # not be treated as errors. #status = 998 if text: text = text + '\n' #text = '%sCOMMAND %r FAILED: %s' %(text,command,errmess) text = text + errmess print(errmess) if text[-1:] == '\n': text = text[:-1] if status is None: status = 0 if use_tee: print(text) return status, text
def swig_sources(self, sources, extension): # Assuming SWIG 1.3.14 or later. See compatibility note in # http://www.swig.org/Doc1.3/Python.html#Python_nn6 new_sources = [] swig_sources = [] swig_targets = {} target_dirs = [] py_files = [] # swig generated .py files target_ext = ".c" if self.swig_cpp: typ = "c++" is_cpp = True else: typ = None is_cpp = False skip_swig = 0 ext_name = extension.name.split(".")[-1] for source in sources: (base, ext) = os.path.splitext(source) if ext == ".i": # SWIG interface file if self.inplace: target_dir = os.path.dirname(base) py_target_dir = self.ext_target_dir else: target_dir = appendpath(self.build_src, os.path.dirname(base)) py_target_dir = target_dir if os.path.isfile(source): name = get_swig_modulename(source) # if name != ext_name: # raise DistutilsSetupError( # 'mismatch of extension names: %s provides %r' # ' but expected %r' % (source, name, ext_name)) if typ is None: typ = get_swig_target(source) is_cpp = typ == "c++" if is_cpp: target_ext = ".cpp" else: typ2 = get_swig_target(source) if typ != typ2: log.warn( "expected %r but source %r defines %r swig target" % (typ, source, typ2)) if typ2 == "c++": log.warn( "resetting swig target to c++ (some targets may have .c extension)" ) is_cpp = True target_ext = ".cpp" else: log.warn("assuming that %r has c++ swig target" % (source)) target_file = os.path.join(target_dir, "%s_wrap%s" % (name, target_ext)) else: log.warn(" source %s does not exist: skipping swig'ing." % (source)) name = ext_name skip_swig = 1 target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): log.warn(( "target {} does not exist:\n" + "Assuming {}_wrap.{c,cpp} was generated with 'build_src --inplace' command." ).format(target_file, name)) target_dir = os.path.dirname(base) target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): raise DistutilsSetupError("%r missing" % (target_file, )) log.warn(" Yes! Using %r as up-to-date target." % (target_file)) target_dirs.append(target_dir) new_sources.append(target_file) # py_files.append(os.path.join(py_target_dir, name+'.py')) swig_sources.append(source) swig_targets[source] = new_sources[-1] else: new_sources.append(source) if not swig_sources: return new_sources if skip_swig: return new_sources + py_files for d in target_dirs: self.mkpath(d) swig = self.swig or self.find_swig() swig_cmd = [swig, "-python"] + extension.swig_opts if is_cpp: swig_cmd.append("-c++") for d in extension.include_dirs: swig_cmd.append("-I" + d) for source in swig_sources: target = swig_targets[source] depends = [source] + extension.depends if self.force or newer_group(depends, target, "newer"): log.info("%s: %s" % (os.path.basename(swig) + (is_cpp and "++" or ""), source)) self.spawn(swig_cmd + self.swig_opts + ["-o", target, "-outdir", py_target_dir, source]) else: log.debug(" skipping '%s' swig interface (up-to-date)" % (source)) return new_sources + py_files
def finalize_options(self): self.set_undefined_options( "build", ("build_base", "build_base"), ("build_lib", "build_lib"), ("force", "force"), ) if self.package is None: self.package = self.distribution.ext_package self.extensions = self.distribution.ext_modules self.libraries = self.distribution.libraries or [] self.py_modules = self.distribution.py_modules or [] self.data_files = self.distribution.data_files or [] if self.build_src is None: plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3]) self.build_src = os.path.join(self.build_base, "src" + plat_specifier) # py_modules_dict is used in build_py.find_package_modules self.py_modules_dict = {} if self.f2pyflags: if self.f2py_opts: log.warn("ignoring --f2pyflags as --f2py-opts already used") else: self.f2py_opts = self.f2pyflags self.f2pyflags = None if self.f2py_opts is None: self.f2py_opts = [] else: self.f2py_opts = shlex.split(self.f2py_opts) if self.swigflags: if self.swig_opts: log.warn("ignoring --swigflags as --swig-opts already used") else: self.swig_opts = self.swigflags self.swigflags = None if self.swig_opts is None: self.swig_opts = [] else: self.swig_opts = shlex.split(self.swig_opts) # use options from build_ext command build_ext = self.get_finalized_command("build_ext") if self.inplace is None: self.inplace = build_ext.inplace if self.swig_cpp is None: self.swig_cpp = build_ext.swig_cpp for c in ["swig", "swig_opt"]: o = "--" + c.replace("_", "-") v = getattr(build_ext, c, None) if v: if getattr(self, c): log.warn("both build_src and build_ext define %s option" % (o)) else: log.info('using "%s=%s" option from build_ext command' % (o, v)) setattr(self, c, v)
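# Illustration (not part of numpy.distutils): finalize_options() feeds the
# --f2py-opts/--swig-opts strings through shlex.split, so shell-style quoting
# is honoured. The flags below are only example values.
import shlex

shlex.split('--debug-capi --include-paths "/opt/my include"')
# -> ['--debug-capi', '--include-paths', '/opt/my include']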
def run(self): if not self.extensions: return # Make sure that extension sources are complete. self.run_command('build_src') if self.distribution.has_c_libraries(): if self.inplace: if self.distribution.have_run.get('build_clib'): log.warn('build_clib already run, it is too late to ' \ 'ensure in-place build of build_clib') build_clib = self.distribution.get_command_obj( 'build_clib') else: build_clib = self.distribution.get_command_obj( 'build_clib') build_clib.inplace = 1 build_clib.ensure_finalized() build_clib.run() self.distribution.have_run['build_clib'] = 1 else: self.run_command('build_clib') build_clib = self.get_finalized_command('build_clib') self.library_dirs.append(build_clib.build_clib) else: build_clib = None # Not including C libraries to the list of # extension libraries automatically to prevent # bogus linking commands. Extensions must # explicitly specify the C libraries that they use. from distutils.ccompiler import new_compiler from numpy.distutils.fcompiler import new_fcompiler compiler_type = self.compiler # Initialize C compiler: self.compiler = new_compiler(compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force) self.compiler.customize(self.distribution) self.compiler.customize_cmd(self) self.compiler.show_customization() # Create mapping of libraries built by build_clib: clibs = {} if build_clib is not None: for libname, build_info in build_clib.libraries or []: if libname in clibs and clibs[libname] != build_info: log.warn('library %r defined more than once,'\ ' overwriting build_info\n%s... \nwith\n%s...' \ % (libname, repr(clibs[libname])[:300], repr(build_info)[:300])) clibs[libname] = build_info # .. and distribution libraries: for libname, build_info in self.distribution.libraries or []: if libname in clibs: # build_clib libraries have a precedence before distribution ones continue clibs[libname] = build_info # Determine if C++/Fortran 77/Fortran 90 compilers are needed. # Update extension libraries, library_dirs, and macros. all_languages = set() for ext in self.extensions: ext_languages = set() c_libs = [] c_lib_dirs = [] macros = [] for libname in ext.libraries: if libname in clibs: binfo = clibs[libname] c_libs += binfo.get('libraries', []) c_lib_dirs += binfo.get('library_dirs', []) for m in binfo.get('macros', []): if m not in macros: macros.append(m) for l in clibs.get(libname, {}).get('source_languages', []): ext_languages.add(l) if c_libs: new_c_libs = ext.libraries + c_libs log.info('updating extension %r libraries from %r to %r' % (ext.name, ext.libraries, new_c_libs)) ext.libraries = new_c_libs ext.library_dirs = ext.library_dirs + c_lib_dirs if macros: log.info('extending extension %r defined_macros with %r' % (ext.name, macros)) ext.define_macros = ext.define_macros + macros # determine extension languages if has_f_sources(ext.sources): ext_languages.add('f77') if has_cxx_sources(ext.sources): ext_languages.add('c++') l = ext.language or self.compiler.detect_language(ext.sources) if l: ext_languages.add(l) # reset language attribute for choosing proper linker if 'c++' in ext_languages: ext_language = 'c++' elif 'f90' in ext_languages: ext_language = 'f90' elif 'f77' in ext_languages: ext_language = 'f77' else: ext_language = 'c' # default if l and l != ext_language and ext.language: log.warn('resetting extension %r language from %r to %r.' 
% (ext.name, l, ext_language)) ext.language = ext_language # global language all_languages.update(ext_languages) need_f90_compiler = 'f90' in all_languages need_f77_compiler = 'f77' in all_languages need_cxx_compiler = 'c++' in all_languages # Initialize C++ compiler: if need_cxx_compiler: self._cxx_compiler = new_compiler(compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force) compiler = self._cxx_compiler compiler.customize(self.distribution, need_cxx=need_cxx_compiler) compiler.customize_cmd(self) compiler.show_customization() self._cxx_compiler = compiler.cxx_compiler() else: self._cxx_compiler = None # Initialize Fortran 77 compiler: if need_f77_compiler: ctype = self.fcompiler self._f77_compiler = new_fcompiler(compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=False, c_compiler=self.compiler) fcompiler = self._f77_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn('f77_compiler=%s is not available.' % (ctype)) self._f77_compiler = None else: self._f77_compiler = None # Initialize Fortran 90 compiler: if need_f90_compiler: ctype = self.fcompiler self._f90_compiler = new_fcompiler(compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=True, c_compiler=self.compiler) fcompiler = self._f90_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn('f90_compiler=%s is not available.' % (ctype)) self._f90_compiler = None else: self._f90_compiler = None # Build extensions self.build_extensions()
def create_extensions():
    try:
        from Cython.Build import cythonize
    except ImportError:
        log.warn('Could not import cython, ACE chemistry')
        log.warn('and BH Mie will not be installed')
        return [], []
    extensions = []
    data_files = []
    if _have_fortran_compiler():
        log.info('Detected FORTRAN compiler')
        log.info('ACE chemistry will be installed')
        ext, dat = build_ace()
        extensions.append(ext)
        data_files.append(dat)
    else:
        log.warn('No suitable FORTRAN compiler')
        log.warn('ACE chemistry will not be installed')
    if _have_c_compiler():
        log.info('Detected C compiler')
        log.info('BH Mie will be installed')
        extensions.append(build_bhmie())
    else:
        log.warn('No suitable C compiler')
        log.warn('BH Mie will not be installed')
    if len(extensions) > 0:
        extensions = cythonize(extensions, language_level=3)
    return extensions, data_files
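# Illustration (hypothetical usage): one way create_extensions() might be wired
# into a setup() call; the package metadata here is a placeholder and is not
# taken from the original source.
from setuptools import setup

extensions, data_files = create_extensions()
setup(
    name='example-package',  # placeholder
    ext_modules=extensions,
    data_files=data_files,
)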
def _exec_command(command, use_shell=None, use_tee=None, **env): log.debug('_exec_command(...)') if use_shell is None: use_shell = os.name == 'posix' if use_tee is None: use_tee = os.name == 'posix' using_command = 0 if use_shell: # We use shell (unless use_shell==0) so that wildcards can be # used. sh = os.environ.get('SHELL', '/bin/sh') if is_sequence(command): argv = [sh, '-c', ' '.join(list(command))] else: argv = [sh, '-c', command] else: # On NT, DOS we avoid using command.com as it's exit status is # not related to the exit status of a command. if is_sequence(command): argv = command[:] else: argv = splitcmdline(command) if hasattr(os, 'spawnvpe'): spawn_command = os.spawnvpe else: spawn_command = os.spawnve argv[0] = find_executable(argv[0]) or argv[0] if not os.path.isfile(argv[0]): log.warn('Executable %s does not exist' % (argv[0])) if os.name in ['nt', 'dos']: # argv[0] might be internal command argv = [os.environ['COMSPEC'], '/C'] + argv using_command = 1 # sys.__std*__ is used instead of sys.std* because environments # like IDLE, PyCrust, etc overwrite sys.std* commands. so_fileno = sys.__stdout__.fileno() se_fileno = sys.__stderr__.fileno() so_flush = sys.__stdout__.flush se_flush = sys.__stderr__.flush so_dup = os.dup(so_fileno) se_dup = os.dup(se_fileno) outfile = temp_file_name() fout = open(outfile, 'w') if using_command: errfile = temp_file_name() ferr = open(errfile, 'w') log.debug('Running %s(%s,%r,%r,os.environ)' \ % (spawn_command.__name__,os.P_WAIT,argv[0],argv)) argv0 = argv[0] if not using_command: argv[0] = quote_arg(argv0) so_flush() se_flush() os.dup2(fout.fileno(), so_fileno) if using_command: #XXX: disabled for now as it does not work from cmd under win32. # Tests fail on msys os.dup2(ferr.fileno(), se_fileno) else: os.dup2(fout.fileno(), se_fileno) try: status = spawn_command(os.P_WAIT, argv0, argv, os.environ) except OSError, errmess: status = 999 sys.stderr.write('%s: %s' % (errmess, argv[0]))
def run(self): if not self.extensions: return # Make sure that extension sources are complete. self.run_command("build_src") if self.distribution.has_c_libraries(): if self.inplace: if self.distribution.have_run.get("build_clib"): log.warn("build_clib already run, it is too late to " "ensure in-place build of build_clib") build_clib = self.distribution.get_command_obj( "build_clib") else: build_clib = self.distribution.get_command_obj( "build_clib") build_clib.inplace = 1 build_clib.ensure_finalized() build_clib.run() self.distribution.have_run["build_clib"] = 1 else: self.run_command("build_clib") build_clib = self.get_finalized_command("build_clib") self.library_dirs.append(build_clib.build_clib) else: build_clib = None # Not including C libraries to the list of # extension libraries automatically to prevent # bogus linking commands. Extensions must # explicitly specify the C libraries that they use. from distutils.ccompiler import new_compiler from numpy.distutils.fcompiler import new_fcompiler compiler_type = self.compiler # Initialize C compiler: self.compiler = new_compiler( compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force, ) self.compiler.customize(self.distribution) self.compiler.customize_cmd(self) self.compiler.show_customization() # Setup directory for storing generated extra DLL files on Windows self.extra_dll_dir = os.path.join(self.build_temp, ".libs") if not os.path.isdir(self.extra_dll_dir): os.makedirs(self.extra_dll_dir) # Create mapping of libraries built by build_clib: clibs = {} if build_clib is not None: for libname, build_info in build_clib.libraries or []: if libname in clibs and clibs[libname] != build_info: log.warn("library %r defined more than once," " overwriting build_info\n%s... \nwith\n%s..." % (libname, repr(clibs[libname])[:300], repr(build_info)[:300])) clibs[libname] = build_info # .. and distribution libraries: for libname, build_info in self.distribution.libraries or []: if libname in clibs: # build_clib libraries have a precedence before distribution ones continue clibs[libname] = build_info # Determine if C++/Fortran 77/Fortran 90 compilers are needed. # Update extension libraries, library_dirs, and macros. 
all_languages = set() for ext in self.extensions: ext_languages = set() c_libs = [] c_lib_dirs = [] macros = [] for libname in ext.libraries: if libname in clibs: binfo = clibs[libname] c_libs += binfo.get("libraries", []) c_lib_dirs += binfo.get("library_dirs", []) for m in binfo.get("macros", []): if m not in macros: macros.append(m) for l in clibs.get(libname, {}).get("source_languages", []): ext_languages.add(l) if c_libs: new_c_libs = ext.libraries + c_libs log.info("updating extension %r libraries from %r to %r" % (ext.name, ext.libraries, new_c_libs)) ext.libraries = new_c_libs ext.library_dirs = ext.library_dirs + c_lib_dirs if macros: log.info("extending extension %r defined_macros with %r" % (ext.name, macros)) ext.define_macros = ext.define_macros + macros # determine extension languages if has_f_sources(ext.sources): ext_languages.add("f77") if has_cxx_sources(ext.sources): ext_languages.add("c++") l = ext.language or self.compiler.detect_language(ext.sources) if l: ext_languages.add(l) # reset language attribute for choosing proper linker if "c++" in ext_languages: ext_language = "c++" elif "f90" in ext_languages: ext_language = "f90" elif "f77" in ext_languages: ext_language = "f77" else: ext_language = "c" # default if l and l != ext_language and ext.language: log.warn("resetting extension %r language from %r to %r." % (ext.name, l, ext_language)) ext.language = ext_language # global language all_languages.update(ext_languages) need_f90_compiler = "f90" in all_languages need_f77_compiler = "f77" in all_languages need_cxx_compiler = "c++" in all_languages # Initialize C++ compiler: if need_cxx_compiler: self._cxx_compiler = new_compiler( compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force, ) compiler = self._cxx_compiler compiler.customize(self.distribution, need_cxx=need_cxx_compiler) compiler.customize_cmd(self) compiler.show_customization() self._cxx_compiler = compiler.cxx_compiler() else: self._cxx_compiler = None # Initialize Fortran 77 compiler: if need_f77_compiler: ctype = self.fcompiler self._f77_compiler = new_fcompiler( compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=False, c_compiler=self.compiler, ) fcompiler = self._f77_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn("f77_compiler=%s is not available." % (ctype)) self._f77_compiler = None else: self._f77_compiler = None # Initialize Fortran 90 compiler: if need_f90_compiler: ctype = self.fcompiler self._f90_compiler = new_fcompiler( compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=True, c_compiler=self.compiler, ) fcompiler = self._f90_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn("f90_compiler=%s is not available." % (ctype)) self._f90_compiler = None else: self._f90_compiler = None # Build extensions self.build_extensions() # Copy over any extra DLL files # FIXME: In the case where there are more than two packages, # we blindly assume that both packages need all of the libraries, # resulting in a larger wheel than is required. This should be fixed, # but it's so rare that I won't bother to handle it. 
    pkg_roots = {
        self.get_ext_fullname(ext.name).split(".")[0]
        for ext in self.extensions
    }
    for pkg_root in pkg_roots:
        shared_lib_dir = os.path.join(pkg_root, ".libs")
        if not self.inplace:
            shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir)
        for fn in os.listdir(self.extra_dll_dir):
            if not os.path.isdir(shared_lib_dir):
                os.makedirs(shared_lib_dir)
            if not fn.lower().endswith(".dll"):
                continue
            runtime_lib = os.path.join(self.extra_dll_dir, fn)
            copy_file(runtime_lib, shared_lib_dir)
def build_a_library(self, build_info, lib_name, libraries): # default compilers compiler = self.compiler fcompiler = self._f_compiler sources = build_info.get('sources') if sources is None or not is_sequence(sources): raise DistutilsSetupError(("in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % lib_name) sources = list(sources) c_sources, cxx_sources, f_sources, fmodule_sources \ = filter_sources(sources) requiref90 = not not fmodule_sources or \ build_info.get('language', 'c')=='f90' # save source type information so that build_ext can use it. source_languages = [] if c_sources: source_languages.append('c') if cxx_sources: source_languages.append('c++') if requiref90: source_languages.append('f90') elif f_sources: source_languages.append('f77') build_info['source_languages'] = source_languages lib_file = compiler.library_filename(lib_name, output_dir=self.build_clib) depends = sources + build_info.get('depends', []) if not (self.force or newer_group(depends, lib_file, 'newer')): log.debug("skipping '%s' library (up-to-date)", lib_name) return else: log.info("building '%s' library", lib_name) config_fc = build_info.get('config_fc', {}) if fcompiler is not None and config_fc: log.info('using additional config_fc from setup script '\ 'for fortran compiler: %s' \ % (config_fc,)) from numpy.distutils.fcompiler import new_fcompiler fcompiler = new_fcompiler(compiler=fcompiler.compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=requiref90, c_compiler=self.compiler) if fcompiler is not None: dist = self.distribution base_config_fc = dist.get_option_dict('config_fc').copy() base_config_fc.update(config_fc) fcompiler.customize(base_config_fc) # check availability of Fortran compilers if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError("library %s has Fortran sources"\ " but no Fortran compiler found" % (lib_name)) if fcompiler is not None: fcompiler.extra_f77_compile_args = build_info.get('extra_f77_compile_args') or [] fcompiler.extra_f90_compile_args = build_info.get('extra_f90_compile_args') or [] macros = build_info.get('macros') include_dirs = build_info.get('include_dirs') if include_dirs is None: include_dirs = [] extra_postargs = build_info.get('extra_compiler_args') or [] include_dirs.extend(get_numpy_include_dirs()) # where compiled F90 module files are: module_dirs = build_info.get('module_dirs') or [] module_build_dir = os.path.dirname(lib_file) if requiref90: self.mkpath(module_build_dir) if compiler.compiler_type=='msvc': # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] objects = [] if c_sources: log.info("compiling C sources") objects = compiler.compile(c_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs) if cxx_sources: log.info("compiling C++ sources") cxx_compiler = compiler.cxx_compiler() cxx_objects = cxx_compiler.compile(cxx_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs) objects.extend(cxx_objects) if f_sources or fmodule_sources: extra_postargs = [] f_objects = [] if requiref90: if fcompiler.module_dir_switch is None: existing_modules = glob('*.mod') extra_postargs += fcompiler.module_options(\ module_dirs, module_build_dir) if fmodule_sources: log.info("compiling Fortran 90 module sources") f_objects += 
fcompiler.compile(fmodule_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs) if requiref90 and self._f_compiler.module_dir_switch is None: # move new compiled F90 module files to module_build_dir for f in glob('*.mod'): if f in existing_modules: continue t = os.path.join(module_build_dir, f) if os.path.abspath(f)==os.path.abspath(t): continue if os.path.isfile(t): os.remove(t) try: self.move_file(f, module_build_dir) except DistutilsFileError: log.warn('failed to move %r to %r' \ % (f, module_build_dir)) if f_sources: log.info("compiling Fortran sources") f_objects += fcompiler.compile(f_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs) else: f_objects = [] objects.extend(f_objects) # assume that default linker is suitable for # linking Fortran object files compiler.create_static_lib(objects, lib_name, output_dir=self.build_clib, debug=self.debug) # fix library dependencies clib_libraries = build_info.get('libraries', []) for lname, binfo in libraries: if lname in clib_libraries: clib_libraries.extend(binfo.get('libraries', [])) if clib_libraries: build_info['libraries'] = clib_libraries
def find_executables(self): """Go through the self.executables dictionary, and attempt to find and assign appropriate executables. Executable names are looked for in the environment (environment variables, the distutils.cfg, and command line), the 0th-element of the command list, and the self.possible_executables list. Also, if the 0th element is "<F77>" or "<F90>", the Fortran 77 or the Fortran 90 compiler executable is used, unless overridden by an environment setting. Subclasses should call this if overridden. """ assert self._is_customised exe_cache = self._exe_cache def cached_find_executable(exe): if exe in exe_cache: return exe_cache[exe] fc_exe = find_executable(exe) exe_cache[exe] = exe_cache[fc_exe] = fc_exe return fc_exe def verify_command_form(name, value): if value is not None and not is_sequence_of_strings(value): raise ValueError( "%s value %r is invalid in class %s" % (name, value, self.__class__.__name__)) def set_exe(exe_key, f77=None, f90=None): cmd = self.executables.get(exe_key, None) if not cmd: return None # Note that we get cmd[0] here if the environment doesn't # have anything set exe_from_environ = getattr(self.command_vars, exe_key) if not exe_from_environ: possibles = [f90, f77] + self.possible_executables else: possibles = [exe_from_environ] + self.possible_executables seen = set() unique_possibles = [] for e in possibles: if e == '<F77>': e = f77 elif e == '<F90>': e = f90 if not e or e in seen: continue seen.add(e) unique_possibles.append(e) for exe in unique_possibles: fc_exe = cached_find_executable(exe) if fc_exe: cmd[0] = fc_exe return fc_exe self.set_command(exe_key, None) return None ctype = self.compiler_type f90 = set_exe('compiler_f90') if not f90: f77 = set_exe('compiler_f77') if f77: log.warn('%s: no Fortran 90 compiler found' % ctype) else: raise CompilerNotFound('%s: f90 nor f77' % ctype) else: f77 = set_exe('compiler_f77', f90=f90) if not f77: log.warn('%s: no Fortran 77 compiler found' % ctype) set_exe('compiler_fix', f90=f90) set_exe('linker_so', f77=f77, f90=f90) set_exe('linker_exe', f77=f77, f90=f90) set_exe('version_cmd', f77=f77, f90=f90) set_exe('archiver') set_exe('ranlib')
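# Illustration (not numpy.distutils API): the '<F77>'/'<F90>' placeholders in an
# executables entry are swapped for the detected Fortran compilers before the
# PATH search, roughly as sketched below; substitute_placeholders is a
# hypothetical name mirroring the loop inside set_exe above.
def substitute_placeholders(candidates, f77=None, f90=None):
    seen, result = set(), []
    for exe in candidates:
        if exe == '<F77>':
            exe = f77
        elif exe == '<F90>':
            exe = f90
        if not exe or exe in seen:
            continue
        seen.add(exe)
        result.append(exe)
    return result

# e.g. substitute_placeholders(['<F90>', 'gfortran'], f90='gfortran') -> ['gfortran']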
def test_nt(**kws): pythonexe = get_pythonexe() echo = find_executable('echo') using_cygwin_echo = echo != 'echo' if using_cygwin_echo: log.warn('Using cygwin echo in win32 environment is not supported') s, o=exec_command(pythonexe\ +' -c "import os;print os.environ.get(\'AAA\',\'\')"') assert s == 0 and o == '', (s, o) s, o=exec_command(pythonexe\ +' -c "import os;print os.environ.get(\'AAA\')"', AAA='Tere') assert s == 0 and o == 'Tere', (s, o) os.environ['BBB'] = 'Hi' s, o=exec_command(pythonexe\ +' -c "import os;print os.environ.get(\'BBB\',\'\')"') assert s == 0 and o == 'Hi', (s, o) s, o=exec_command(pythonexe\ +' -c "import os;print os.environ.get(\'BBB\',\'\')"', BBB='Hey') assert s == 0 and o == 'Hey', (s, o) s, o=exec_command(pythonexe\ +' -c "import os;print os.environ.get(\'BBB\',\'\')"') assert s == 0 and o == 'Hi', (s, o) elif 0: s, o = exec_command('echo Hello') assert s == 0 and o == 'Hello', (s, o) s, o = exec_command('echo a%AAA%') assert s == 0 and o == 'a', (s, o) s, o = exec_command('echo a%AAA%', AAA='Tere') assert s == 0 and o == 'aTere', (s, o) os.environ['BBB'] = 'Hi' s, o = exec_command('echo a%BBB%') assert s == 0 and o == 'aHi', (s, o) s, o = exec_command('echo a%BBB%', BBB='Hey') assert s == 0 and o == 'aHey', (s, o) s, o = exec_command('echo a%BBB%') assert s == 0 and o == 'aHi', (s, o) s, o = exec_command('this_is_not_a_command') assert s and o != '', (s, o) s, o = exec_command('type not_existing_file') assert s and o != '', (s, o) s, o = exec_command('echo path=%path%') assert s == 0 and o != '', (s, o) s, o=exec_command('%s -c "import sys;sys.stderr.write(sys.platform)"' \ % pythonexe) assert s == 0 and o == 'win32', (s, o) s, o = exec_command('%s -c "raise \'Ignore me.\'"' % pythonexe) assert s == 1 and o, (s, o) s, o=exec_command('%s -c "import sys;sys.stderr.write(\'0\');sys.stderr.write(\'1\');sys.stderr.write(\'2\')"'\ % pythonexe) assert s == 0 and o == '012', (s, o) s, o = exec_command('%s -c "import sys;sys.exit(15)"' % pythonexe) assert s == 15 and o == '', (s, o) s, o = exec_command('%s -c "print \'Heipa\'"' % pythonexe) assert s == 0 and o == 'Heipa', (s, o) print('ok')
def swig_sources(self, sources, extension): # Assuming SWIG 1.3.14 or later. See compatibility note in # http://www.swig.org/Doc1.3/Python.html#Python_nn6 new_sources = [] swig_sources = [] swig_targets = {} target_dirs = [] py_files = [] # swig generated .py files target_ext = ".c" if "-c++" in extension.swig_opts: typ = "c++" is_cpp = True extension.swig_opts.remove("-c++") elif self.swig_cpp: typ = "c++" is_cpp = True else: typ = None is_cpp = False skip_swig = 0 ext_name = extension.name.split(".")[-1] for source in sources: (base, ext) = os.path.splitext(source) if ext == ".i": # SWIG interface file # the code below assumes that the sources list # contains not more than one .i SWIG interface file if self.inplace: target_dir = os.path.dirname(base) py_target_dir = self.ext_target_dir else: target_dir = appendpath(self.build_src, os.path.dirname(base)) py_target_dir = target_dir if os.path.isfile(source): name = get_swig_modulename(source) if name != ext_name[1:]: raise DistutilsSetupError( "mismatch of extension names: %s provides %r" " but expected %r" % (source, name, ext_name[1:]) ) if typ is None: typ = get_swig_target(source) is_cpp = typ == "c++" if is_cpp: target_ext = ".cpp" else: typ2 = get_swig_target(source) if typ2 is None: log.warn("source %r does not define swig target, assuming %s swig target" % (source, typ)) if is_cpp: target_ext = ".cpp" elif typ != typ2: log.warn("expected %r but source %r defines %r swig target" % (typ, source, typ2)) if typ2 == "c++": log.warn("resetting swig target to c++ (some targets may have .c extension)") is_cpp = True target_ext = ".cpp" else: log.warn("assuming that %r has c++ swig target" % (source)) target_file = os.path.join(target_dir, "%s_wrap%s" % (name, target_ext)) else: log.warn(" source %s does not exist: skipping swig'ing." % (source)) name = ext_name[1:] skip_swig = 1 target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): log.warn( " target %s does not exist:\n " "Assuming %s_wrap.{c,cpp} was generated with " '"build_src --inplace" command.' % (target_file, name) ) target_dir = os.path.dirname(base) target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): raise DistutilsSetupError("%r missing" % (target_file,)) log.warn(" Yes! Using %r as up-to-date target." % (target_file)) target_dirs.append(target_dir) new_sources.append(target_file) py_files.append(os.path.join(py_target_dir, name + ".py")) swig_sources.append(source) swig_targets[source] = new_sources[-1] else: new_sources.append(source) if not swig_sources: return new_sources if skip_swig: return new_sources + py_files for d in target_dirs: self.mkpath(d) swig = self.swig or self.find_swig() swig_cmd = [swig, "-python"] + extension.swig_opts if is_cpp: swig_cmd.append("-c++") for d in extension.include_dirs: swig_cmd.append("-I" + d) for source in swig_sources: target = swig_targets[source] depends = [source] + extension.depends if self.force or newer_group(depends, target, "newer"): log.info("%s: %s" % (os.path.basename(swig) + (is_cpp and "++" or ""), source)) self.spawn(swig_cmd + self.swig_opts + ["-o", target, "-outdir", py_target_dir, source]) else: log.debug(" skipping '%s' swig interface (up-to-date)" % (source)) return new_sources + py_files
def build_extension(self, ext): sources = ext.sources if sources is None or not is_sequence(sources): raise DistutilsSetupError( ("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % ext.name) sources = list(sources) if not sources: return fullname = self.get_ext_fullname(ext.name) if self.inplace: modpath = fullname.split('.') package = '.'.join(modpath[0:-1]) base = modpath[-1] build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(package) ext_filename = os.path.join(package_dir, self.get_ext_filename(base)) else: ext_filename = os.path.join(self.build_lib, self.get_ext_filename(fullname)) depends = sources + ext.depends force_rebuild = self.force if not self.disable_optimization and not self.compiler_opt.is_cached(): log.debug("Detected changes on compiler optimizations") force_rebuild = True if not (force_rebuild or newer_group(depends, ext_filename, 'newer')): log.debug("skipping '%s' extension (up-to-date)", ext.name) return else: log.info("building '%s' extension", ext.name) extra_args = ext.extra_compile_args or [] macros = ext.define_macros[:] for undef in ext.undef_macros: macros.append((undef, )) c_sources, cxx_sources, f_sources, fmodule_sources = \ filter_sources(ext.sources) if self.compiler.compiler_type == 'msvc': if cxx_sources: # Needed to compile kiva.agg._agg extension. extra_args.append('/Zm1000') # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] # Set Fortran/C++ compilers for compilation and linking. if ext.language == 'f90': fcompiler = self._f90_compiler elif ext.language == 'f77': fcompiler = self._f77_compiler else: # in case ext.language is c++, for instance fcompiler = self._f90_compiler or self._f77_compiler if fcompiler is not None: fcompiler.extra_f77_compile_args = ( ext.extra_f77_compile_args or []) if hasattr( ext, 'extra_f77_compile_args') else [] fcompiler.extra_f90_compile_args = ( ext.extra_f90_compile_args or []) if hasattr( ext, 'extra_f90_compile_args') else [] cxx_compiler = self._cxx_compiler # check for the availability of required compilers if cxx_sources and cxx_compiler is None: raise DistutilsError("extension %r has C++ sources" "but no C++ compiler found" % (ext.name)) if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError("extension %r has Fortran sources " "but no Fortran compiler found" % (ext.name)) if ext.language in ['f77', 'f90'] and fcompiler is None: self.warn("extension %r has Fortran libraries " "but no Fortran linker found, using default linker" % (ext.name)) if ext.language == 'c++' and cxx_compiler is None: self.warn("extension %r has C++ libraries " "but no C++ linker found, using default linker" % (ext.name)) kws = {'depends': ext.depends} output_dir = self.build_temp include_dirs = ext.include_dirs + get_numpy_include_dirs() # filtering C dispatch-table sources when optimization is not disabled, # otherwise treated as normal sources. 
copt_c_sources = [] copt_cxx_sources = [] copt_baseline_flags = [] copt_macros = [] if not self.disable_optimization: bsrc_dir = self.get_finalized_command("build_src").build_src dispatch_hpath = os.path.join("numpy", "distutils", "include") dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath) include_dirs.append(dispatch_hpath) copt_build_src = None if self.inplace else bsrc_dir for _srcs, _dst, _ext in (((c_sources, ), copt_c_sources, ('.dispatch.c', )), ((c_sources, cxx_sources), copt_cxx_sources, ('.dispatch.cpp', '.dispatch.cxx'))): for _src in _srcs: _dst += [ _src.pop(_src.index(s)) for s in _src[:] if s.endswith(_ext) ] copt_baseline_flags = self.compiler_opt.cpu_baseline_flags() else: copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1)) c_objects = [] if copt_cxx_sources: log.info("compiling C++ dispatch-able sources") c_objects += self.compiler_opt.try_dispatch( copt_cxx_sources, output_dir=output_dir, src_dir=copt_build_src, macros=macros + copt_macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, ccompiler=cxx_compiler, **kws) if copt_c_sources: log.info("compiling C dispatch-able sources") c_objects += self.compiler_opt.try_dispatch( copt_c_sources, output_dir=output_dir, src_dir=copt_build_src, macros=macros + copt_macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) if c_sources: log.info("compiling C sources") c_objects += self.compiler.compile(c_sources, output_dir=output_dir, macros=macros + copt_macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args + copt_baseline_flags, **kws) if cxx_sources: log.info("compiling C++ sources") c_objects += cxx_compiler.compile(cxx_sources, output_dir=output_dir, macros=macros + copt_macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args + copt_baseline_flags, **kws) extra_postargs = [] f_objects = [] if fmodule_sources: log.info("compiling Fortran 90 module sources") module_dirs = ext.module_dirs[:] module_build_dir = os.path.join( self.build_temp, os.path.dirname(self.get_ext_filename(fullname))) self.mkpath(module_build_dir) if fcompiler.module_dir_switch is None: existing_modules = glob('*.mod') extra_postargs += fcompiler.module_options(module_dirs, module_build_dir) f_objects += fcompiler.compile(fmodule_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) if fcompiler.module_dir_switch is None: for f in glob('*.mod'): if f in existing_modules: continue t = os.path.join(module_build_dir, f) if os.path.abspath(f) == os.path.abspath(t): continue if os.path.isfile(t): os.remove(t) try: self.move_file(f, module_build_dir) except DistutilsFileError: log.warn('failed to move %r to %r' % (f, module_build_dir)) if f_sources: log.info("compiling Fortran sources") f_objects += fcompiler.compile(f_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) if f_objects and not fcompiler.can_ccompiler_link(self.compiler): unlinkable_fobjects = f_objects objects = c_objects else: unlinkable_fobjects = [] objects = c_objects + f_objects if ext.extra_objects: objects.extend(ext.extra_objects) extra_args = ext.extra_link_args or [] libraries = self.get_libraries(ext)[:] library_dirs = ext.library_dirs[:] linker = self.compiler.link_shared_object # Always use system linker when using MSVC compiler. 
if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'): # expand libraries with fcompiler libraries as we are # not using fcompiler linker self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs) elif ext.language in ['f77', 'f90'] and fcompiler is not None: linker = fcompiler.link_shared_object if ext.language == 'c++' and cxx_compiler is not None: linker = cxx_compiler.link_shared_object if fcompiler is not None: objects, libraries = self._process_unlinkable_fobjects( objects, libraries, fcompiler, library_dirs, unlinkable_fobjects) linker(objects, ext_filename, libraries=libraries, library_dirs=library_dirs, runtime_library_dirs=ext.runtime_library_dirs, extra_postargs=extra_args, export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, target_lang=ext.language)
    def run(self):
        if not self.extensions:
            return

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        if self.distribution.has_c_libraries():
            if self.inplace:
                if self.distribution.have_run.get('build_clib'):
                    log.warn('build_clib already run, it is too late to '
                             'ensure in-place build of build_clib')
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                else:
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                    build_clib.inplace = 1
                    build_clib.ensure_finalized()
                    build_clib.run()
                    self.distribution.have_run['build_clib'] = 1
            else:
                self.run_command('build_clib')
                build_clib = self.get_finalized_command('build_clib')
            self.library_dirs.append(build_clib.build_clib)
        else:
            build_clib = None

        # Do not add C libraries to the list of extension libraries
        # automatically, to prevent bogus linking commands. Extensions must
        # explicitly specify the C libraries that they use.

        from distutils.ccompiler import new_compiler
        from numpy.distutils.fcompiler import new_fcompiler

        compiler_type = self.compiler
        # Initialize C compiler:
        self.compiler = new_compiler(compiler=compiler_type,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution)
        self.compiler.customize_cmd(self)

        if self.warn_error:
            self.compiler.compiler.append('-Werror')
            self.compiler.compiler_so.append('-Werror')

        self.compiler.show_customization()

        if not self.disable_optimization:
            dispatch_hpath = os.path.join("numpy", "distutils", "include",
                                          "npy_cpu_dispatch_config.h")
            dispatch_hpath = os.path.join(
                self.get_finalized_command("build_src").build_src,
                dispatch_hpath)
            opt_cache_path = os.path.abspath(
                os.path.join(self.build_temp, 'ccompiler_opt_cache_ext.py'))
            if hasattr(self, "compiler_opt"):
                # By default `CCompilerOpt` updates the cache at process
                # exit, which may lead to duplicate building (see
                # build_extension()/force_rebuild) if run() is called
                # multiple times within the same OS process/thread without
                # giving the previous instances of `CCompilerOpt` a chance
                # to update the cache.
                self.compiler_opt.cache_flush()

            self.compiler_opt = new_ccompiler_opt(
                compiler=self.compiler, dispatch_hpath=dispatch_hpath,
                cpu_baseline=self.cpu_baseline,
                cpu_dispatch=self.cpu_dispatch,
                cache_path=opt_cache_path)

            def report(copt):
                log.info("\n########### EXT COMPILER OPTIMIZATION ###########")
                log.info(copt.report(full=True))

            import atexit
            atexit.register(report, self.compiler_opt)

        # Setup directory for storing generated extra DLL files on Windows
        self.extra_dll_dir = os.path.join(self.build_temp, '.libs')
        if not os.path.isdir(self.extra_dll_dir):
            os.makedirs(self.extra_dll_dir)

        # Create mapping of libraries built by build_clib:
        clibs = {}
        if build_clib is not None:
            for libname, build_info in build_clib.libraries or []:
                if libname in clibs and clibs[libname] != build_info:
                    log.warn('library %r defined more than once,'
                             ' overwriting build_info\n%s... \nwith\n%s...'
                             % (libname, repr(clibs[libname])[:300],
                                repr(build_info)[:300]))
                clibs[libname] = build_info
        # .. and distribution libraries:
        for libname, build_info in self.distribution.libraries or []:
            if libname in clibs:
                # build_clib libraries take precedence over distribution ones
                continue
            clibs[libname] = build_info
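        # A minimal sketch (hypothetical library name and values, not read
        # from any real setup.py) of the build_info dictionaries collected in
        # `clibs` above; these are the keys consulted in the per-extension
        # loop below:
        #
        #     clibs['example_clib'] = {
        #         'libraries': ['m'],
        #         'library_dirs': ['/opt/example/lib'],
        #         'macros': [('HAVE_EXAMPLE', 1)],
        #         'source_languages': ['c'],
        #     }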
        # Determine if C++/Fortran 77/Fortran 90 compilers are needed.
        # Update extension libraries, library_dirs, and macros.
        all_languages = set()
        for ext in self.extensions:
            ext_languages = set()
            c_libs = []
            c_lib_dirs = []
            macros = []
            for libname in ext.libraries:
                if libname in clibs:
                    binfo = clibs[libname]
                    c_libs += binfo.get('libraries', [])
                    c_lib_dirs += binfo.get('library_dirs', [])
                    for m in binfo.get('macros', []):
                        if m not in macros:
                            macros.append(m)
                for l in clibs.get(libname, {}).get('source_languages', []):
                    ext_languages.add(l)
            if c_libs:
                new_c_libs = ext.libraries + c_libs
                log.info('updating extension %r libraries from %r to %r'
                         % (ext.name, ext.libraries, new_c_libs))
                ext.libraries = new_c_libs
                ext.library_dirs = ext.library_dirs + c_lib_dirs
            if macros:
                log.info('extending extension %r defined_macros with %r'
                         % (ext.name, macros))
                ext.define_macros = ext.define_macros + macros

            # determine extension languages
            if has_f_sources(ext.sources):
                ext_languages.add('f77')
            if has_cxx_sources(ext.sources):
                ext_languages.add('c++')
            l = ext.language or self.compiler.detect_language(ext.sources)
            if l:
                ext_languages.add(l)

            # reset language attribute for choosing proper linker
            if 'c++' in ext_languages:
                ext_language = 'c++'
            elif 'f90' in ext_languages:
                ext_language = 'f90'
            elif 'f77' in ext_languages:
                ext_language = 'f77'
            else:
                ext_language = 'c'  # default
            if l and l != ext_language and ext.language:
                log.warn('resetting extension %r language from %r to %r.'
                         % (ext.name, l, ext_language))
            ext.language = ext_language

            # global language
            all_languages.update(ext_languages)

        need_f90_compiler = 'f90' in all_languages
        need_f77_compiler = 'f77' in all_languages
        need_cxx_compiler = 'c++' in all_languages

        # Initialize C++ compiler:
        if need_cxx_compiler:
            self._cxx_compiler = new_compiler(compiler=compiler_type,
                                              verbose=self.verbose,
                                              dry_run=self.dry_run,
                                              force=self.force)
            compiler = self._cxx_compiler
            compiler.customize(self.distribution, need_cxx=need_cxx_compiler)
            compiler.customize_cmd(self)
            compiler.show_customization()
            self._cxx_compiler = compiler.cxx_compiler()
        else:
            self._cxx_compiler = None

        # Initialize Fortran 77 compiler:
        if need_f77_compiler:
            ctype = self.fcompiler
            self._f77_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=False,
                                               c_compiler=self.compiler)
            fcompiler = self._f77_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f77_compiler=%s is not available.' % (ctype))
                self._f77_compiler = None
        else:
            self._f77_compiler = None

        # Initialize Fortran 90 compiler:
        if need_f90_compiler:
            ctype = self.fcompiler
            self._f90_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=True,
                                               c_compiler=self.compiler)
            fcompiler = self._f90_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f90_compiler=%s is not available.' % (ctype))
                self._f90_compiler = None
        else:
            self._f90_compiler = None

        # Build extensions
        self.build_extensions()
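        # Worked example (hypothetical extension, recapping the language
        # resolution earlier in this method): an Extension whose sources are
        # ['solver.f90', 'glue.cpp'] collects at least {'f77', 'c++'} in
        # ext_languages, and the precedence c++ > f90 > f77 > c resolves
        # ext.language to 'c++', so its link step above used the C++ linker.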
        # Copy over any extra DLL files
        # FIXME: In the case where there is more than one package, we blindly
        # assume that every package needs all of the libraries, resulting in
        # a larger wheel than is required. This should be fixed, but it's so
        # rare that I won't bother to handle it.
        pkg_roots = {
            self.get_ext_fullname(ext.name).split('.')[0]
            for ext in self.extensions
        }
        for pkg_root in pkg_roots:
            shared_lib_dir = os.path.join(pkg_root, '.libs')
            if not self.inplace:
                shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir)
            for fn in os.listdir(self.extra_dll_dir):
                if not os.path.isdir(shared_lib_dir):
                    os.makedirs(shared_lib_dir)
                if not fn.lower().endswith('.dll'):
                    continue
                runtime_lib = os.path.join(self.extra_dll_dir, fn)
                copy_file(runtime_lib, shared_lib_dir)
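    # Usage sketch (hypothetical project, not part of this module): a
    # numpy.distutils-based setup.py along these lines ends up driving this
    # build_ext command, e.g. via ``python setup.py build_ext --inplace``:
    #
    #     from numpy.distutils.core import setup, Extension
    #
    #     setup(name='example',
    #           ext_modules=[Extension('example._solver',
    #                                  sources=['example/_solver.pyf',
    #                                           'example/solver.f90'])])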