def build_extension(self, ext):
    """ Compile manually the py_mini_racer extension, bypass setuptools """
    try:
        if not is_v8_built():
            # Make sure the V8 engine is compiled before building the wrapper.
            self.run_command('build_v8')
        # Force a debug build unconditionally.
        self.debug = True
        if V8_PATH:
            # A prebuilt V8 binary was supplied externally: copy it into the
            # build tree instead of compiling anything.
            dest_filename = join(self.build_lib, "py_mini_racer")
            copy_file(V8_PATH, dest_filename, verbose=self.verbose, dry_run=self.dry_run)
        else:
            # Normal path: let distutils compile the extension.
            build_ext.build_extension(self, ext)
    except Exception as e:
        traceback.print_exc()
        # Alter message
        err_msg = """py_mini_racer failed to build, ensure you have an up-to-date pip (>= 8.1) to use the wheel instead
To update pip: 'pip install -U pip'
See also: https://github.com/sqreen/PyMiniRacer#binary-builds-availability
Original error: %s"""
        raise Exception(err_msg % repr(e))
def build_extension(self, ext):
    """Concretely builds the extension given as input.

    Cleans bogus ``-L`` entries out of the linker command line and, when
    the linker supports it, appends ``-Wl,--no-as-needed`` so unused
    symbols can be dropped from linked libraries.
    """

    def linker_can_remove_symbols(linker):
        """Tests if the `ld` linker can remove unused symbols from linked
        libraries. In this case, use the --no-as-needed flag during link."""
        import tempfile

        fd, name = tempfile.mkstemp()
        # Fix: close the descriptor returned by mkstemp; `del f` only
        # removed the name binding and leaked the open fd.
        os.close(fd)
        cmd = linker + ['-Wl,--no-as-needed', '-lm', '-o', name]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        # The output is not inspected; just wait for the link attempt.
        proc.communicate()
        if os.path.exists(name):
            os.unlink(name)
        return proc.returncode == 0

    def ld_ok(opt):
        """Tells if a certain option is a go for the linker."""
        return not opt.startswith('-L')

    # Some clean-up on the linker which is screwed up...
    self.compiler.linker_so = [k for k in self.compiler.linker_so if ld_ok(k)]
    if self.linker_is_smart is None:
        self.linker_is_smart = linker_can_remove_symbols(self.compiler.linker_so)
    if self.linker_is_smart:
        self.compiler.linker_so += ['-Wl,--no-as-needed']
    build_ext_base.build_extension(self, ext)
def build_extension(self, ext): """Builds the given extension. When the extension is of type Library, it compiles the library with CMake, otherwise the default compilation mechanism is used. Afterwards, it adds the according library, and the include and library directories of the Library's, so that other Extensions can find the newly generated lib. """ # HACK: remove the "-Wstrict-prototypes" option keyword self.compiler.compiler = [c for c in self.compiler.compiler if c != "-Wstrict-prototypes"] self.compiler.compiler_so = [c for c in self.compiler.compiler_so if c != "-Wstrict-prototypes"] if "-Wno-strict-aliasing" not in self.compiler.compiler: self.compiler.compiler.append("-Wno-strict-aliasing") if "-Wno-strict-aliasing" not in self.compiler.compiler_so: self.compiler.compiler_so.append("-Wno-strict-aliasing") # check if it is our type of extension if isinstance(ext, Library): # TODO: get compiler and add it to the compiler # TODO: get the debug status and add the build_type parameter # build libraries using the provided functions # compile ext.compile(self.build_lib) libs = [ext.c_name] lib_dirs = [ext.c_target_directory] include_dirs = [ext.c_self_include_directory] # set the DEFAULT library path and include path for all other extensions for other_ext in self.extensions: if other_ext != ext: other_ext.libraries = libs + (other_ext.libraries if other_ext.libraries else []) other_ext.library_dirs = lib_dirs + (other_ext.library_dirs if other_ext.library_dirs else []) other_ext.include_dirs = include_dirs + (other_ext.include_dirs if other_ext.include_dirs else []) else: # all other libs are build with the default command _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Build one extension; turn compiler failures into a loud warning.

    The Vision Egg can run without its optional C extensions, so a
    compile error only prints a banner and sets the module-level
    ``extension_build_failed`` flag instead of aborting the install.

    Fix: converted the Python 2-only ``except E, x`` and print-statement
    syntax to Python 3 compatible forms.
    """
    try:
        build_ext.build_extension(self, ext)
    except CCompilerError:
        # The exception object was never used, so no `as` binding.
        print(('*' * 70 + '\n') * 3)
        print("""WARNING: The %s extension module to the Vision Egg could not be compiled. The Vision Egg should run, but the features present in that file will not be available. Above is the ouput showing how the compilation failed.""" % ext.name)
        if sys.platform == 'win32':
            print()
            print("""I see you are using Windows. The default compiler for this platform is the Microsoft Visual Studio C compiler. However, a free alternative compiler called mingw can be used instead.""")
            print()
        print(('*' * 70 + '\n') * 3)
        # Record that at least one extension failed to build.
        global extension_build_failed
        if not extension_build_failed:
            extension_build_failed = 1
def build_extension(self, ext):
    """Build ``ext``; SharedLibrary instances run their shell command instead.

    For a SharedLibrary the configured command is executed (aborting the
    install on a non-zero exit status) and the produced library is copied
    to the location distutils expects.
    """
    if not isinstance(ext, SharedLibrary):
        build_ext.build_extension(self, ext)
        return
    distutils.log.info("running '{}'".format(ext.cmd))
    if not self.dry_run:
        status = subprocess.Popen(
            ext.cmd, cwd=ext.cwd, env=ext.env, shell=True).wait()
        if status != 0:
            sys.exit(status)
    built_name = ext.name.split(".")[-1] + ext.suffix
    built_path = path.join(ext.output_dir, built_name)
    target = self.get_ext_fullpath(ext.name)
    distutils.dir_util.mkpath(
        path.dirname(target), verbose=self.verbose, dry_run=self.dry_run)
    distutils.file_util.copy_file(
        built_path, target, verbose=self.verbose, dry_run=self.dry_run)
def build_extension(self, ext):
    """Inject optional PCRE support and common compile flags, then build."""
    global enable_pcre
    if enable_pcre:
        # PCRE support: define the macro and link against libpcre only.
        ext.extra_compile_args.append('-DENABLE_PCRE')
        ext.libraries = ['pcre']
    for flag in ('-Wall', '-DVERSION=%s' % version):
        ext.extra_compile_args.append(flag)
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Compile the py_mini_racer extension by hand, bypassing setuptools."""
    if not is_v8_built():
        # The V8 engine must be compiled before the wrapper extension.
        self.run_command('build_v8')
    # Always produce a debug build.
    self.debug = True
    build_ext.build_extension(self, ext)
def build_torch_extension(build_ext, options, abi_compile_flags):
    """Build the two PyTorch FFI extensions (interface and implementation).

    Raises DistutilsPlatformError when GPU support was requested but the
    installed PyTorch has no CUDA. ``abi_compile_flags`` carries the
    _GLIBCXX_USE_CXX11_ABI flags so all libraries share one C++ ABI.
    """
    check_torch_import()
    have_cuda = is_torch_cuda()
    if not have_cuda and check_macro(options['MACROS'], 'HAVE_CUDA'):
        raise DistutilsPlatformError(
            'Horovod build with GPU support was requested, but this PyTorch '
            'installation does not support CUDA.')
    # Update HAVE_CUDA to mean that PyTorch supports CUDA. Internally, we will be checking
    # HOROVOD_GPU_(ALLREDUCE|ALLGATHER|BROADCAST) to decide whether we should use GPU
    # version or transfer tensors to CPU memory for those operations.
    updated_macros = set_macro(
        options['MACROS'], 'HAVE_CUDA', str(int(have_cuda)))
    # Create_extension overwrites these files which are customized, we need to protect them.
    with protect_files('horovod/torch/mpi_lib/__init__.py',
                       'horovod/torch/mpi_lib_impl/__init__.py'):
        from torch.utils.ffi import create_extension
        # Thin C interface extension (headers only, no sources).
        ffi_iface = create_extension(
            name='horovod.torch.mpi_lib',
            headers=['horovod/torch/interface.h'] +
                    (['horovod/torch/interface_cuda.h'] if have_cuda else []),
            with_cuda=have_cuda,
            language='c',
            package=True,
            sources=[],
            extra_compile_args=['-std=c11', '-fPIC', '-O2'])
        # C++ implementation extension carrying the actual MPI operations.
        ffi_impl = create_extension(
            name='horovod.torch.mpi_lib_impl',
            headers=[],
            with_cuda=have_cuda,
            language='c++',
            package=True,
            source_extension='.cc',
            define_macros=updated_macros,
            include_dirs=options['INCLUDES'],
            sources=options['SOURCES'] + ['horovod/torch/mpi_ops.cc',
                                          'horovod/torch/handle_manager.cc',
                                          'horovod/torch/ready_event.cc',
                                          'horovod/torch/tensor_util.cc',
                                          'horovod/torch/cuda_util.cc',
                                          'horovod/torch/adapter.cc'],
            extra_compile_args=options['COMPILE_FLAGS'] + abi_compile_flags,
            extra_link_args=options['LINK_FLAGS'],
            library_dirs=options['LIBRARY_DIRS'],
            libraries=options['LIBRARIES'])
    # Copy each ffi extension's configuration onto the pre-declared
    # setuptools Extension objects, then build them.
    for ffi, setuptools_ext in [(ffi_iface, torch_mpi_lib),
                                (ffi_impl, torch_mpi_lib_impl)]:
        ffi_ext = ffi.distutils_extension()
        # ffi_ext is distutils Extension, not setuptools Extension
        for k, v in ffi_ext.__dict__.items():
            setuptools_ext.__dict__[k] = v
        build_ext.build_extension(setuptools_ext)
def build_extension(self, ext):
    """Build ``ext``; on any failure emit a warning instead of aborting.

    Fixes: the two warning fragments were concatenated without a
    separator ("...extension moduleThe output above..."), and the
    exception is now bound with ``as`` instead of ``sys.exc_info()``.
    """
    name = ext.name
    try:
        build_ext.build_extension(self, ext)
    except Exception as e:
        sys.stdout.write('%s\n' % str(e))
        warnings.warn(self.warning_message
                      + "The %s extension module. " % (name,)
                      + "The output above this warning shows how the compilation failed.")
def build_extension(self, extension):
    """Build ``extension``, first injecting the numpy include directory.

    The numpy include path is resolved lazily — right before compiling —
    so importing the setup script never requires numpy to be installed.
    """
    if isinstance(extension, ExtensionUsingNumpy):
        incpath = get_numpy_incpath()
        if incpath not in extension.include_dirs:
            extension.include_dirs.append(incpath)
    BaseBuildExtCommand.build_extension(self, extension)
def build_extension(self, ext):
    """Wrap `build_extension` with `BuildFailed`."""
    try:
        build_ext.build_extension(self, ext)
    except ext_errors:
        raise BuildFailed()
    except ValueError:
        # Windows 64-bit quirk, see Python issue 7511; message check
        # works on both Python 2 and 3.
        if "'path'" in str(sys.exc_info()[1]):
            raise BuildFailed()
        raise
def build_common_extension(build_ext, options, abi_compile_flags):
    """Configure and build the Horovod common MPI library.

    Fills in the module-level ``common_mpi_lib`` Extension from the shared
    ``options`` dict plus the ABI flags, then compiles it via the given
    build_ext command.
    """
    common_mpi_lib.define_macros = options['MACROS']
    common_mpi_lib.include_dirs = options['INCLUDES']
    common_mpi_lib.sources = options['SOURCES'] + ['horovod/common/common.cc',
                                                   'horovod/common/mpi_message.cc',
                                                   'horovod/common/operations.cc',
                                                   'horovod/common/timeline.cc']
    # abi_compile_flags keeps the C++ ABI consistent across all libraries.
    common_mpi_lib.extra_compile_args = options['COMPILE_FLAGS'] + abi_compile_flags
    common_mpi_lib.extra_link_args = options['LINK_FLAGS']
    common_mpi_lib.library_dirs = options['LIBRARY_DIRS']
    common_mpi_lib.libraries = options['LIBRARIES']
    build_ext.build_extension(common_mpi_lib)
def build_extension(self, ext):
    """Locate the Boost headers, add them, then run the standard build."""
    search_dirs = ext.include_dirs + self.compiler.include_dirs
    boost_include = find_boost(hint=search_dirs)
    if boost_include is None:
        raise RuntimeError("Required library Boost not found. "
                           "Check the documentation for solutions.")
    ext.include_dirs.append(boost_include)
    _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Overridden method. Builds each Extension with rpath handling."""
    ext.runtime_library_dirs = self.rpath
    # macOS ignores runtime_library_dirs, so pass -rpath to the linker.
    if platform.system() == 'Darwin':
        ext.extra_link_args.extend('-Wl,-rpath,' + p for p in self.rpath)
    # Skip the MS extension entirely when liboskar_ms is absent.
    if 'measurement_set' in ext.name and \
            not self.dir_contains('oskar_ms.', self.library_dirs):
        return
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Resolve "smart" library objects into concrete build settings.

    Entries in ``ext.libraries`` may be plain library names (strings) or
    objects exposing ``find_include``/``find_libraries``; the latter are
    expanded into include dirs, library dirs, library names and extra
    compile args before delegating to the standard build.
    """
    include_dirs = ext.include_dirs + self.compiler.include_dirs
    library_dirs = ext.library_dirs + self.compiler.library_dirs
    pending = list(ext.libraries)
    ext.libraries = []
    for lib in pending:
        if not hasattr(lib, "find_include"):
            # BUG FIX: `ext.libraries += lib` extended the list with the
            # *characters* of the library name string; append the name.
            ext.libraries.append(lib)
            continue
        ext.include_dirs += lib.find_include(hint=include_dirs)[1]
        # Renamed from `libs` to avoid rebinding the iterated variable.
        lib_paths, lib_names = lib.find_libraries(hint=library_dirs)
        ext.library_dirs += lib_paths
        ext.libraries += lib_names
        ext.extra_compile_args += lib.extra_compile_args()
    _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Add the Eigen headers plus warning suppressions, then build."""
    hint_dirs = ext.include_dirs + self.compiler.include_dirs
    eigen_include = find_eigen(hint=hint_dirs)
    if eigen_include is None:
        raise RuntimeError("Required library Eigen not found. "
                           "Check the documentation for solutions.")
    ext.include_dirs.append(eigen_include)
    ext.extra_compile_args.extend(["-Wno-unused-function",
                                   "-Wno-uninitialized"])
    _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Configure include paths, optional native dependencies and feature
    macros for the spatialite extension, then run the regular build."""
    # Collect -I directories from every compiler/linker command line,
    # searching the module directories first.
    found = ['/usr/local/include', '/usr/include']
    for tool in (self.compiler.compiler, self.compiler.compiler_so,
                 self.compiler.compiler_cxx, self.compiler.linker_so,
                 self.compiler.linker_exe, self.compiler.preprocessor):
        found.extend(self.strip_includes(tool))
    self.compiler.include_dirs.extend(self.uniq(found))
    if self.with_proj4:
        self.check_header(ext, "proj_api.h")
        self.check_lib(ext, "pj_init_plus", "proj",
                       "'libproj' is required but it doesn't seem to be installed on this system.",
                       ["m"])
    else:
        ext.extra_compile_args.append("-DOMIT_PROJ")
    if self.with_geos:
        self.check_header(ext, "geos_c.h")
        self.check_lib(ext, "GEOSTopologyPreserveSimplify", "geos_c",
                       "'libgeos_c' is required but it doesn't seem to be installed on this system.",
                       ["m", "geos"])
        if self.with_geosadvanced:
            ext.extra_compile_args.append("-DGEOS_ADVANCED")
            self.check_lib(ext, "GEOSCoveredBy", "geos_c",
                           "obsolete 'libgeos_c' (< v.3.3.0). \nplease retry specifying: --without-geosadvanced.",
                           ["m", "geos"])
    else:
        ext.extra_compile_args.append("-DOMIT_GEOS")
    if self.with_iconv:
        # macOS ships iconv as a separate library; elsewhere only link it
        # when the C library itself lacks the function.
        if sys.platform.startswith("darwin") or \
                not self.compiler.has_function("iconv"):
            ext.libraries.append("iconv")
    else:
        ext.extra_compile_args.append("-DOMIT_ICONV")
    if self.with_freexl:
        self.check_header(ext, "freexl.h")
        self.check_lib(ext, "freexl_open", "freexl",
                       "'libfreexl' is required but it doesn't seem to be installed on this system.",
                       ["m"])
    else:
        ext.extra_compile_args.append("-DOMIT_FREEXL")
    if self.with_spatialite_init_ex:
        self.check_header(ext, "spatialite.h")
        self.check_lib(ext, "spatialite_init_ex", "spatialite",
                       "'libspatialite' is required but it doesn't seem to be installed on this system.",
                       ["m"])
        ext.extra_compile_args.append("-DSPATIALITE_HAS_INIT_EX=1")
    else:
        ext.extra_compile_args.append("-DSPATIALITE_HAS_INIT_EX=0")
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Build ``ext`` with numpy headers; optionally tolerate failure.

    Appends numpy's include directory, then compiles. When FAIL_ON_ERROR
    is false a failed build only prints a banner and the pure Python
    implementation is used instead.
    """
    import numpy
    numpy_incl = numpy.get_include()
    # Idiom fix: `not x in y` -> `x not in y`.
    if hasattr(ext, 'include_dirs') and numpy_incl not in ext.include_dirs:
        ext.include_dirs.append(numpy_incl)
    try:
        build_ext.build_extension(self, ext)
    except (CompileError, DistutilsPlatformError):
        if FAIL_ON_ERROR:
            # Bare raise keeps the original traceback intact.
            raise
        error_msg = ('Building %s failed (see error message(s) '
                     'above) -- pure Python version will be used '
                     'instead.') % ext.name
        sys.stderr.write('*' * len(error_msg) + '\n' + error_msg + '\n' +
                         '*' * len(error_msg) + '\n')
def build_extension(self, ext):
    """Prepare the extension for gevent, then build.

    Optional extensions that fail to compile raise BuildFailed; failures
    in mandatory extensions propagate unchanged.
    """
    self.gevent_prepare(ext)
    try:
        return build_ext.build_extension(self, ext)
    except ext_errors:
        if not getattr(ext, 'optional', False):
            raise
        raise BuildFailed()
def build_tf_extension(build_ext, options):
    """Configure and build the Horovod TensorFlow MPI library.

    Fills in the module-level ``tensorflow_mpi_lib`` Extension from the
    shared ``options`` dict and the flags TensorFlow requires, builds it,
    and returns the detected C++ ABI flags for reuse by other libraries.
    """
    check_tf_version()
    tf_compile_flags, tf_link_flags = get_tf_flags(build_ext, options['COMPILE_FLAGS'])
    tensorflow_mpi_lib.define_macros = options['MACROS']
    tensorflow_mpi_lib.include_dirs = options['INCLUDES']
    tensorflow_mpi_lib.sources = options['SOURCES'] + ['horovod/tensorflow/mpi_ops.cc']
    tensorflow_mpi_lib.extra_compile_args = options['COMPILE_FLAGS'] + tf_compile_flags
    tensorflow_mpi_lib.extra_link_args = options['LINK_FLAGS'] + tf_link_flags
    tensorflow_mpi_lib.library_dirs = options['LIBRARY_DIRS']
    tensorflow_mpi_lib.libraries = options['LIBRARIES']
    build_ext.build_extension(tensorflow_mpi_lib)
    # Return ABI flags used for TensorFlow compilation. We will use this flag
    # to compile all the libraries.
    return [flag for flag in tf_compile_flags if '_GLIBCXX_USE_CXX11_ABI' in flag]
def build_extension(self, ext):
    """Inject the Eigen 3 and HODLR header locations, then build normally."""
    hint = ext.include_dirs + self.compiler.include_dirs
    eigen_include = find_eigen(hint=hint)
    if eigen_include is None:
        raise RuntimeError("Required library Eigen 3 not found. "
                           "Check the documentation for solutions.")
    hodlr_include = find_hodlr(hint=hint)
    if hodlr_include is None:
        raise RuntimeError("Required library HODLR not found. "
                           "Check the documentation for solutions.")
    ext.include_dirs.extend([eigen_include, hodlr_include])
    _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Strip -Wstrict-prototypes before building.

    The option isn't supported for C++, so removing it avoids annoying
    warnings during compilation.
    """
    try:
        self.compiler.compiler_so.remove('-Wstrict-prototypes')
    except (AttributeError, ValueError):
        # Either the compiler has no compiler_so list or the flag is absent.
        pass
    return du_build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Cythonize any ``.pyx`` sources in place, then run the normal build.

    Each ``.pyx`` source is compiled to C++ (also generating a ``.pxi``);
    on success the generated file replaces the ``.pyx`` entry in
    ``ext.sources``. Sources that fail to compile are left untouched so
    the default build surfaces the error.
    """
    # enumerate() instead of range(len(...)): we need both the index
    # (to replace the entry) and the value.
    for i, source in enumerate(ext.sources):
        if os.path.splitext(source)[1] == ".pyx":
            result = compile_pyx(source, cplus=True, generate_pxi=True,
                                 full_module_name=ext.name)
            if result.num_errors == 0:
                ext.sources[i] = result.c_file
    return _build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Concretely builds the extension given as input."""

    def linker_can_remove_symbols(linker):
        """Tests if the `ld` linker can remove unused symbols from linked
        libraries. In this case, use the --no-as-needed flag during link."""
        import tempfile

        fd, name = tempfile.mkstemp()
        # Fix: close the descriptor returned by mkstemp; `del f` only
        # removed the name binding and leaked the open fd.
        os.close(fd)
        cmd = linker + ["-Wl,--no-as-needed", "-lm", "-o", name]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        # The captured output was decoded but never used; just wait.
        proc.communicate()
        if os.path.exists(name):
            os.unlink(name)
        return proc.returncode == 0

    def ld_ok(opt):
        """Tells if a certain option is a go for the linker."""
        return not opt.startswith("-L")

    # Some clean-up on the linker which is screwed up...
    self.compiler.linker_so = [k for k in self.compiler.linker_so if ld_ok(k)]
    if self.linker_is_smart is None:
        self.linker_is_smart = linker_can_remove_symbols(self.compiler.linker_so)
    if self.linker_is_smart:
        self.compiler.linker_so += ["-Wl,--no-as-needed"]
    # MinGW's compiler object expects dll_libraries to be a list, not None.
    if hasattr(self.compiler, "dll_libraries") and self.compiler.dll_libraries is None:
        self.compiler.dll_libraries = []
    build_ext_base.build_extension(self, ext)
def build_extension(self, ext):
    """Link against a termcap provider and prepare the bundled readline
    source tree before the standard build."""
    # RL_TERMCAP overrides auto-detection; an empty value falls through.
    termcap = os.environ.get('RL_TERMCAP') or self.find_termcap(ext)
    if termcap:
        ext.libraries.append(termcap)
    else:
        log.warn('WARNING: Failed to find a termcap library')
    # Prepare the source tree
    self.configure_static_readline()
    return build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Adjust optimization/debug flags to match ``self.debug``, then build."""
    ccflags = ext.extra_compile_args
    linkflags = ext.extra_link_args
    if self.debug:
        # Debug build: drop NDEBUG, disable optimization, keep symbols,
        # and enable the heavy internal assertions.
        if "-DNDEBUG" in ccflags:
            ccflags.remove("-DNDEBUG")
        ccflags += ["-O0", "-g", "-DHEAVY_DEBUG"]
        linkflags += ["-O0"]
    else:
        # Release build: full optimization for compile and link.
        linkflags.append("-O3")
        ccflags.append("-O3")
    return build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Run the libcouchbase preparation steps, then the stock build.

    The three calls are order-dependent: rpath/info first, then source
    prep, then the bundled include/library dirs with the LCB API flags.
    """
    self.init_info_and_rpaths(ext)
    self.prep_build(ext)
    self.add_inc_and_lib_bundled(ext, self.get_lcb_api_flags())
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Dispatch Java extensions to the JVM build path; others build normally."""
    if ext.language == "java":
        return self.build_java_ext(ext)
    # NOTE(review): looks like a leftover debug print — confirm before removing.
    print("Call build ext")
    return build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Attempt the build; on failure report it and drop all extensions so
    the install continues as pure Python."""
    try:
        build_ext.build_extension(self, ext)
    except Exception as exc:
        self._unavailable(exc)
        # Avoid copying missing files later on (it would fail).
        self.extensions = []
def build_python(self, ext):
    """Point the extension at the temporary build directory, then build."""
    temp_dir = pathlib.Path(self.build_temp)
    ext.library_dirs.append(str(temp_dir.absolute()))
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Build ``ext``, wrapping any failure in CompilingFailed.

    Fixes: narrowed ``BaseException`` to ``Exception`` so
    KeyboardInterrupt/SystemExit still propagate, and chained with
    ``from`` so the original traceback is preserved.
    """
    try:
        build_ext.build_extension(self, ext)
    except Exception as e:
        raise CompilingFailed(e) from e
def build_extension(self, ext):
    """Append the gdal-config derived cflags, then build as usual.

    Done here rather than on each Extension() instantiation so every
    extension shares one resolution of gdal-config's location.
    """
    ext.extra_compile_args += self.extra_cflags
    return build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Add numpy headers and route StaticLibrary objects to the static build."""
    ext.include_dirs.append(numpy_include_dir())
    if not isinstance(ext, StaticLibrary):
        _build_ext.build_extension(self, ext)
    else:
        self.build_static_extension(ext)
def build_extension(self, ext):
    """Build normally; print the error and raise BuildFailed on failure."""
    try:
        build_ext.build_extension(self, ext)
    except ext_errors as err:
        print(err)
        raise BuildFailed()
def build_extension(self, ext):
    """Best-effort build: any known compiler/platform error is swallowed."""
    try:
        build_ext.build_extension(self, ext)
    except (CCompilerError, DistutilsExecError,
            DistutilsPlatformError, ValueError):
        # Deliberately ignored: the package works without the extension.
        pass
def build_extension(self, ext):
    """Compile the extension on Linux only; other platforms are a no-op."""
    if sys.platform != "linux":
        return
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Build normally; report compiler/exec failures via _unavailable."""
    try:
        build_ext.build_extension(self, ext)
    except (CCompilerError, DistutilsExecError) as exc:
        self._unavailable(exc)
def build_extension(self, ext):
    """Compile the Rust crate with cargo, then run the setuptools build."""
    cargo_cmd = ['cargo', 'build']
    if MODE == 'release':
        cargo_cmd.append('--release')
    subprocess.check_call(cargo_cmd)
    setuptools_build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Append the shared extra cflags before delegating to the base build."""
    ext.extra_compile_args += self.extra_cflags
    return build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Detect optional native dependencies (proj, geos, iconv, freexl,
    spatialite) and set the matching feature macros, then build."""
    # Gather include directories from all compiler/linker tool command
    # lines, preferring the module directories.
    collected = ['/usr/local/include', '/usr/include']
    for toolchain_cmd in (self.compiler.compiler,
                          self.compiler.compiler_so,
                          self.compiler.compiler_cxx,
                          self.compiler.linker_so,
                          self.compiler.linker_exe,
                          self.compiler.preprocessor):
        collected.extend(self.strip_includes(toolchain_cmd))
    self.compiler.include_dirs.extend(self.uniq(collected))
    if not self.with_proj4:
        ext.extra_compile_args.append("-DOMIT_PROJ")
    else:
        self.check_header(ext, "proj_api.h")
        self.check_lib(
            ext, "pj_init_plus", "proj",
            "'libproj' is required but it doesn't seem to be installed on this system.",
            ["m"])
    if not self.with_geos:
        ext.extra_compile_args.append("-DOMIT_GEOS")
    else:
        self.check_header(ext, "geos_c.h")
        self.check_lib(
            ext, "GEOSTopologyPreserveSimplify", "geos_c",
            "'libgeos_c' is required but it doesn't seem to be installed on this system.",
            ["m", "geos"])
        if self.with_geosadvanced:
            ext.extra_compile_args.append("-DGEOS_ADVANCED")
            self.check_lib(
                ext, "GEOSCoveredBy", "geos_c",
                "obsolete 'libgeos_c' (< v.3.3.0). \nplease retry specifying: --without-geosadvanced.",
                ["m", "geos"])
    if not self.with_iconv:
        ext.extra_compile_args.append("-DOMIT_ICONV")
    else:
        # Darwin needs an explicit libiconv; elsewhere only link it when
        # the C library itself lacks the symbol.
        needs_lib = (sys.platform.startswith("darwin")
                     or not self.compiler.has_function("iconv"))
        if needs_lib:
            ext.libraries.append("iconv")
    if not self.with_freexl:
        ext.extra_compile_args.append("-DOMIT_FREEXL")
    else:
        self.check_header(ext, "freexl.h")
        self.check_lib(
            ext, "freexl_open", "freexl",
            "'libfreexl' is required but it doesn't seem to be installed on this system.",
            ["m"])
    if not self.with_spatialite_init_ex:
        ext.extra_compile_args.append("-DSPATIALITE_HAS_INIT_EX=0")
    else:
        self.check_header(ext, "spatialite.h")
        self.check_lib(
            ext, "spatialite_init_ex", "spatialite",
            "'libspatialite' is required but it doesn't seem to be installed on this system.",
            ["m"])
        ext.extra_compile_args.append("-DSPATIALITE_HAS_INIT_EX=1")
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Translate every known build failure into BuildFailed."""
    failure_exceptions = (CCompilerError, DistutilsExecError,
                          DistutilsPlatformError, IOError, ValueError)
    try:
        build_ext.build_extension(self, ext)
    except failure_exceptions:
        raise BuildFailed()
def build_extension(self, ext):
    """Build ``ext``; compile failures surface as BuildFailed.

    Fix: the Python 2-only ``except ext_errors, x`` syntax (a syntax
    error on Python 3) is replaced by the ``as`` form, valid on both
    Python 2.6+ and Python 3.
    """
    try:
        distutils_build_ext.build_extension(self, ext)
    except ext_errors as x:
        raise BuildFailed(x)
def build_extension(self, ext):
    """Run the stock build, converting known errors to BuildFailed."""
    try:
        build_ext.build_extension(self, ext)
    except ext_errors as cause:
        raise BuildFailed(cause)
def build_extension(self, ext):
    """Build ``ext`` with extra pre/post steps for the MeCab wrapper."""
    is_mecab = ext.name == "MeCab._MeCab"
    if is_mecab:
        # May build a private libmecab and rewrite compile/link flags.
        maybe_build_libmecab_and_adjust_flags(ext)
    _build_ext.build_extension(self, ext)
    if is_mecab:
        discard_swig_wrappers(ext)
def build_extension(self, ext):
    """Build normally; report known failures through _unavailable."""
    try:
        build_ext.build_extension(self, ext)
    except (CCompilerError, DistutilsExecError, OSError) as exc:
        self._unavailable(exc)
def build_extension(self, ext):
    """Build normally; any recognised build error becomes BuildFailed."""
    try:
        build_ext.build_extension(self, ext)
    except ext_errors:
        raise BuildFailed()
def build_extension(self, ext):
    """Route StaticLibrary objects to the static build path."""
    if not isinstance(ext, StaticLibrary):
        _build_ext.build_extension(self, ext)
    else:
        self.build_static_extension(ext)
def build_extension(self, ext):
    """Enable C++0x support (flag + unique_ptr macro) when requested."""
    if self.use_cpp_0x:
        ext.extra_compile_args.append('-std=c++0x')
        ext.define_macros.append(('PYTANGO_HAS_UNIQUE_PTR', '1'))
    dftbuild_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Try to build ``ext``; log a warning instead of failing the install.

    Fixes: bind the exception with ``as`` instead of sys.exc_info(), and
    use logging.warning() (logging.warn is deprecated).
    """
    try:
        base_build_ext.build_extension(self, ext)
    except Exception as exc:
        # Log the exception *class*, matching the old sys.exc_info()[0].
        logging.warning("building the %s failed with %s", ext.name, type(exc))