def run(self):
    try:
        _build_ext.run(self)
    except CompileError:
        warn('Failed to build extension modules')
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
def run(self):
    if not self.dry_run:
        import numpy
        import bohrium_api
        for ext in self.extensions:
            ext.include_dirs.extend(
                [numpy.get_include(), bohrium_api.get_include()])
    setup_build_ext.run(self)
def run(self):
    inst_gdal_version = self.get_gdal_config('version')
    if inst_gdal_version != GDAL_VERSION:
        raise GDALConfigError('Version mismatch %s != %s' % (
            inst_gdal_version, GDAL_VERSION))
    build_ext.run(self)
def run(self): """Run extension builder.""" if "%x" % sys.maxsize != '7fffffffffffffff': raise DistutilsPlatformError("%s require 64-bit operating system" % SETUP_METADATA["packages"]) if "z" not in self.libraries: zcmd = ['bash', '-c', 'cd ' + ZLIBDIR + ' && ( test Makefile -nt' ' configure || bash ./configure --static ) && make -f ' 'Makefile.pic PIC'] spawn(cmd=zcmd, dry_run=self.dry_run) self.extensions[0].extra_objects.extend( path_join("third-party", "zlib", bn + ".lo") for bn in [ "adler32", "compress", "crc32", "deflate", "gzclose", "gzlib", "gzread", "gzwrite", "infback", "inffast", "inflate", "inftrees", "trees", "uncompr", "zutil"]) if "bz2" not in self.libraries: bz2cmd = ['bash', '-c', 'cd ' + BZIP2DIR + ' && make -f ' 'Makefile-libbz2_so all'] spawn(cmd=bz2cmd, dry_run=self.dry_run) self.extensions[0].extra_objects.extend( path_join("third-party", "bzip2", bn + ".o") for bn in [ "blocksort", "huffman", "crctable", "randtable", "compress", "decompress", "bzlib"]) _build_ext.run(self)
def run(self): """Overridden method. Runs the build. Library directories and include directories are checked here, first. """ # Check we can find the OSKAR library. d = self.dir_contains('oskar.', self.library_dirs) if not d: raise RuntimeError( "Could not find OSKAR library. " "Check that OSKAR has already been installed on this system, " "and set the library path to build_ext " "using -L or --library-dirs") self.rpath.append(d) self.libraries.append('oskar') # Check we can find the OSKAR headers. h = self.find_file(join('oskar', 'oskar_version.h'), self.include_dirs) if not h: raise RuntimeError( "Could not find oskar/oskar_version.h. " "Check that OSKAR has already been installed on this system, " "and set the include path to build_ext " "using -I or --include-dirs") self.include_dirs.insert(0, dirname(h)) self.include_dirs.insert(0, get_include()) # Check the version of OSKAR is compatible. version = self.get_oskar_version(h) if not version.startswith(oskar_compatibility_version): raise RuntimeError( "The version of OSKAR found is not compatible with oskarpy. " "Found OSKAR %s, but require OSKAR %s." % ( version, oskar_compatibility_version) ) build_ext.run(self)
def run(self):
    # Run other commands
    self.run_command("scons")
    self.run_command("cmake")

    # Add lib_dirs and include_dirs in packages.
    # Copy the directories containing the files generated
    # by scons and the like.
    if is_conda_build():
        print('Building directly with conda. '
              'Skip the bin, include and lib dirs.')
        return old_build_ext.run(self)
    for d in (self.distribution.lib_dirs,
              self.distribution.inc_dirs,
              self.distribution.bin_dirs,
              # self.distribution.share_dirs,
              ):
        if not d or self.inplace == 1:
            continue
        if not os.path.exists(self.build_lib):
            self.mkpath(self.build_lib)
        for name, dir in d.items():
            copy_data_tree(dir, pj(self.build_lib, name))
    return old_build_ext.run(self)
def run(self):
    # Import numpy here, only when headers are needed
    import numpy

    # Add numpy headers to include_dirs
    self.include_dirs.append(numpy.get_include())

    # Call original build_ext command
    build_ext.run(self)
def run(self):
    # Bail out if we don't have the Python include files
    include_dir = get_python_inc()
    if not os.path.isfile(os.path.join(include_dir, "Python.h")):
        print("You will need the Python headers to compile this extension.")
        sys.exit(1)

    # Print a warning if pkg-config is not available or does not know
    # about igraph
    if buildcfg.use_pkgconfig:
        detected = buildcfg.detect_from_pkgconfig()
    else:
        detected = False

    # Check whether we have already compiled igraph in a previous run.
    # If so, it should be found in igraphcore/include and igraphcore/lib
    if os.path.exists("igraphcore"):
        buildcfg.use_built_igraph()
        detected = True

    # Download and compile igraph if the user did not disable it and
    # we do not know the libraries from pkg-config yet
    if not detected:
        if buildcfg.download_igraph_if_needed and is_unix_like():
            detected = buildcfg.download_and_compile_igraph()
            if detected:
                buildcfg.use_built_igraph()
            else:
                sys.exit(1)

    # Fall back to an educated guess if everything else failed
    if not detected:
        buildcfg.use_educated_guess()

    # Replaces library names with full paths to static libraries
    # where possible. libm.a is excluded because it caused problems
    # on Sabayon Linux where libm.a is probably not compiled with
    # -fPIC
    if buildcfg.static_extension:
        buildcfg.replace_static_libraries(exclusions=["m"])

    # Prints basic build information
    buildcfg.print_build_info()

    ext = first(extension for extension in self.extensions
                if extension.name == "igraph._igraph")
    buildcfg.configure(ext)

    # Run any pre-build hooks
    for hook in buildcfg.pre_build_hooks:
        hook(self)

    # Run the original build_ext command
    build_ext.run(self)

    # Run any post-build hooks
    for hook in buildcfg.post_build_hooks:
        hook(self)
def run(self):
    try:
        self.generate_c_file()
    except DistutilsPlatformError:
        if os.path.exists("polycomp/speedups.c"):
            print("Found existing C file, ignoring errors.")
        else:
            raise
    build_ext.run(self)
def run(self):
    try:
        build_ext.run(self)
    except Exception as e:
        warnings.warn('''
        Unable to build speedups module, defaulting to pure Python. Note
        that the pure Python version is more than fast enough in most cases
        %r
        ''' % e)
def run(self):
    return_code = subprocess.call(['./build_libpg_query.sh'])
    if return_code:
        sys.stderr.write('''
        An error occurred during extension building.
        Make sure you have bison and flex installed on your system.
        ''')
        sys.exit(return_code)
    build_ext.run(self)
def run(self):
    build_ext.run(self)
    cmd = [sys.executable, os.path.join(here_dir, 'build.py')]
    spawn(cmd, dry_run=self.dry_run)
    # HACK: this makes sure the library file (which is large) is only
    # included in binary builds, not source builds.
    self.distribution.package_data = {
        "libqemu.binding": ["*.dll", "*.so", "*.dylib"],
    }
def run(self):
    try:
        build_ext.run(self)
    except Exception:
        e = sys.exc_info()[1]
        sys.stdout.write('%s\n' % str(e))
        warnings.warn(
            self.warning_message + "Extension modules" +
            "There was an issue with your platform configuration "
            "- see above.")
def run(self):
    self._run_config_if_needed()
    self._check_prefix_modified()
    self._configure_compiler()
    log.info('include dirs: %r', self.include_dirs)
    log.info('library dirs: %r', self.library_dirs)
    _build_ext.run(self)
def run(self):
    build_ext.run(self)
    cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
    spawn(cmd, dry_run=self.dry_run)
    # HACK: this makes sure the library file (which is large) is only
    # included in binary builds, not source builds.
    self.distribution.package_data = {
        "llvmlite.binding": get_library_files(),
    }
def run(self):
    BuildExtCommand.run(self)
    # If we are not a light build we want to also execute build_js as
    # part of our build_ext pipeline. Because setuptools subclasses
    # this thing really oddly we cannot use sub_commands but have to
    # manually invoke it here.
    if not IS_LIGHT_BUILD:
        self.run_command('build_js')
def run(self):
    # First, we build the standard extensions.
    _build_ext.run(self)
    # Then, we build the driver if required.
    if not self.skip_driver:
        self.real_build_lib = os.path.realpath(self.build_lib)
        if platform.system().lower() == "linux":
            self._build_linux_driver()
        elif platform.system().lower() == "darwin":
            self._build_darwin_driver()
def run(self):
    self.distribution.fetch_build_eggs(numpy_requirement)
    numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
    for ext in self.extensions:
        if (hasattr(ext, 'include_dirs') and
                numpy_incl not in ext.include_dirs):
            ext.include_dirs.append(numpy_incl)
    build_ext.run(self)
def run(self):
    self.generate_protoc()
    try:
        self.generate_c_file()
    except DistutilsPlatformError:
        if os.path.exists('imposm/cache/tc.c'):
            print('Found existing C file. Ignoring previous error.')
        else:
            raise
    build_ext.run(self)
def run(self): try: subprocess.check_call("cd pachi_py; mkdir -p build && cd build && cmake ../pachi && make -j4", shell=True) except subprocess.CalledProcessError as e: print("Could not build pachi-py: %s" % e) raise # Prevent numpy from trying to setup __builtins__.__NUMPY_SETUP__ = False import numpy self.include_dirs.append(numpy.get_include()) _build_ext.run(self)
def run(self):
    if HAS_CYTHON:
        print('*** NOTE: Found Cython, extension files will be '
              'transpiled if this is an install invocation.',
              file=sys.stderr)
    else:
        print('*** WARNING: Cython not found, assuming cythonized '
              'C files available for compilation.',
              file=sys.stderr)
    _build_ext.run(self)
def run(self):
    # FIXME - both of these should be in the build directory.
    configpath = os.path.join('src', 'config.h')
    paramspath = os.path.join('configutils', 'params.cache')
    if not os.path.exists(configpath) or not os.path.exists(paramspath):
        self.run_command('config')
    params = read_params()
    self.use_param(params, 'include_dirs')
    self.use_param(params, 'library_dirs')
    self.use_param(params, 'libraries')
    du_build_ext.run(self)
def run(self):
    import pkg_resources

    # At this point we can be sure pip has already installed numpy
    numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
    for ext in self.extensions:
        if (hasattr(ext, 'include_dirs') and
                numpy_incl not in ext.include_dirs):
            ext.include_dirs.append(numpy_incl)
    _build_ext.run(self)
def run(self):
    # According to
    # https://pip.pypa.io/en/stable/reference/pip_install.html#installation-order
    # at this point we can be sure pip has already installed numpy
    numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
    for ext in self.extensions:
        if (hasattr(ext, 'include_dirs') and
                numpy_incl not in ext.include_dirs):
            ext.include_dirs.append(numpy_incl)
    build_ext.run(self)
def run(self):
    # For extensions that require 'numpy' in their include dirs,
    # replace 'numpy' with the actual paths
    import numpy
    np_include = numpy.get_include()
    for extension in self.extensions:
        if 'numpy' in extension.include_dirs:
            idx = extension.include_dirs.index('numpy')
            extension.include_dirs.insert(idx, np_include)
            extension.include_dirs.remove('numpy')
    build_ext.run(self)
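# A minimal usage sketch for the 'numpy' placeholder pattern above. The
# module, source, and command-class names here are hypothetical, not taken
# from the snippet: the point is that an Extension can list the literal
# string 'numpy' in include_dirs, and the command swaps it for the real
# header path at build time, so setup.py can be parsed before numpy is
# installed.
from setuptools import Extension, setup

setup(
    ext_modules=[
        Extension(
            'mypkg._speedups',            # hypothetical module name
            sources=['src/_speedups.c'],  # hypothetical source file
            include_dirs=['numpy'],       # placeholder resolved by run() above
        ),
    ],
    cmdclass={'build_ext': BuildExtNumpyPlaceholder},  # the class defining run() above
)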
def run(self): """Run the command""" if not self.with_mysql_capi: return if os.name == 'nt': build_ext.run(self) else: self.real_build_extensions = self.build_extensions self.build_extensions = lambda: None build_ext.run(self) self.fix_compiler() self.real_build_extensions()
def run(self):
    # If we were asked to build any C/C++ libraries, add the directory
    # where we built them to the include path. (It's already on the
    # library path.)
    if self.distribution.has_c_libraries():
        self.run_command('build_clib')
        build_clib = self.get_finalized_command('build_clib')
        for key, value in build_clib.build_args.items():
            for ext in self.extensions:
                if not hasattr(ext, key) or getattr(ext, key) is None:
                    setattr(ext, key, value)
                else:
                    getattr(ext, key).extend(value)
    build_ext.run(self)
def run(self): if "z" and "bz2" not in self.libraries: zcmd = ['bash', '-c', 'cd ' + ZLIBDIR + ' && ( test Makefile -nt' ' configure || bash ./configure --static ) && make -f ' 'Makefile.pic PIC'] spawn(cmd=zcmd, dry_run=self.dry_run) bz2cmd = ['bash', '-c', 'cd ' + BZIP2DIR + ' && make -f ' 'Makefile-libbz2_so all'] spawn(cmd=bz2cmd, dry_run=self.dry_run) else: for ext in self.extensions: ext.extra_objects = [] _build_ext.run(self)
def run(self):
    # We write a header file containing everything we have discovered by
    # inspecting the libraries which exist. This is the main mechanism we
    # use to detect differences between FFmpeg and Libav.
    include_dir = os.path.join(self.build_temp, "include")
    pyav_dir = os.path.join(include_dir, "pyav")
    try:
        os.makedirs(pyav_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    header_path = os.path.join(pyav_dir, "config.h")
    print("writing", header_path)
    with open(header_path, "w") as fh:
        fh.write("#ifndef PYAV_COMPAT_H\n")
        fh.write("#define PYAV_COMPAT_H\n")
        for k, v in config_macros:
            fh.write("#define %s %s\n" % (k, v))
        fh.write("#endif\n")
    self.include_dirs = self.include_dirs or []
    self.include_dirs.append(include_dir)
    self.run_command("cythonize")
    return build_ext.run(self)
def run(self): """Iterates through the list of Extension packages and reorders them, so that the Library's come first """ # here, we simply re-order the extensions such that we get the Library first self.extensions = [ext for ext in self.extensions if isinstance(ext, Library)] + [ext for ext in self.extensions if not isinstance(ext, Library)] # call the base class function return _build_ext.run(self)
def run(self):
    def compile_hts():
        subprocess.check_call(['make', 'libhts.a'])

    self.execute(compile_hts, [], 'Compiling htslib using Makefile')
    build_ext.run(self)
def run(self): """Overridden method. Runs the build. Library directories and include directories are checked here, first. """ # Check we can find the OSKAR library. # For some reason, run() is sometimes called again after the build # has already happened. # Make sure not to fail the check the second time. if not self._checked_lib: self._checked_lib = True if os.getenv('OSKAR_LIB_DIR'): self.library_dirs.append(os.getenv('OSKAR_LIB_DIR')) if platform.system() == 'Windows': self.library_dirs.append('C:\\Program Files\\OSKAR\\lib') for i, test_dir in enumerate(self.library_dirs): self.library_dirs[i] = test_dir.strip('\"') directory = self.dir_contains('oskar.', self.library_dirs) if not directory: raise RuntimeError( "Could not find OSKAR library. " "Check that OSKAR has already been installed on " "this system, and either set the environment variable " "OSKAR_LIB_DIR, or set the library path to build_ext " "using -L or --library-dirs") if platform.system() != 'Windows': self.rpath.append(directory) self.libraries.append('oskar') self.libraries.append('oskar_apps') self.libraries.append('oskar_binary') self.libraries.append('oskar_settings') if self.dir_contains('oskar_ms.', self.library_dirs): self.libraries.append('oskar_ms') # Check we can find the OSKAR headers. if not self._checked_inc: from numpy import get_include self._checked_inc = True if os.getenv('OSKAR_INC_DIR'): self.include_dirs.append(os.getenv('OSKAR_INC_DIR')) if platform.system() == 'Windows': self.include_dirs.append('C:\\Program Files\\OSKAR\\include') header = self.find_file( os.path.join('oskar', 'oskar_version.h'), self.include_dirs) if not header: raise RuntimeError( "Could not find oskar/oskar_version.h. " "Check that OSKAR has already been installed on " "this system, and either set the environment variable " "OSKAR_INC_DIR, or set the include path to build_ext " "using -I or --include-dirs") self.include_dirs.insert(0, os.path.dirname(header)) self.include_dirs.insert(0, get_include()) for i, test_dir in enumerate(self.include_dirs): self.include_dirs[i] = test_dir.strip('\"') # Check the version of OSKAR is compatible. version = self.get_oskar_version(header) if not version.startswith(OSKAR_COMPATIBILITY_VERSION): raise RuntimeError( "The version of OSKAR found is not compatible with " "oskarpy. Found OSKAR %s, but require OSKAR %s." % ( version, OSKAR_COMPATIBILITY_VERSION) ) build_ext.run(self)
def run(self): """Wrap `run` with `BuildFailed`.""" try: build_ext.run(self) except errors.DistutilsPlatformError: raise BuildFailed()
def run(self):
    try:
        build_ext.run(self)
    except Exception as e:
        self._unavailable(e)
        self.extensions = []  # avoid copying missing files (it would fail).
def run(self):
    pkg_path = os.path.join(module_name, get_platform())
    install_vart_pkg(pkg_path, platform.processor())
    build_ext.run(self)
    overlay_path = os.path.join(self.build_lib, module_name, 'overlays')
    resolve_overlay_d(overlay_path)
def run(self):
    if not check_dependencies_once():
        return
    build_ext.run(self)
def run(self):
    self.make_libceed_so()
    build_ext.run(self)
def run(self):
    self.cmake_build()
    _build_ext.run(self)
def run(self):
    _build_libgraphqlparser()
    build_ext.run(self)
def run(self):
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        raise BuildFailed()
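# A sketch of how BuildFailed overrides like the one above are typically
# consumed at the setup() call site (the package name, ve_build_ext, and
# speedups_ext are assumptions, not taken from the snippets): setup() is
# first attempted with the C extension, and on BuildFailed the build is
# retried as pure Python.
def run_setup(with_binary):
    kwargs = {'ext_modules': [speedups_ext]} if with_binary else {}
    setup(
        name='mypkg',                          # hypothetical package name
        cmdclass={'build_ext': ve_build_ext},  # the class raising BuildFailed
        **kwargs
    )

try:
    run_setup(with_binary=True)
except BuildFailed:
    print('C extension failed to build; falling back to pure Python.')
    run_setup(with_binary=False)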
def run(self):
    self.run_command('DownloadTemplates')
    _build_ext.run(self)
def run(self):
    import numpy
    self.include_dirs.append(numpy.get_include())
    build_ext.run(self)
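# For context, a hedged sketch of how a run() override like the one above
# is wired into a build (class name and bare setup() call are illustrative
# only): the subclass is registered through cmdclass so setuptools invokes
# it in place of the stock build_ext command, and the deferred numpy import
# lets setup.py be parsed before numpy is installed.
from setuptools import setup
from setuptools.command.build_ext import build_ext


class NumpyBuildExt(build_ext):
    def run(self):
        import numpy  # deferred so setup.py imports without numpy present
        self.include_dirs.append(numpy.get_include())
        build_ext.run(self)


setup(cmdclass={'build_ext': NumpyBuildExt})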
def run(self):
    try:
        build_ext.run(self)
    except DistutilsPlatformError as e:
        self._unavailable(e)
def run(self):
    os.system('make -C postqe/fortran all')
    build_ext.run(self)
def run(self):
    if os.name != 'posix':
        # Windows case
        pass
    else:
        setup_context_cc()
    build_ext.run(self)
def run(self):
    for package in good_packages:
        package.do_custom_build()
    return BuildExtCommand.run(self)
def run(self): build_ext.run(self) if sys.platform == "win32": self._copy_windows_dlls()
def run(self):
    if self.portage_ext_modules:
        _build_ext.run(self)
def run(self):
    p = Popen(['make'] + LIB_OBJECTS, cwd=DIR)
    p.wait()
    if p.returncode != 0:
        raise Exception('Could not build %s' % (', '.join(LIB_OBJECTS)))
    BuildExt.run(self)
def run(self):
    try:
        _build_ext.run(self)
    except (CCompilerError, DistutilsError, CompileError):
        self.warn('\n\n*** Building the extension failed. ***')
def run(self):
    # Bail out if we don't have the Python include files
    include_dir = get_python_inc()
    if not os.path.isfile(os.path.join(include_dir, "Python.h")):
        print("You will need the Python headers to compile this extension.")
        sys.exit(1)

    # Check whether we have already compiled igraph in a previous run.
    # If so, it should be found in vendor/build/igraph/include and
    # vendor/build/igraph/lib
    if os.path.exists(os.path.join("vendor", "build", "igraph")):
        buildcfg.use_built_igraph()
        detected = True
    else:
        detected = False

    # If igraph is provided as a git submodule, use that
    if not detected:
        if os.path.isfile(
            os.path.join("vendor", "source", "igraph", "configure.ac")
        ):
            detected = buildcfg.compile_igraph_from_vendor_source()
            if detected:
                buildcfg.use_built_igraph()
            else:
                sys.exit(1)

    # Try detecting with pkg-config if we haven't found the submodule
    if not detected:
        if buildcfg.use_pkgconfig:
            detected = buildcfg.detect_from_pkgconfig()
        else:
            detected = False

    # Download and compile igraph if the user did not disable it and
    # we do not know the libraries from pkg-config yet
    if not detected:
        if buildcfg.download_igraph_if_needed and is_unix_like():
            detected = buildcfg.download_and_compile_igraph()
            if detected:
                buildcfg.use_built_igraph()
            else:
                sys.exit(1)

    # Fall back to an educated guess if everything else failed
    if not detected:
        buildcfg.use_educated_guess()

    # Replaces library names with full paths to static libraries
    # where possible. libm.a is excluded because it caused problems
    # on Sabayon Linux where libm.a is probably not compiled with
    # -fPIC
    if buildcfg.static_extension:
        buildcfg.replace_static_libraries(exclusions=["m"])

    # Prints basic build information
    buildcfg.print_build_info()

    ext = first(
        extension for extension in self.extensions
        if extension.name == "igraph._igraph"
    )
    buildcfg.configure(ext)

    # Run any pre-build hooks
    for hook in buildcfg.pre_build_hooks:
        hook(self)

    # Run the original build_ext command
    build_ext.run(self)

    # Run any post-build hooks
    for hook in buildcfg.post_build_hooks:
        hook(self)
def run(self):
    self._add_build_settings()
    self._convert_abspath_libraries()
    _build_ext.run(self)
def run(self):
    try:
        super(build_ext, self).run()
    except TypeError:
        # The distutils parent class is an old-style Python class, so
        # super() fails; call the unbound method directly instead.
        _build_ext.run(self)
def run(self):
    # Compile dffi with cmake using the LLVM provided by LLVM_CONFIG,
    # creating a full static library with LLVM inside! We then patch the
    # extensions to link with this static library. Inspired by
    # https://stackoverflow.com/questions/42585210/extending-setuptools-extension-to-use-cmake-in-setup-py#
    # TODO: this is the least painful and most integrated setup I managed
    # to do; if someone has a better idea, please let me know :)
    LLVM_CONFIG = os.getenv("LLVM_CONFIG")
    if LLVM_CONFIG is None:
        raise RuntimeError(
            "The LLVM_CONFIG environment variable must be set to a valid "
            "path to an llvm-config binary!")
    cwd = os.path.abspath(os.getcwd())
    source_dir = os.path.join(this_dir, "../..")
    build_temp = os.path.abspath(self.build_temp)
    try:
        os.makedirs(build_temp)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(build_temp):
            raise
    os.chdir(build_temp)

    # Use Ninja if it is available
    try:
        subprocess.check_call(['ninja', '--version'])
        use_ninja = True
    except (OSError, subprocess.CalledProcessError):
        use_ninja = False

    cmake_args = [
        '-DLLVM_CONFIG=%s' % LLVM_CONFIG,
        "-DCMAKE_BUILD_TYPE=release",
        "-DDFFI_STATIC_LLVM=ON",
        "-DPYTHON_BINDINGS=OFF",
        "-DBUILD_TESTS=OFF",
        source_dir,
    ]
    if use_ninja:
        cmake_args.extend(("-G", "Ninja"))
    if platform.system() == "Darwin":
        # Compile for both 32 and 64 bits
        cmake_args.append("-DCMAKE_OSX_ARCHITECTURES='x86_64;i386'")
    subprocess.check_call(['cmake'] + cmake_args)
    subprocess.check_call(['cmake', '--build', '.'])

    # Get static library path from cmake
    # TODO: get encoding from the current environment?
    vars_ = subprocess.check_output(['cmake', '-L', '-N', '.']).decode("utf8")
    for v in vars_.split("\n"):
        v = v.strip()
        if v.startswith("DFFI_STATIC_LLVM_PATH:"):
            DFFI_STATIC_LLVM_PATH = v[v.index('=') + 1:]
            break
    else:
        raise RuntimeError(
            "unable to get DFFI_STATIC_LLVM_PATH from cmake! This is an "
            "internal error, please submit a bug report!")
    for ext in self.extensions:
        ext.include_dirs.append(os.path.join(build_temp, "include"))
        ext.extra_link_args.append(DFFI_STATIC_LLVM_PATH)
    os.chdir(cwd)
    build_ext.run(self)
def run(self):
    build_ext.run(self)
    cphoc_lib = glob('build/lib.*/cphoc.*.so')[0]
    shutil.copy(cphoc_lib, 'pythia/utils/phoc/cphoc.so')
def run(self):
    try:
        build_ext.run(self)
    except DistutilsPlatformError as e:
        self._unavailable(e)
        self.extensions = []  # avoid copying missing files (it would fail).
def run(self):
    build_ext.run(self)
def run(self):
    try:
        _build_ext.run(self)
    except CompileError:
        warn('Failed to build extension modules')
def run(self):
    self._patch_headers()
    _build_ext.run(self)
def run(self):
    # generate the git commit include file
    get_commit()
    _build_ext.run(self)