def build_extension(self, ext):
    """Configure and build one extension with CMake in ``self.build_temp``.

    Side effects: may pip-install pybind11, creates the build directory,
    and runs cmake/cmake3 as subprocesses.
    """
    try:
        import pybind11
    except ImportError:
        # Bug fix: ``pip.main`` was removed from pip's public API in pip 10.
        # The documented way to install programmatically is to run pip as a
        # module in a subprocess of the current interpreter.
        import sys
        subprocess.check_call(
            [sys.executable, '-m', 'pip', 'install', 'pybind11>=2.1.1'])
        import pybind11
    extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
    # Coverage instrumentation is opt-in via the YDK_COVERAGE env var.
    coverage_compiler_flag = '-DCOVERAGE=False'
    if 'YDK_COVERAGE' in os.environ:
        coverage_compiler_flag = '-DCOVERAGE=True'
    cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={0}'.format(extdir),
                  '-DPYBIND11_INCLUDE={0};{1}'.format(
                      pybind11.get_include(),
                      pybind11.get_include(user=True)),
                  '-DPYTHON_VERSION={0}'.format(get_python_version()),
                  '-DCMAKE_BUILD_TYPE=Release',
                  coverage_compiler_flag]
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)
    # Prefer cmake3 (CentOS/RHEL naming) when present, else plain cmake.
    cmake3_installed = (0 == subprocess.call(['which', 'cmake3'],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE))
    if cmake3_installed:
        cmake_executable = 'cmake3'
    else:
        cmake_executable = 'cmake'
    subprocess.check_call([cmake_executable, ext.sourcedir] + cmake_args,
                          cwd=self.build_temp)
    subprocess.check_call([cmake_executable, '--build', '.'],
                          cwd=self.build_temp)
def build_extensions(self):
    """Attach compiler/link flags plus numpy & pybind11 headers, then build."""
    compiler_kind = self.compiler.compiler_type
    compile_flags = self.c_opts.get(compiler_kind, [])
    if compiler_kind == 'unix':
        compile_flags.append('-DVERSION_INFO="%s"' % self.distribution.get_version())
        compile_flags.append(cpp_flag(self.compiler))
        if has_flag(self.compiler, '-fvisibility=hidden'):
            compile_flags.append('-fvisibility=hidden')
    elif compiler_kind == 'msvc':
        compile_flags.append('/DVERSION_INFO=\\"%s\\"' % self.distribution.get_version())
    # Imported here rather than at module scope: pip may have installed
    # numpy/pybind11 only while this very script was executing.
    import pybind11
    import numpy as np
    link_flags = self.link_opts.get(compiler_kind, [])
    header_dirs = [
        # Path to pybind11 headers
        pybind11.get_include(),
        pybind11.get_include(True),
        # Path to numpy headers
        np.get_include(),
    ]
    for extension in self.extensions:
        extension.extra_compile_args.extend(compile_flags)
        extension.extra_link_args.extend(link_flags)
        extension.include_dirs.extend(header_dirs)
    build_ext.build_extensions(self)
def build_extensions(self):
    """Set per-compiler flags and pybind11 include paths, then delegate."""
    import pybind11
    compiler_kind = self.compiler.compiler_type
    compile_opts = self.c_opts.get(compiler_kind, [])
    for extension in self.extensions:
        extension.extra_compile_args = compile_opts
        extension.include_dirs.append(pybind11.get_include())
        extension.include_dirs.append(pybind11.get_include(user=True))
    build_ext.build_extensions(self)
def finalize_options(self):
    """Finalize options, then append numpy and pybind11 header directories."""
    super().finalize_options()
    # Deferred imports: these packages may only become importable after
    # setup_requires has been processed.
    import numpy
    import pybind11
    for header_dir in (numpy.get_include(),
                       pybind11.get_include(user=True),
                       pybind11.get_include()):
        self.include_dirs.append(header_dir)
def add_dirs(builder, output=False):
    """Populate *builder*'s include/library/libraries lists (fftw3, Eigen,
    pybind11).

    Raises OSError when no pybind11 header directory can be located.
    """
    # We need to do most of this both for build_clib and build_ext, so separate it out here.
    # First some basic ones we always need.
    builder.include_dirs.append('include')
    builder.include_dirs.append('include/galsim')
    # Look for fftw3.
    fftw_lib = find_fftw_lib(output=output)
    fftw_libpath, fftw_libname = os.path.split(fftw_lib)
    # build_clib has no library_dirs attribute, hence the hasattr guard.
    if hasattr(builder, 'library_dirs'):
        if fftw_libpath != '':
            builder.library_dirs.append(fftw_libpath)
        builder.libraries.append('galsim')  # Make sure galsim comes before fftw3
        # Strips the 'lib' prefix and the extension, e.g. libfftw3.so -> fftw3.
        # NOTE(review): recomputes os.path.split(fftw_lib)[1], which is the
        # same value as fftw_libname above.
        builder.libraries.append(os.path.split(fftw_lib)[1].split('.')[0][3:])
    fftw_include = os.path.join(os.path.split(fftw_libpath)[0], 'include')
    if os.path.isfile(os.path.join(fftw_include, 'fftw3.h')):
        print('Include directory for fftw3 is ',fftw_include)
        # Usually, the fftw3.h file is in an associated include dir, but not always.
        builder.include_dirs.append(fftw_include)
    else:
        # If not, we have our own copy of fftw3.h here.
        print('Using local copy of fftw3.h')
        builder.include_dirs.append('include/fftw3')
    # Look for Eigen/Core
    eigen_dir = find_eigen_dir(output=output)
    builder.include_dirs.append(eigen_dir)
    # Finally, add pybind11's include dir
    import pybind11
    print('PyBind11 is version ',pybind11.__version__)
    print('Looking for pybind11 header files: ')
    # Candidate locations, probed in order; the trailing None is a sentinel
    # meaning "all candidates exhausted" and triggers the error path.
    locations = [pybind11.get_include(user=True),
                 pybind11.get_include(user=False),
                 '/usr/include',
                 '/usr/local/include',
                 None]
    for try_dir in locations:
        if try_dir is None:
            # Last time through, raise an error.
            print("Could not find pybind11 header files.")
            print("They should have been in one of the following locations:")
            for l in locations:
                if l is not None:
                    print(" ", l)
            raise OSError("Could not find PyBind11")
        print(' ',try_dir,end='')
        if os.path.isfile(os.path.join(try_dir, 'pybind11/pybind11.h')):
            print(' (yes)')
            builder.include_dirs.append(try_dir)
            break
        else:
            print(' (no)')
def build_c_extension(module_name):
    """Compile the cii_atom C/C++ sources into an in-place extension module."""
    print(module_name)
    source_files = [same_dir('cii_atom.c'), same_dir('cii_atom_pybind11.cpp')]
    header_dirs = [
        pybind11.get_include(True),
        pybind11.get_include(False),
        os.getcwd(),
    ]
    extension = distutils.core.Extension(
        module_name,
        source_files,
        include_dirs=header_dirs,
    )
    distutils.core.setup(
        name=module_name,
        ext_modules=[extension],
        script_args=['build_ext', '--inplace'],
    )
def build_module(full_module_name, filepath):
    """Compile *filepath* (a C++ source) into an importable extension module.

    Runs setuptools in-process with a temporary build directory that is
    removed afterwards; compiler/linker flags come from the config script
    embedded in the source file.
    """
    build_path = tempfile.mkdtemp()
    cfg_globals = run_config_script(filepath)
    cfg = form_config(cfg_globals)
    # pybind11 headers are always on the include path.
    system_include_dirs = [
        pybind11.get_include(),
        pybind11.get_include(True)
    ]
    ext = ImportCppExt(
        get_ext_dir(filepath),
        full_module_name,
        sources=[filepath],
        include_dirs=system_include_dirs + get_user_include_dirs(filepath),
        extra_compile_args=cfg['compiler_args'],
        extra_link_args=cfg['linker_args'])
    args = ['build_ext', '--inplace']
    args.append('--build-temp=' + build_path)
    args.append('--build-lib=' + build_path)
    if quiet:
        args.append('-q')
    else:
        args.append('-v')
    setuptools_args = dict(
        name=full_module_name,
        ext_modules=[ext],
        script_args=args,
        cmdclass={
            'build_ext': BuildImportCppExt
        })
    # In quiet mode, silence setuptools' stdout and stderr entirely.
    if quiet:
        with stdchannel_redirected("stdout"):
            with stdchannel_redirected("stderr"):
                setuptools.setup(**setuptools_args)
    else:
        setuptools.setup(**setuptools_args)
    # NOTE(review): if setup() raises, build_path leaks; try/finally would
    # be safer -- left unchanged here.
    shutil.rmtree(build_path)
def build_extensions(self):
    """Inject include dirs and per-platform compiler flags, then build.

    Uses the vendored Eigen copy, numpy and pybind11 headers; on
    Read the Docs it builds unoptimized and returns early.
    """
    # The include directory for the celerite headers
    localincl = "vendor"
    if not os.path.exists(os.path.join(localincl, "eigen_3.3.4", "Eigen",
                                       "Core")):
        raise RuntimeError("couldn't find Eigen headers")
    # Add the pybind11 include directory
    import numpy
    import pybind11
    include_dirs = [
        os.path.join("george", "include"),
        os.path.join(localincl, "eigen_3.3.4"),
        numpy.get_include(),
        pybind11.get_include(False),
        pybind11.get_include(True),
    ]
    # Prepend so the vendored headers win over pre-existing entries.
    for ext in self.extensions:
        ext.include_dirs = include_dirs + ext.include_dirs
    # Building on RTDs takes a bit of special care
    if os.environ.get("READTHEDOCS", None) == "True":
        for ext in self.extensions:
            ext.extra_compile_args = ["-std=c++14", "-O0"]
        _build_ext.build_extensions(self)
        return
    # Compiler flags
    ct = self.compiler.compiler_type
    opts = self.c_opts.get(ct, [])
    if ct == "unix":
        opts.append("-DVERSION_INFO=\"{0:s}\""
                    .format(self.distribution.get_version()))
        print("testing C++14/C++11 support")
        opts.append(cpp_flag(self.compiler))
        flags = ["-stdlib=libc++", "-funroll-loops", "-Wno-unused-function",
                 "-Wno-uninitialized", "-Wno-unused-local-typedefs"]
        # Mac specific flags and libraries
        if sys.platform == "darwin":
            flags += ["-march=native", "-mmacosx-version-min=10.9"]
            for lib in ["m", "c++"]:
                for ext in self.extensions:
                    ext.libraries.append(lib)
            for ext in self.extensions:
                ext.extra_link_args += ["-mmacosx-version-min=10.9",
                                        "-march=native"]
        else:
            libraries = ["m", "stdc++", "c++"]
            # Only link libraries the compiler can actually find.
            for lib in libraries:
                if not has_library(self.compiler, lib):
                    continue
                for ext in self.extensions:
                    ext.libraries.append(lib)
        # Check the flags
        print("testing compiler flags")
        for flag in flags:
            if has_flag(self.compiler, flag):
                opts.append(flag)
    elif ct == "msvc":
        opts.append("/DVERSION_INFO=\\\"{0:s}\\\""
                    .format(self.distribution.get_version()))
    for ext in self.extensions:
        ext.extra_compile_args = opts
    # Run the standard build procedure.
    _build_ext.build_extensions(self)
def build_extensions(self):
    """Inject include dirs and per-platform compiler flags, then build.

    Variant without a Read the Docs shortcut: always probes compiler
    flags and available C++ runtime libraries before delegating.
    """
    # The include directory for the celerite headers
    localincl = "vendor"
    if not os.path.exists(
            os.path.join(localincl, "eigen_3.3.4", "Eigen", "Core")):
        raise RuntimeError("couldn't find Eigen headers")
    # Add the pybind11 include directory
    import numpy
    import pybind11
    include_dirs = [
        os.path.join("george", "include"),
        os.path.join(localincl, "eigen_3.3.4"),
        numpy.get_include(),
        pybind11.get_include(False),
        pybind11.get_include(True),
    ]
    # Prepend so the vendored headers win over pre-existing entries.
    for ext in self.extensions:
        ext.include_dirs = include_dirs + ext.include_dirs
    # Compiler flags
    ct = self.compiler.compiler_type
    opts = self.c_opts.get(ct, [])
    if ct == "unix":
        opts.append("-DVERSION_INFO=\"{0:s}\"".format(
            self.distribution.get_version()))
        print("testing C++14/C++11 support")
        opts.append(cpp_flag(self.compiler))
        flags = [
            "-stdlib=libc++", "-funroll-loops", "-Wno-unused-function",
            "-Wno-uninitialized", "-Wno-unused-local-typedefs"
        ]
        # Mac specific flags and libraries
        if sys.platform == "darwin":
            flags += ["-march=native", "-mmacosx-version-min=10.9"]
            for lib in ["m", "c++"]:
                for ext in self.extensions:
                    ext.libraries.append(lib)
            for ext in self.extensions:
                ext.extra_link_args += [
                    "-mmacosx-version-min=10.9", "-march=native"
                ]
        else:
            libraries = ["m", "stdc++", "c++"]
            # Only link libraries the compiler can actually find.
            for lib in libraries:
                if not has_library(self.compiler, lib):
                    continue
                for ext in self.extensions:
                    ext.libraries.append(lib)
        # Check the flags
        print("testing compiler flags")
        for flag in flags:
            if has_flag(self.compiler, flag):
                opts.append(flag)
    elif ct == "msvc":
        opts.append("/DVERSION_INFO=\\\"{0:s}\\\"".format(
            self.distribution.get_version()))
    for ext in self.extensions:
        ext.extra_compile_args = opts
    # Run the standard build procedure.
    _build_ext.build_extensions(self)
# CUDA/C++ sources for the cuhnsw binding module.
csrcs = glob.glob("cpp/src/*.cu") + glob.glob("cpp/src/*.cc")
extensions = [
    # CMakeExtension(name="cuhnsw"),
    Extension("cuhnsw.cuhnsw_bind",
              sources=csrcs + [
                  "cuhnsw/bindings.cc",
                  "3rd/json11/json11.cpp"],
              language="c++",
              extra_compile_args=extra_compile_args,
              extra_link_args=["-fopenmp"],
              library_dirs=[CUDA['lib64']],
              libraries=['cudart', 'curand'],
              extra_objects=[],
              include_dirs=[
                  "cpp/include/",
                  np.get_include(),
                  pybind11.get_include(),
                  pybind11.get_include(True),
                  CUDA['include'],
                  "3rd/json11",
                  "3rd/spdlog/include"])
]


# Return the git revision as a string
def git_version():
    def _minimal_ext_cmd(cmd):
        # construct minimal environment
        # (only SYSTEMROOT/PATH are forwarded so git output stays stable)
        env = {}
        for k in ['SYSTEMROOT', 'PATH']:
            val = os.environ.get(k)
            if val is not None:
                env[k] = val
        out = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env). \
            communicate()[0]
def get_pybind11_include_path(self) -> str:
    """Return the directory containing the pybind11 headers."""
    header_path = pybind11.get_include()
    return header_path
import os
import sys
import platform

import numpy as np
import pybind11
import setuptools
from setuptools import Extension, setup
from setuptools.command.build_ext import build_ext

__version__ = '0.6.1'

# Headers needed by every extension: pybind11 and numpy.
include_dirs = [
    pybind11.get_include(),
    np.get_include(),
]

# compatibility when run in python_bindings
# (sources/includes are resolved relative to the current directory)
bindings_dir = 'python_bindings'
if bindings_dir in os.path.basename(os.getcwd()):
    source_files = ['./bindings.cpp']
    include_dirs.extend(['../hnswlib/'])
else:
    source_files = ['./python_bindings/bindings.cpp']
    include_dirs.extend(['./hnswlib/'])

libraries = []
extra_objects = []

ext_modules = [
    Extension(
def get_pybind_include(user=False):
    """Locate the pybind11 header directory (user site when *user* is True)."""
    # Imported lazily so this file can be parsed before pybind11 is installed.
    import pybind11
    include_path = pybind11.get_include(user)
    return include_path
def build_extensions(self):
    """Finish configuring the single mplcairo extension, then build it.

    Adds sources/headers, fetches a raqm header from the network when the
    library is absent, applies per-platform compiler flags, and on Windows
    copies runtime DLLs next to the built module.
    """
    import pybind11
    # Exactly one extension module is expected.
    ext, = self.distribution.ext_modules
    ext.depends += [
        "setup.py",
        *map(str, Path("src").glob("*.h")),
        *map(str, Path("src").glob("*.cpp")),
    ]
    if UNITY_BUILD:
        ext.sources += ["src/_unity_build.cpp"]
    else:
        ext.sources += [*map(str, Path("src").glob("*.cpp"))]
        ext.sources.remove("src/_unity_build.cpp")
    ext.include_dirs += [pybind11.get_include()]
    tmp_include_dir = Path(
        self.get_finalized_command("build").build_base, "include")
    tmp_include_dir.mkdir(parents=True, exist_ok=True)
    ext.include_dirs += [tmp_include_dir]
    try:
        get_pkg_config(f"--atleast-version={MIN_RAQM_VERSION}", "raqm")
    except (FileNotFoundError, CalledProcessError):
        # No system raqm: fetch the header so we can still compile against it.
        (tmp_include_dir / "raqm-version.h").write_text("")  # Touch it.
        with urllib.request.urlopen(
                f"https://raw.githubusercontent.com/HOST-Oman/libraqm/"
                f"v{MIN_RAQM_VERSION}/src/raqm.h") as request, \
             (tmp_include_dir / "raqm.h").open("wb") as file:
            file.write(request.read())
    if sys.platform == "linux":
        import cairo
        get_pkg_config(f"--atleast-version={MIN_CAIRO_VERSION}", "cairo")
        ext.include_dirs += [cairo.get_include()]
        ext.extra_compile_args += [
            "-std=c++1z", "-fvisibility=hidden", "-flto",
            "-Wall", "-Wextra", "-Wpedantic",
            *get_pkg_config("--cflags", "cairo"),
        ]
        ext.extra_link_args += ["-flto"]
        if MANYLINUX:
            ext.extra_link_args += ["-static-libgcc", "-static-libstdc++"]
    elif sys.platform == "darwin":
        import cairo
        get_pkg_config(f"--atleast-version={MIN_CAIRO_VERSION}", "cairo")
        ext.include_dirs += [cairo.get_include()]
        # On OSX<10.14, version-min=10.9 avoids deprecation warning wrt.
        # libstdc++, but assumes that the build uses non-Xcode-provided
        # LLVM.
        # On OSX>=10.14, assume that the build uses the normal toolchain.
        macosx_min_version = ("10.14" if LooseVersion(
            platform.mac_ver()[0]) >= "10.14" else "10.9")
        ext.extra_compile_args += [
            "-std=c++1z", "-fvisibility=hidden", "-flto",
            f"-mmacosx-version-min={macosx_min_version}",
            *get_pkg_config("--cflags", "cairo"),
        ]
        ext.extra_link_args += [
            # version-min needs to be repeated to avoid a warning.
            "-flto", f"-mmacosx-version-min={macosx_min_version}",
        ]
    elif sys.platform == "win32":
        # Windows conda path for FreeType.
        ext.include_dirs += [Path(sys.prefix, "Library/include")]
        ext.extra_compile_args += [
            "/std:c++17", "/Zc:__cplusplus", "/experimental:preprocessor",
            "/EHsc", "/D_USE_MATH_DEFINES",
            "/wd4244", "/wd4267",  # cf. gcc -Wconversion.
        ]
        ext.libraries += ["psapi", "cairo", "freetype"]
        # Windows conda path for FreeType -- needs to be str, not Path.
        ext.library_dirs += [str(Path(sys.prefix, "Library/lib"))]
    # Workaround https://bugs.llvm.org/show_bug.cgi?id=33222 (clang +
    # libstdc++ + std::variant = compilation error) and pybind11 #1604
    # (-fsized-deallocation).  Note that `.compiler.compiler` only exists
    # for UnixCCompiler.
    if os.name == "posix":
        compiler_macros = subprocess.check_output(
            [*self.compiler.compiler, "-dM", "-E", "-x", "c", "/dev/null"],
            universal_newlines=True)
        if "__clang__" in compiler_macros:
            ext.extra_compile_args += ([
                "-stdlib=libc++", "-fsized-deallocation"
            ])
            # Explicitly linking to libc++ is required to avoid picking up
            # the system C++ library (libstdc++ or an outdated libc++).
            ext.extra_link_args += ["-lc++"]
    super().build_extensions()
    if sys.platform == "win32":
        # Ship the cairo/freetype DLLs next to the built module.
        for dll in ["cairo.dll", "freetype.dll"]:
            for path in paths_from_link_libpaths():
                if (path / dll).exists():
                    shutil.copy2(path / dll,
                                 Path(self.build_lib, "mplcairo"))
                    break
# Copyright (C) 2010-2016 Dzhelil S. Rufat. All Rights Reserved. import numpy import pybind11 from setuptools import setup, Extension include_dirs = [ 'src', numpy.get_include(), pybind11.get_include(True), pybind11.get_include(False), ] depends = [ 'setup.py', ] extra_compile_args = [ '-std=c++11', ] ext_modules = [ Extension( 'licpy.resample', sources=[ 'licpy/resample.cpp', ], depends=depends, include_dirs=include_dirs, extra_compile_args=extra_compile_args,
def setup_dyna_cpp():
    '''Basic setup of the LS-DYNA C-Extension

    Returns
    -------
    srcs : list of str
        C++ source files of the extension
    include_dirs : list of str
        header search paths (project, numpy, pybind11)
    compiler_args : list of str
        platform-specific compile flags
    extra_link_args : list of str
        platform-specific link flags
    libs_dyna : list of str
        libraries to link against
    '''
    include_dirs = ["qd/cae", np.get_include(), pybind11.get_include()]
    srcs = [
        "qd/cae/dyna_cpp/python_api/pybind_wrapper.cpp",
        "qd/cae/dyna_cpp/db/FEMFile.cpp",
        "qd/cae/dyna_cpp/db/DB_Elements.cpp",
        "qd/cae/dyna_cpp/db/DB_Nodes.cpp",
        "qd/cae/dyna_cpp/db/DB_Parts.cpp",
        "qd/cae/dyna_cpp/db/Element.cpp",
        "qd/cae/dyna_cpp/db/Node.cpp",
        "qd/cae/dyna_cpp/db/Part.cpp",
        "qd/cae/dyna_cpp/dyna/d3plot/D3plotBuffer.cpp",
        "qd/cae/dyna_cpp/dyna/d3plot/D3plot.cpp",
        "qd/cae/dyna_cpp/dyna/d3plot/RawD3plot.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/KeyFile.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/Keyword.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/NodeKeyword.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/ElementKeyword.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/PartKeyword.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/IncludeKeyword.cpp",
        "qd/cae/dyna_cpp/dyna/keyfile/IncludePathKeyword.cpp",
        "qd/cae/dyna_cpp/utility/FileUtility.cpp",
        "qd/cae/dyna_cpp/utility/TextUtility.cpp",
        # "qd/cae/dyna_cpp/parallel/WorkQueue.cpp",
    ]
    extra_link_args = []
    libs_dyna = []
    # linux compiler args
    if is_linux:
        compiler_args = ["-std=c++11",
                         "-fPIC",
                         "-D_GLIBCXX_USE_CXX11_ABI=0",  # ABI compatability
                         "-DQD_VERSION=\"" + version + "\""]
        libs_dyna = ["stdc++"]
        if debugging_mode:
            compiler_args.append("-DQD_DEBUG")
            compiler_args.append("-O0")
        else:
            compiler_args.append("-O3")
        if measure_time:
            compiler_args.append("-DQD_MEASURE_TIME")
        if use_openmp:
            compiler_args.append("-fopenmp")
            extra_link_args.append("-lgomp")
        else:
            compiler_args.append("-Wno-unknown-pragmas")
    # windowscompiler args
    elif is_windows:
        compiler_args = ["/EHa", "/DQD_VERSION=\\\"" + version + "\\\""]
        if debugging_mode:
            compiler_args.append("/DQD_DEBUG")
        if measure_time:
            compiler_args.append("/DQD_MEASURE_TIME")
        if use_openmp:
            compiler_args.append("/openmp")
    else:
        raise RuntimeError("Could not determine os (windows or linux)")
    return srcs, include_dirs, compiler_args, extra_link_args, libs_dyna
def __str__(self):
    """Resolve the pybind11 include directory lazily, at build time."""
    # Importing here defers the pybind11 requirement until the path is
    # actually needed by the compiler.
    import pybind11
    include_path = pybind11.get_include(self.user)
    return include_path
def finalize_options(self):
    """Resolve build options into include/library/libraries lists.

    Chooses between pkg-config discovery and manually supplied
    ffmpeg/sphinxbase/pocketsphinx directories, then populates
    self.include_dirs, self.library_dirs and self.libraries.

    Raises
    ------
    Exception
        If --use-pkg-config has an invalid value, pkg-config is requested
        but unavailable, or a supplied directory does not exist.
    """
    super().finalize_options()
    use_pkg_config = False
    if self.use_pkg_config is None:
        # Auto-detect: use pkg-config when it is available.
        use_pkg_config = self.has_pkg_config()
    elif self.use_pkg_config == 'yes':
        use_pkg_config = True
        if not self.has_pkg_config():
            raise Exception('pkg-config not available')
    elif self.use_pkg_config == 'no':
        use_pkg_config = False
    else:
        raise Exception(
            '--use-pkg-config invalid value {}, should be yes or no'.
            format(self.use_pkg_config))
    # Validate manually supplied directories early.
    # Bug fix: the original messages called .format() on strings without a
    # {} placeholder, so the offending path was silently dropped from the
    # error text (and "exists" was a grammar slip).
    if self.ffmpeg_dir is not None and not os.path.isdir(self.ffmpeg_dir):
        raise Exception('ffmpeg directory does not exist: {}'.format(
            self.ffmpeg_dir))
    if self.sphinxbase_dir is not None and not os.path.isdir(
            self.sphinxbase_dir):
        raise Exception('sphinxbase directory does not exist: {}'.format(
            self.sphinxbase_dir))
    if self.pocketsphinx_dir is not None and not os.path.isdir(
            self.pocketsphinx_dir):
        raise Exception('pocketsphinx directory does not exist: {}'.format(
            self.pocketsphinx_dir))
    ffmpeg_libs = [
        'avdevice',
        'avformat',
        'avfilter',
        'avcodec',
        'swresample',
        'swscale',
        'avutil',
    ]
    sphinx_libs = [
        'pocketsphinx',
        'sphinxbase',
    ]
    if use_pkg_config:
        # pkg-config names the ffmpeg packages with a "lib" prefix.
        pkgs = ['lib' + name for name in ffmpeg_libs] + sphinx_libs
        self.cflags += self.get_pkg_config('--cflags-only-other', pkgs)
        self.ldflags += self.get_pkg_config('--libs-only-other', pkgs)
        self.include_dirs += self.get_pkg_config('--cflags-only-I', pkgs,
                                                 strip_prefixes=['-I'])
        self.library_dirs += self.get_pkg_config(
            '--libs-only-L', pkgs, strip_prefixes=['-L', '-R'])
        self.libraries += self.get_pkg_config('--libs-only-l', pkgs,
                                              strip_prefixes=['-l'])
    else:
        self.libraries += ffmpeg_libs + sphinx_libs
        import pybind11
        self.include_dirs += self.get_paths(pybind11.get_include(), '')
        self.include_dirs += self.get_paths(pybind11.get_include(True), '')
        self.include_dirs += self.get_paths(self.ffmpeg_dir, '', 'include')
        self.include_dirs += self.get_paths(self.sphinxbase_dir, '',
                                            'include')
        self.include_dirs += self.get_paths(self.pocketsphinx_dir, '',
                                            'include')
        # NOTE(review): this adds Python's *include* path to library_dirs,
        # which looks suspicious -- confirm whether a lib path was intended.
        self.library_dirs += [sysconfig.get_path('include')]
        self.library_dirs += self.get_paths(self.ffmpeg_dir, '', 'lib')
        self.library_dirs += self.get_paths(self.sphinxbase_dir, '', 'lib')
        self.library_dirs += self.get_paths(self.pocketsphinx_dir, '', 'lib')
        if sys.platform == 'win32':
            bit64 = sys.maxsize > 2**32
            arch = 'x64' if bit64 else 'win32'
            self.include_dirs += self.get_paths(
                self.sphinxbase_dir, os.path.join('include', 'win32'))
            self.library_dirs += self.get_paths(
                self.sphinxbase_dir, os.path.join('bin', 'Release', arch))
            self.library_dirs += self.get_paths(
                self.pocketsphinx_dir, os.path.join('bin', 'Release', arch))
version='0.1',
description='A Custom Special-Purpose C++ Module with a Simple Python Interface',
ext_modules=[
    Extension(
        # Module name. Must match whatever you gave to PYBIND11_PLUGIN(...)
        name="my_custom_module",
        # list your source C++ file(s) for this module here:
        sources=[
            'my_custom_module.cxx'
        ],
        # Include directories (note that tomographer.include.get_include() returns a
        # list, including Boost and Eigen header locations as well as Tomographer
        # and Tomographer-Py header locations):
        include_dirs=[
            numpy.get_include(),
            pybind11.get_include(),      # for pybind11 system-wide install
            pybind11.get_include(True),  # for pybind11 user install
        ] + tomographer.include.get_include(),
        # Compiler flags:
        extra_compile_args=shlex.split(vv.get('CXX_FLAGS')),
        # any linker flags, if needed (do NOT use -flto, or else use it at your own risk):
        extra_link_args=[],
        # anywhere to look for libraries you need to link against? :
        library_dirs=[],
        # any libraries you need to link against? :
        libraries=[],
        # any custom header files you depend on:
        depends=[]
    ),
],
)
]
link_args = []
ext_source = []
# Collect every file under src/pybind11 as an extension source.
for root, dirs, files in os.walk('src/pybind11'):
    ext_source += ['%s/%s' % (root, f) for f in files]
# Derive the installed module path for each source: src/pybind11/foo.cc
# becomes mdma/foo.
ext_path = [
    path.replace('src/pybind11', 'mdma').replace('.cc', '')
    for path in ext_source
]
# One Extension per (module, source) pair, all sharing the same settings.
ext_modules = [
    Extension(module_path, [source_path],
              include_dirs=[
                  'include',
                  pybind11.get_include(False),
                  pybind11.get_include(True),
                  '/usr/include/eigen3'
              ],
              language='c++',
              extra_compile_args=cpp_args,
              extra_link_args=link_args)
    for module_path, source_path in zip(ext_path, ext_source)
]
setup(
    name='MDMAmazing',
    version=version,
    license='GNU General Public License v3.0',
    author='Joshua F. Robinson',
    author_email='*****@*****.**',
    url='https://github.com/tranqui/MDMAmazing.git',
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for scipy.spatial.

    Registers the qhull, cKDTree, distance, voronoi and hausdorff
    extensions plus data files and type stubs.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from scipy._build_utils.system_info import get_info
    from scipy._build_utils import combine_dict, uses_blas64, numpy_nodepr_api
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    from distutils.sysconfig import get_python_inc
    import pybind11
    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')
    # spatial.transform
    config.add_subpackage('transform')
    # qhull
    qhull_src = sorted(
        glob.glob(join(dirname(__file__), 'qhull_src', 'src', '*.c')))
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # NOTE(review): get_numpy_include_dirs() returns a list, so this nests
    # a list inside inc_dirs; numpy.distutils appears to tolerate it, but
    # extend() would be the flat form -- confirm before changing.
    inc_dirs.append(get_numpy_include_dirs())
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))
    inc_dirs.append(join(dirname(dirname(__file__)), '_build_utils', 'src'))
    if uses_blas64():
        lapack_opt = get_info('lapack_ilp64_opt')
    else:
        lapack_opt = get_info('lapack_opt')
    cfg = combine_dict(lapack_opt, include_dirs=inc_dirs)
    config.add_extension('_qhull',
                         sources=['_qhull.c', 'qhull_misc.c'] + qhull_src,
                         **cfg)
    # cKDTree
    ckdtree_src = [
        'query.cxx', 'build.cxx', 'query_pairs.cxx', 'count_neighbors.cxx',
        'query_ball_point.cxx', 'query_ball_tree.cxx', 'sparse_distances.cxx'
    ]
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]
    ckdtree_headers = [
        'ckdtree_decl.h', 'coo_entries.h', 'distance_base.h', 'distance.h',
        'ordered_pair.h', 'rectangle.h'
    ]
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]
    ckdtree_dep = ['_ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    ext = config.add_extension('_ckdtree',
                               sources=['_ckdtree.cxx'] + ckdtree_src,
                               depends=ckdtree_dep,
                               include_dirs=inc_dirs + [join('ckdtree', 'src')])
    ext._pre_build_hook = set_cxx_flags_hook
    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[
                             get_numpy_include_dirs(),
                             join(dirname(dirname(__file__)), '_lib')
                         ],
                         extra_info=get_misc_info("npymath"),
                         **numpy_nodepr_api)
    distance_pybind_includes = [
        pybind11.get_include(True),
        pybind11.get_include(False),
        get_numpy_include_dirs()
    ]
    ext = config.add_extension('_distance_pybind',
                               sources=[join('src', 'distance_pybind.cpp')],
                               depends=[
                                   join('src', 'function_ref.h'),
                                   join('src', 'views.h'),
                                   join('src', 'distance_metrics.h')
                               ],
                               include_dirs=distance_pybind_includes,
                               language='c++',
                               **numpy_nodepr_api)
    ext._pre_build_hook = pre_build_hook
    config.add_extension('_voronoi', sources=['_voronoi.c'])
    config.add_extension('_hausdorff', sources=['_hausdorff.c'])
    # Add license files
    config.add_data_files('qhull_src/COPYING.txt')
    # Type stubs
    config.add_data_files('*.pyi')
    return config
def main():
    """Collect data files and extension modules, then run setup().

    Also handles ``clean`` by removing Cython-generated artifacts, and
    skips extension building entirely on readthedocs.org.
    """
    data_files = list()
    # includes.
    data_files.append((os.path.join('include', 'solvcon'),
                       glob.glob(os.path.join('include', '*'))))
    # javascript code.
    lead = os.path.join('solvcon', 'visual', 'js')
    for root, directory, files in os.walk(lead):
        files = [(lead, os.path.join(root, fname)) for fname in files]
        data_files.extend(files)
    # test data.
    lead = os.path.join('share', 'solvcon', 'test')
    data_files.extend([
        (lead, glob.glob(os.path.join('test', 'data', '*.g'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.jou'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.nc'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.neu'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.blk'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.vtk'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.msh.gz'))),
        (lead, glob.glob(os.path.join('test', 'data', '*.geo'))),
        (os.path.join(lead, 'sample.dom'),
         glob.glob(os.path.join('test', 'data', 'sample.dom', '*')))
    ])
    # examples.
    lead = os.path.join('share', 'solvcon')
    for edir in glob.glob(os.path.join('examples', '*', '*')):
        if os.path.isdir(edir):
            data_files.append(
                (os.path.join(lead, edir), [os.path.join(edir, 'go')]))
            for ext in ('tmpl', 'py', 'h'):
                data_files.append((os.path.join(lead, edir),
                                   glob.glob(os.path.join(edir, '*.%s'%ext))))
    # Silence unused-variable warnings; the -but-set variant is gcc-only.
    turn_off_unused_warnings = '-Wno-unused-variable'
    if sys.platform != 'darwin':
        turn_off_unused_warnings += ' -Wno-unused-but-set-variable'
    # set up extension modules.
    ext_modules = [
        make_pybind11_extension(
            'solvcon.march',
            include_dirs=['libmarch/include', pybind11.get_include()]
        ),
        make_cython_extension(
            'solvcon._march_bridge', [],
            include_dirs=['libmarch/include']
        ),
        make_cython_extension(
            'solvcon.mesh', ['src'],
        ),
        make_cython_extension(
            'solvcon.parcel.fake._algorithm', ['src'],
            extra_compile_args=[
                turn_off_unused_warnings,
            ],
        ),
        make_cython_extension(
            'solvcon.parcel.linear._algorithm', ['src'],
            libraries=['lapack', 'blas'],
            extra_compile_args=[
                turn_off_unused_warnings,
                '-Wno-unknown-pragmas',
            ],
        ),
        make_cython_extension(
            'solvcon.parcel.bulk._algorithm', ['src'],
            extra_compile_args=[
                turn_off_unused_warnings,
                '-Wno-unknown-pragmas',
                '-Wno-uninitialized',
            ],
        ),
        make_cython_extension(
            'solvcon.parcel.gas._algorithm', ['src'],
            extra_compile_args=[
                turn_off_unused_warnings,
                '-Wno-unknown-pragmas',
            ],
        ),
        make_cython_extension(
            'solvcon.parcel.vewave._algorithm', ['src'],
            libraries=['lapack', 'blas'],
            extra_compile_args=[
                turn_off_unused_warnings,
                '-Wno-unknown-pragmas',
            ],
        ),
    ]
    # remove files when cleaning.
    # (cidx > sidx means "clean" appears after "setup.py" on the command line)
    sidx = sys.argv.index('setup.py') if 'setup.py' in sys.argv else -1
    cidx = sys.argv.index('clean') if 'clean' in sys.argv else -1
    if cidx > sidx:
        derived = list()
        for mod in ext_modules:
            pyx = mod.sources[0] # this must be the pyx file.
            mainfn, dotfn = os.path.splitext(pyx)
            if '.pyx' == dotfn:
                derived += ['.'.join((mainfn, ext)) for ext in ('c', 'h')]
                derived += ['%s.so' % mainfn] + glob.glob('%s.*.so' % mainfn)
        derived = [fn for fn in derived if os.path.exists(fn)]
        if derived:
            sys.stdout.write('Removing in-place generated files:')
            for fn in derived:
                os.remove(fn)
                sys.stdout.write('\n %s' % fn)
            sys.stdout.write('\n')
    else:
        if "/home/docs/checkouts/readthedocs.org" in os.getcwd():
            # Do not build extension modules if I am in readthedocs.org,
            # because the dependency cannot be met.
            ext_modules = list()
        else:
            ext_modules = cythonize(ext_modules)
    setup(
        name='SOLVCON',
        maintainer='Yung-Yu Chen',
        author='Yung-Yu Chen',
        maintainer_email='*****@*****.**',
        author_email='*****@*****.**',
        description='Solvers of Conservation Laws',
        long_description=''.join(open('README.rst').read()),
        license='BSD',
        url='http://solvcon.net/',
        download_url='https://github.com/solvcon/solvcon/releases',
        classifiers=[tok.strip() for tok in CLASSIFIERS.split('\n')],
        platforms=[
            'Linux',
        ],
        version=sc.__version__,
        scripts=[
            'scg',
        ],
        packages=[
            'solvcon',
            'solvcon.io',
            'solvcon.io.tests',
            'solvcon.kerpak',
            'solvcon.parcel',
            'solvcon.parcel.bulk',
            'solvcon.parcel.fake',
            'solvcon.parcel.gas',
            'solvcon.parcel.linear',
            'solvcon.parcel.tests',
            'solvcon.parcel.vewave',
            'solvcon.tests',
            'solvcon.vis',
        ],
        package_data={
            'solvcon.vis': ["js/*"],
        },
        ext_modules=ext_modules,
        data_files=data_files,
    )
    return
def read(fname):
    # Read a file located next to this setup script.
    # NOTE(review): the file handle is never closed (ResourceWarning);
    # a `with` block would be cleaner -- left unchanged here.
    return open(join(path.dirname(__file__), fname)).read()


pypi_deps = ['numpy', 'wand', 'scipy', 'pygame','PyOpenGL', 'setuptools',
             'pybind11', 'wheel']

#Make sure we're using gcc.
os.environ["CC"] = "g++"
os.environ["CXX"] = "g++"

cpp_args = ['-fopenmp', '-std=gnu++14', '-O3']
link_args = ['-fopenmp']

olOpt = Extension(
    'openlut.lib.olOpt',
    sources = ['openlut/lib/olOpt.cpp'],
    include_dirs=[pybind11.get_include()], #Include pybind11 from its pip package.
    language = 'c++',
    extra_compile_args = cpp_args,
    # NOTE(review): cpp_args is passed as the *link* args too, so link_args
    # above is unused -- confirm whether extra_link_args=link_args was meant.
    extra_link_args = cpp_args
)

setup(
    name = 'openlut',
    version = '0.2.6',
    description = 'OpenLUT is a practical color management library.',
    long_description = read('README.rst'),
    author = 'Sofus Rose',
    author_email = '*****@*****.**',
    url = 'https://www.github.com/so-rose/openlut',
    packages = find_packages(exclude=['src']),
def build_extensions(self):
    """Configure Eigen/pybind11 includes and compiler flags, then build.

    Optionally enables Eigen/Sparse support and links against numpy's
    LAPACK, both controlled by environment variables; on Read the Docs
    it bails out right after the basic flag setup.
    """
    # Add the required Eigen include directory, hinting the search with
    # every include dir already known to the compiler or the extensions.
    dirs = self.compiler.include_dirs
    for ext in self.extensions:
        dirs += ext.include_dirs
    include_dirs = []
    eigen_include = find_eigen(hint=dirs)
    if eigen_include is None:
        logging.warn("Required library Eigen 3 not found.")
    else:
        include_dirs += [eigen_include]
    # Add the pybind11 include directory
    import pybind11
    include_dirs += [
        pybind11.get_include(False),
        pybind11.get_include(True),
    ]
    for ext in self.extensions:
        ext.include_dirs += include_dirs
    # Set up pybind11 compiler options.
    ct = self.compiler.compiler_type
    opts = self.c_opts.get(ct, [])
    if ct == 'unix':
        opts.append('-DVERSION_INFO="{0:s}"'
                    .format(self.distribution.get_version()))
        opts.append(cpp_flag(self.compiler))
        if has_flag(self.compiler, '-fvisibility=hidden'):
            opts.append('-fvisibility=hidden')
        for flag in ["-Wno-unused-function", "-Wno-uninitialized", "-O4"]:
            if has_flag(self.compiler, flag):
                opts.append(flag)
    elif ct == 'msvc':
        opts.append('/DVERSION_INFO=\\"{0:s}\\"'
                    .format(self.distribution.get_version()))
    for ext in self.extensions:
        ext.extra_compile_args = opts
    # Building on RTD doesn't require the extra speedups and it seems to
    # fail for some reason so we'll bail early.
    if os.environ.get("READTHEDOCS", None) == "True":
        _build_ext.build_extensions(self)
        return
    # Enable Eigen/Sparse support unless WITH_SPARSE=false.
    with_sparse = os.environ.get("WITH_SPARSE", None)
    if with_sparse is None or with_sparse.lower() != "false":
        for ext in self.extensions:
            ext.define_macros += [("WITH_SPARSE", None)]
    # Link to numpy's LAPACK if available
    variant = os.environ.get("LAPACK_VARIANT", None)
    if variant is not None and variant.lower() != "none":
        info = get_info(variant)
        if not len(info):
            # Bug fix: the message had a {0} placeholder but .format() was
            # never called, so the variant name was never interpolated.
            logging.warn(
                "LAPACK info for variant '{0}' not found".format(variant))
            info = get_info("blas_opt")
        for ext in self.extensions:
            # Merge every build-info entry into the matching extension
            # attribute; skip attributes whose types don't support '+'.
            for k, v in info.items():
                try:
                    setattr(ext, k, getattr(ext, k) + v)
                except TypeError:
                    continue
            ext.define_macros += [
                ("WITH_LAPACK", None),
                ("LAPACK_VARIANT", variant)
            ]
    # Run the standard build procedure.
    _build_ext.build_extensions(self)
def setup_pybind11(cfg):
    """Augment a cppimport-style config dict with pybind11 build settings."""
    import pybind11
    pybind_dirs = [pybind11.get_include(), pybind11.get_include(True)]
    cfg['include_dirs'] += pybind_dirs
    # Prefix with c++11 arg instead of suffix so that if a user specifies
    # c++14 (or later!) then it won't be overridden.
    cfg['compiler_args'] = ['-std=c++11'] + cfg['compiler_args']
maintainer_email=__email__,
keywords=[
    'Mie scattering', 'Multilayered sphere', 'Efficiency factors',
    'Cross-sections'
],
url=__url__,
download_url=__download_url__,
license='GPL',
platforms='any',
packages=['scattnlay'],  # , 'scattnlay_dp', 'scattnlay_mp'],
# Two builds of the same wrapper: double precision and (via the
# -DMULTI_PRECISION define) 100-digit multiprecision.
ext_modules=[
    Extension(
        "scattnlay_dp", ["src/pb11_wrapper.cc"],
        language="c++",
        include_dirs=[np.get_include(), pb.get_include()],
        # extra_compile_args=['-std=c++11']),
        extra_compile_args=[
            '-std=c++11', '-O3', '-mavx2', '-mfma',
            '-finline-limit=1000000', '-ffp-contract=fast'
        ]),
    Extension("scattnlay_mp", ["src/pb11_wrapper.cc"],
              language="c++",
              include_dirs=[np.get_include(), pb.get_include()],
              extra_compile_args=[
                  '-std=c++11', '-O3', '-mavx2', '-mfma',
                  '-finline-limit=1000000', '-ffp-contract=fast',
                  '-DMULTI_PRECISION=100'
              ]),
    # extra_compile_args=['-std=c++11', '-DMULTI_PRECISION=100'])
def setup_dyna_cpp():
    '''Collect build settings for the LS-DYNA C-Extension.

    Returns
    -------
    srcs : list of str
        C++ translation units of the extension.
    include_dirs : list of str
        Header search paths (project, numpy and pybind11).
    compiler_args : list of str
        Platform-specific compile flags.
    extra_link_args : list of str
        Platform-specific link flags.
    libs_dyna : list of str
        Extra libraries to link against.
    '''
    include_dirs = ["qd/cae", np.get_include(), pybind11.get_include()]

    # All sources live under the dyna_cpp tree.
    root = "qd/cae/dyna_cpp/"
    srcs = [root + rel for rel in (
        "python_api/pybind_wrapper.cpp",
        "db/FEMFile.cpp",
        "db/DB_Elements.cpp",
        "db/DB_Nodes.cpp",
        "db/DB_Parts.cpp",
        "db/Element.cpp",
        "db/Node.cpp",
        "db/Part.cpp",
        "dyna/d3plot/D3plotBuffer.cpp",
        "dyna/d3plot/D3plot.cpp",
        "dyna/d3plot/RawD3plot.cpp",
        "dyna/keyfile/KeyFile.cpp",
        "dyna/keyfile/Keyword.cpp",
        "dyna/keyfile/NodeKeyword.cpp",
        "dyna/keyfile/ElementKeyword.cpp",
        "dyna/keyfile/PartKeyword.cpp",
        "dyna/keyfile/IncludeKeyword.cpp",
        "dyna/keyfile/IncludePathKeyword.cpp",
        "utility/FileUtility.cpp",
        "utility/TextUtility.cpp",
        "parallel/WorkQueue.cpp",
    )]

    extra_link_args = []
    libs_dyna = []

    if is_linux:
        # GCC/Clang flags.
        compiler_args = ["-std=c++14", "-fPIC",
                         "-DQD_VERSION=\"" + version + "\""]
        libs_dyna = ["stdc++"]
        if debugging_mode:
            compiler_args += ["-DQD_DEBUG", "-O0"]
        else:
            compiler_args += ["-O3"]
        if measure_time:
            compiler_args += ["-DQD_MEASURE_TIME"]
        if use_openmp:
            compiler_args += ["-fopenmp"]
            extra_link_args += ["-lgomp"]
        else:
            # Silence warnings about the unused OpenMP pragmas.
            compiler_args += ["-Wno-unknown-pragmas"]
    elif is_windows:
        # MSVC flags.
        compiler_args = ["/EHa", "/DQD_VERSION=\\\"" + version + "\\\""]
        if debugging_mode:
            compiler_args += ["/DQD_DEBUG"]
        if measure_time:
            compiler_args += ["/DQD_MEASURE_TIME"]
        if use_openmp:
            compiler_args += ["/openmp"]
    else:
        raise RuntimeError("Could not determine os (windows or linux)")

    return srcs, include_dirs, compiler_args, extra_link_args, libs_dyna
#!/usr/bin/env python3
# encoding: utf-8
"""Minimal setup script for the C++ 'example' extension module."""
from setuptools import setup, Extension

import pybind11

setup(
    name='example',
    version='0.1.0',
    description='example module written in C++',
    ext_modules=[
        Extension(
            'example',
            sources=['example-pants.cpp'],
            language='C++',
            # pybind11 headers from the user site-packages location.
            include_dirs=[pybind11.get_include(True)],
        ),
    ],
)
def __str__(self):
    # Resolve the pybind11 include directory lazily, at str() time.
    #
    # NOTE(review): the guard tests the module-level `pybind11_path`, but
    # the branch then reads os.environ["PYBIND11_DIR"] rather than using
    # that value — presumably pybind11_path is derived from PYBIND11_DIR;
    # confirm, otherwise this can raise KeyError when the env var is unset.
    if pybind11_path is not None:
        return os.path.join(os.environ["PYBIND11_DIR"], "include")
    else:
        # Fall back to the installed pybind11 package's include dir.
        import pybind11
        return pybind11.get_include(self.user)
def __str__(self) -> str:
    """Return the pybind11 header directory; the import is deferred so
    that pybind11 only needs to be installed once this path is used."""
    import pybind11
    header_dir = pybind11.get_include()
    return header_dir
import platform from distutils.core import setup, Extension from distutils.command.build_ext import build_ext as BaseBuildExt from distutils.command.build import build as BaseBuild from distutils.unixccompiler import UnixCCompiler from distutils.util import get_platform from distutils.sysconfig import customize_compiler from distutils.errors import DistutilsSetupError from distutils.dep_util import newer try: import pybind11 except ImportError: INCLUDE_DIRS = [] else: INCLUDE_DIRS = [pybind11.get_include(), pybind11.get_include(user=True)] EXTRA_COMPILE_ARGS = ['-std=c++14'] if platform.system != "Windows" else [] VERSION_FILE = "VERSION" PY_VERSION_FILE = "ome_files/version.py" def get_version_string(): try: with open(VERSION_FILE) as f: return f.read().strip() except IOError: raise DistutilsSetupError("failed to read version info") def write_version():
import os
import sys

import pybind11
from setuptools import setup, find_packages, Extension

# Header/library search paths; extended below from *_ROOT env vars.
include_dirs = [pybind11.get_include()]
library_dirs = []


def _get_long_description():
    """Read README.md (next to this file) as the long description."""
    readme_path = os.path.join(os.path.dirname(__file__), "README.md")
    with open(readme_path, encoding="utf-8") as readme_file:
        return readme_file.read()


def _maybe_add_library_root(lib_name):
    """If <lib_name>_ROOT is set, register its include/ and lib dirs."""
    env_key = "%s_ROOT" % lib_name
    if env_key not in os.environ:
        return
    root = os.environ[env_key]
    include_dirs.append("%s/include" % root)
    # Prefer the first existing lib directory (lib, then lib64).
    for lib_dir in ("lib", "lib64"):
        path = "%s/%s" % (root, lib_dir)
        if os.path.exists(path):
            library_dirs.append(path)
            break


_maybe_add_library_root("CTRANSLATE2")

cflags = ["-std=c++17"]
def __str__(self):
    """Lazily resolve and return the pybind11 include directory."""
    import pybind11  # pylint: disable=g-import-not-at-top
    include_dir = pybind11.get_include()
    return include_dir
def build_extension(self, ext):
    """Configure and build a CMake-based extension in self.build_temp.

    Passes the Python/pybind11 include paths, extension suffix and the
    target output directory to CMake, then builds the `psvWave_cpp`
    target.

    Raises
    ------
    subprocess.CalledProcessError
        If the configure or the build step fails.
    """
    import pybind11

    suffix = get_config_vars()["EXT_SUFFIX"]
    python_includes = get_paths()["include"]
    pybind_includes = pybind11.get_include()

    extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
    # required for auto-detection of auxiliary "native" libs
    if not extdir.endswith(os.path.sep):
        extdir += os.path.sep

    cmake_args = [
        "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=" + extdir,
        "-DPYTHON_EXECUTABLE=" + sys.executable,
        "-DPYBIND_INCLUDES=" + pybind_includes,
        "-DPYTHON_INCLUDES=" + python_includes,
        "-DSUFFIX=" + suffix,
    ]
    cfg = "Debug" if self.debug else "Release"
    build_args = ["--config", cfg]

    if platform.system() == "Windows":
        cmake_args += [
            "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}".format(cfg.upper(), extdir)
        ]
        if sys.maxsize > 2 ** 32:
            cmake_args += ["-A", "x64"]
        build_args += ["--", "/m"]
    else:
        # BUG FIX: "-O3" is a compiler flag, not a CMake option, and made
        # the (previously unchecked) configure step fail. Optimization is
        # governed by CMAKE_BUILD_TYPE=Release instead.
        cmake_args += ["-DCMAKE_BUILD_TYPE=" + cfg]
        build_args += ["--", "-j2"]

    env = os.environ.copy()
    env["CXXFLAGS"] = '{} -DVERSION_INFO=\\"{}\\"'.format(
        env.get("CXXFLAGS", ""), self.distribution.get_version()
    )

    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    # BUG FIX: both cmake invocations previously used Popen and ignored
    # the return code, so a failed native build went unnoticed;
    # check_call raises on failure and streams output directly.
    subprocess.check_call(
        ["cmake", ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env
    )
    subprocess.check_call(
        ["cmake", "--build", ".", "--target", "psvWave_cpp"] + build_args,
        cwd=self.build_temp,
    )
def __str__(self):
    """Locate the pybind11 headers on demand, honouring `self.user`."""
    import pybind11
    location = pybind11.get_include(self.user)
    return location
# Package metadata and the two precision variants of the extension:
# scattnlay_dp (double precision) and scattnlay_mp (-DMULTI_PRECISION=100).
setup(
    name=__mod__,
    version=__version__,
    description=__title__,
    long_description="""The Python version of scattnlay, a computer implementation of the algorithm for the calculation of electromagnetic \
radiation scattering by a multilayered sphere developed by Yang. It has been shown that the program is effective, \
resulting in very accurate values of scattering efficiencies for a wide range of size parameters, which is a \
considerable improvement over previous implementations of similar algorithms. For details see: \
O. Pena, U. Pal, Comput. Phys. Commun. 180 (2009) 2348-2354.""",
    author=__author__,
    author_email=__email__,
    maintainer=__author__,
    maintainer_email=__email__,
    keywords=['Mie scattering', 'Multilayered sphere',
              'Efficiency factors', 'Cross-sections'],
    url=__url__,
    download_url=__download_url__,
    license='GPL',
    platforms='any',
    packages=['scattnlay', 'scattnlay_dp', 'scattnlay_mp'],
    ext_modules=[
        Extension("scattnlay_dp",
                  ["src/nmie.cc", "src/nmie-pybind11.cc",
                   "src/pb11_wrapper.cc"],
                  language="c++",
                  include_dirs=[np.get_include(), pb.get_include()],
                  extra_compile_args=['-std=c++11']),
        Extension("scattnlay_mp",
                  ["src/nmie.cc", "src/nmie-pybind11.cc",
                   "src/pb11_wrapper.cc"],
                  language="c++",
                  include_dirs=[np.get_include(), pb.get_include()],
                  extra_compile_args=['-std=c++11',
                                      '-DMULTI_PRECISION=100']),
    ],
)
"author_email": "email", "description": get_metadata("description"), "url": get_metadata("repository"), } with open(os.path.join("..", "README.md"), "r") as f: metadata["long_description"] = f.read() config = configparser.ConfigParser() config.read("setup.cfg") hd98_include_dir = config["hd98"].get("include_dir", "") #hd98_library_dir = config["hd98"].get("library_dir", "") pyhd98 = setuptools.Extension( "pyhd98", include_dirs=[pybind11.get_include(), hd98_include_dir], sources=["pyhd98.cpp"], # libraries=["hd98"], # library_dirs=[hd98_library_dir], define_macros=[ ("__HD98_VERSION__", r"\"" + metadata["version"] + r"\""), ("__HD98_AUTHOR__", r"\"" + metadata["author"] + r"\""), ], extra_compile_args=["/std:c++latest"], ) setuptools.setup(long_description_content_type="text/markdown", packages=setuptools.find_packages(), ext_modules=[pyhd98], **metadata)
if __name__ == "__main__": metadata = { "name": "vect2rast", "version": get_metadata("version"), "author": get_metadata("author"), "author_email": "email", "description": get_metadata("description"), "url": get_metadata("repository"), } with open(os.path.join("..", "README.md"), "r") as f: metadata["long_description"] = f.read() config = configparser.ConfigParser() config.read("setup.cfg") vect2rast_include_dir = config["vect2rast"].get("include_dir", "") vect2rast_library_dir = config["vect2rast"].get("library_dir", "") vect2rast = setuptools.Extension( "vect2rast.vect2rast", include_dirs=[pybind11.get_include(), vect2rast_include_dir], sources=[os.path.join("vect2rast", "vect2rast.cpp")], libraries=["vect2rast"], library_dirs=[vect2rast_library_dir], ) setuptools.setup(long_description_content_type="text/markdown", packages=setuptools.find_packages(), ext_modules=[vect2rast], **metadata)
def get_pybind_includepath():
    """Return the directory containing the pybind11 headers."""
    import pybind11
    include_path = pybind11.get_include()
    return include_path
import os
import os.path
import sys

# Strip -Wstrict-prototypes from the inherited OPT flags: it has no
# effect for C++ and produces a warning for every compiled file.
(opt,) = get_config_vars('OPT')
os.environ['OPT'] = " ".join(
    flag for flag in opt.split() if flag != '-Wstrict-prototypes'
)

# --- server-side extension ------------------------------------------------
pk = pkgconfig.parse('sdskv-server')
server_libraries = pk['libraries']
# BUG FIX: this assignment was duplicated on the next line (copy-paste);
# the redundant second assignment has been removed.
server_library_dirs = pk['library_dirs']
server_include_dirs = pk['include_dirs']
server_include_dirs.append(".")
server_include_dirs.append(pybind11.get_include())

pysdskv_server_module = Extension('_pysdskvserver',
                                  ["pysdskv/src/server.cpp"],
                                  libraries=server_libraries,
                                  library_dirs=server_library_dirs,
                                  include_dirs=server_include_dirs,
                                  extra_compile_args=['-std=c++11'],
                                  depends=["pysdskv/src/server.cpp"])

# --- client-side extension ------------------------------------------------
pk = pkgconfig.parse('sdskv-client')
client_libraries = pk['libraries']
# BUG FIX: duplicate assignment removed here as well.
client_library_dirs = pk['library_dirs']
client_include_dirs = pk['include_dirs']
client_include_dirs.append(".")
client_include_dirs.append(pybind11.get_include())
import os
import sys
from setuptools import setup, Extension, find_packages

# pybind11 is optional at parse time; an empty include path means the
# compiler must find the headers some other way.
try:
    import pybind11
    pybind11_include_path = pybind11.get_include()
except ImportError:
    pybind11_include_path = ""

ext_name = "ricoh_camera_sdk"
ext_version = "1.1.0"
py_ver = sys.version_info[:2]  # (major, minor)

# Locate the vendored SDK: the last directory under ./sdk/ containing
# both include/ and lib/ subdirectories wins (walk order dependent).
sdk_include_path = ""
sdk_lib_path = ""
for dirname, dirs, filenames in os.walk("./sdk/"):
    if "include" in dirs and "lib" in dirs:
        sdk_include_path = os.path.join(dirname, "include")
        sdk_lib_path = os.path.join(dirname, "lib")

if os.name == "posix":
    # Select the prebuilt SDK library matching the host architecture.
    import platform
    machine = platform.machine()
    if machine in ["x86_64"]:
        sdk_lib_path = os.path.join(sdk_lib_path, "x64")
    elif machine in ["i386"]:
        sdk_lib_path = os.path.join(sdk_lib_path, "x86")
    elif machine in ["armv7l"]:
def build_pybind_ext(
    name: str,
    sources: list,
    build_path: str,
    target_path: str,
    *,
    include_dirs: Optional[List[str]] = None,
    library_dirs: Optional[List[str]] = None,
    libraries: Optional[List[str]] = None,
    extra_compile_args: Optional[Union[List[str], Dict[str, List[str]]]] = None,
    extra_link_args: Optional[List[str]] = None,
    build_ext_class: Type = None,
    verbose: bool = False,
    clean: bool = False,
) -> Tuple[str, str]:
    """Compile a pybind11 extension via setuptools and copy it to *target_path*.

    Returns
    -------
    Tuple[str, str]
        The extension's full module name and the destination path of the
        copied binary artifact.
    """
    # Hack to remove warning about "-Wstrict-prototypes" not having effect:
    # temporarily scrub the cached distutils config flags; the backup is
    # restored at the end of this function.
    replaced_flags_backup = copy.deepcopy(distutils.sysconfig._config_vars)
    _clean_build_flags(distutils.sysconfig._config_vars)

    include_dirs = include_dirs or []
    library_dirs = library_dirs or []
    libraries = libraries or []
    extra_compile_args = extra_compile_args or []
    extra_link_args = extra_link_args or []

    # Build extension module (pybind11 headers always included).
    py_extension = setuptools.Extension(
        name,
        sources,
        include_dirs=[
            # Path to pybind11 headers (system and user site).
            pybind11.get_include(),
            pybind11.get_include(user=True),
            *include_dirs
        ],
        library_dirs=[*library_dirs],
        libraries=[*libraries],
        language="c++",
        extra_compile_args=extra_compile_args,
        extra_link_args=extra_link_args,
    )
    # Drive setuptools programmatically via script_args instead of argv.
    setuptools_args = dict(
        name=name,
        ext_modules=[py_extension],
        script_args=[
            "build_ext",
            # "--parallel={}".format(gt_config.build_settings["parallel_jobs"]),
            "--build-temp={}".format(build_path),
            "--build-lib={}".format(build_path),
            "--force",
        ],
    )
    if build_ext_class is not None:
        setuptools_args["cmdclass"] = {"build_ext": build_ext_class}

    if verbose:
        setuptools_args["script_args"].append("-v")
        setuptools.setup(**setuptools_args)
    else:
        # Quiet build: swallow setuptools' stdout/stderr chatter.
        setuptools_args["script_args"].append("-q")
        io_out, io_err = io.StringIO(), io.StringIO()
        with contextlib.redirect_stdout(io_out), contextlib.redirect_stderr(
                io_err):
            setuptools.setup(**setuptools_args)

    # Copy extension in target path.
    # NOTE(review): relies on the private Extension attributes _full_name
    # and _file_name set by setuptools during the build.
    module_name = py_extension._full_name
    file_path = py_extension._file_name
    src_path = os.path.join(build_path, file_path)
    dest_path = os.path.join(target_path, os.path.basename(file_path))
    os.makedirs(os.path.dirname(dest_path), exist_ok=True)
    distutils.file_util.copy_file(src_path, dest_path, verbose=verbose)

    # Final cleaning
    if clean:
        shutil.rmtree(build_path)

    # Restore original distutils flag config to not break functionality with
    # "-Wstrict-prototypes"-hack for other tools using distutils.
    for key, value in replaced_flags_backup.items():
        distutils.sysconfig._config_vars[key] = value

    return module_name, dest_path
# Setup script for the 'tensorlib' example pybind11/cppmat module
# (this chunk ends mid-setup() call; the remaining kwargs follow below).
from setuptools import setup, Extension
import sys
import setuptools
import pybind11
import cppmat

__version__ = '0.0.1'

ext_modules = [
    Extension(
        'tensorlib',
        ['tensorlib.cpp'],
        # Headers from both pybind11 and cppmat (system and user site).
        include_dirs=[
            pybind11.get_include(False),
            pybind11.get_include(True),
            cppmat.get_include(False),
            cppmat.get_include(True)
        ],
        language='c++'
    ),
]

setup(
    name='tensorlib',
    description='Tensorlib',
    long_description='This is an example module, it no real use!',
    keywords='Example, C++, C++11, Python bindings, pybind11',
    version=__version__,
    license='MIT',
    author='Tom de Geus',