def configuration(parent_package='', top_path=None):
    """Return the distutils Configuration for the ``numtypes`` package."""
    from numpy.distutils.misc_util import Configuration, get_info

    # '-Werror' was tried previously but is intentionally left out.
    cflags = ['-std=c99', '-Wall']

    cfg = Configuration(None, parent_package, top_path)
    cfg.add_subpackage('numtypes')
    cfg.add_subpackage('numtypes/tests')

    npymath = get_info("npymath")
    # (extension name, templated/plain source file, extra build info)
    ext_specs = [
        ('numtypes._nint', join('src', '_nint.c.src'), npymath),
        ('numtypes._complex_int', join('src', '_complex_int.c.src'), npymath),
        ('numtypes._polarcomplex', join('src', '_polarcomplex.c.src'), {}),
        ('numtypes._python_logtypes',
         join('src', 'logtypes', '_python_logtypes.c'), {}),
        ('numtypes._logtypes', join('src', 'logtypes', '_logtypes.c.src'), {}),
    ]
    for ext_name, src, extra in ext_specs:
        cfg.add_extension(ext_name,
                          extra_compile_args=cflags,
                          sources=[src],
                          **extra)
    return cfg
def test_installed_npymath_ini():
    """Regression test for gh-7707.

    If npymath.ini was not installed alongside numpy, get_info('npymath')
    raises outright instead of returning build info.
    """
    result = get_info('npymath')
    assert isinstance(result, dict)
    assert "define_macros" in result
def configuration(parent_package='', top_path=None):
    """Register every ``*.pyx`` under CLIB_ABS_PATH as a C++ extension."""
    from numpy.distutils.misc_util import Configuration, get_info

    npymath_info = get_info('npymath')
    # Typical shape of npymath_info:
    #   include_dirs / library_dirs pointing into the numpy installation,
    #   libraries == ['npymath'], define_macros == []
    if os.name == 'posix':
        # libnpymath requires libm on POSIX platforms.
        npymath_info['libraries'].append('m')

    config = Configuration(package_name='clib',
                           parent_name=parent_package,
                           top_path=top_path)

    for pyx_path in Path(CLIB_ABS_PATH).glob("*.pyx"):
        fn = pyx_path.name                 # e.g. hoge.pyx
        name = fn.rsplit(".", 1)[0]        # strip trailing extension -> hoge
        config.add_extension(
            name=name,
            sources=[fn],
            language="c++",
            **npymath_info,
        )
        print(
            f"* \033[34m{fn}\033[0m is compiled by Cython to \033[34m{name}.cpp\033[0m file."
        )
    # config.add_subpackage('tests')
    return config
def configuration(parent_package="", top_path=None):
    """Configuration with a single extension linked against npymath."""
    cfg = Configuration("", parent_package, top_path)
    cfg.add_extension("hpyc",
                      ["hpyc.cpp"],
                      extra_info=get_info("npymath"),
                      extra_compile_args=CFLAGS)
    return cfg
def configuration(parent_package='', top_path=None):
    """Assemble the ``ssm`` subpackage: Cython-generated state-space modules."""
    from numpy.distutils.misc_util import Configuration, get_info

    config = Configuration('ssm', parent_package, top_path)
    npymath = get_info("npymath")

    # These modules all link against npymath.
    for mod in ('_statespace', '_kalman_filter', '_kalman_smoother',
                '_simulation_smoother'):
        config.add_extension(mod,
                             include_dirs=['dismalpy/src'],
                             sources=[mod + '.c'],
                             extra_info=npymath)
    # _tools does not need npymath.
    config.add_extension('_tools',
                         include_dirs=['dismalpy/src'],
                         sources=['_tools.c'])

    config.add_subpackage('compat')
    config.add_data_dir('tests')
    config.add_subpackage('_filters')
    config.add_subpackage('_smoothers')
    config.make_config_py()
    return config
def __init__(self, *args, **kwargs):
    """Initialize the extension, injecting numpy/npymath build info.

    The numpy imports are deferred to call time so the module can be
    imported before numpy is available.
    """
    from numpy import get_include
    from numpy.distutils.misc_util import get_info

    kwargs.update(get_info('npymath'))
    # Appends to the include_dirs list supplied by get_info (in place).
    kwargs['include_dirs'].append(get_include())
    Extension.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    # Extension initializer that injects numpy build info (npymath link
    # flags plus the numpy include directory) into the keyword arguments.
    #
    # The guard skips the numpy imports when no command was given or when
    # a "special" (metadata-only) command is running, so setup.py can be
    # invoked without numpy installed.
    if len(sys.argv) < 2 or not self.is_special_command():
        from numpy import get_include
        from numpy.distutils.misc_util import get_info
        kwargs.update(get_info('npymath'))
        # += mutates the include_dirs list obtained from get_info in place.
        kwargs['include_dirs'] += [get_include()]
    # NOTE(review): the flattened source makes the original indentation of
    # this call ambiguous; it is placed outside the guard (mirroring the
    # unguarded variant of this class elsewhere in the file) — confirm.
    Extension.__init__(self, *args, **kwargs)
def __init__(self):
    """Capture the compiler and build_ext settings used for compilation."""
    # Imported here because setuptools may monkeypatch distutils at runtime.
    from distutils.dist import Distribution

    self._verbose = False

    compiler = new_compiler()
    customize_compiler(compiler)
    self._compiler = compiler

    builder = build_ext(Distribution())
    builder.finalize_options()
    self._build_ext = builder
    self._py_lib_dirs = builder.library_dirs
    self._py_include_dirs = builder.include_dirs

    # Build info (include/library dirs, libraries) for numpy's npymath.
    self._math_info = np_misc.get_info('npymath')
def configuration(parent_package='', top_path=None):
    """Configuration for sfa_utils: one ufunc extension built against npymath.

    Fix: removed the unused function-local ``import numpy`` and merged the
    two separate imports from numpy.distutils.misc_util.
    """
    from numpy.distutils.misc_util import Configuration, get_info

    # include/library dirs and libraries for numpy's npymath static library
    info = get_info('npymath')
    config = Configuration('sfa_utils', parent_package, top_path)
    config.add_extension('npufunc',
                         ['src/pysfa/log_erfc.c'],
                         extra_info=info)
    return config
def configuration(parent_package='', top_path=None):
    """Build the configuration for the numpy_test extension."""
    import numpy
    from numpy.distutils.misc_util import Configuration, get_info

    # npymath is necessary for the half-float d-type.
    info = get_info('npymath')

    cfg = Configuration('', parent_package, top_path)
    cfg.add_extension('numpy_test', ['numpy_test.c'], extra_info=info)
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for scipy.special: cephes/amos/toms/cdflib/specfun
    static libraries plus the _cephes and specfun extension modules.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    # C libraries
    config.add_library('sc_c_misc', sources=[join('c_misc', '*.c')])
    config.add_library('sc_cephes', sources=[join('cephes', '*.c')],
                       include_dirs=[get_python_inc(),
                                     get_numpy_include_dirs()],
                       macros=define_macros)

    # Fortran libraries
    config.add_library('sc_mach', sources=[join('mach', '*.f')],
                       config_fc={'noopt': (__file__, 1)})
    # BUG FIX: the amos/toms source directories were transposed (sc_toms was
    # built from amos/*.f and vice versa); every other revision of this
    # configuration builds sc_amos from amos/*.f and sc_toms from toms/*.f.
    config.add_library('sc_toms', sources=[join('toms', '*.f')])
    config.add_library('sc_amos', sources=[join('amos', '*.f')])
    config.add_library('sc_cdf', sources=[join('cdflib', '*.f')])
    config.add_library('sc_specfun', sources=[join('specfun', '*.f')])

    # Extension _cephes
    sources = ['_cephesmodule.c', 'amos_wrappers.c', 'specfun_wrappers.c',
               'toms_wrappers.c', 'cdf_wrappers.c', 'ufunc_extras.c']
    config.add_extension('_cephes',
                         sources=sources,
                         libraries=['sc_amos', 'sc_toms', 'sc_c_misc',
                                    'sc_cephes', 'sc_mach', 'sc_cdf',
                                    'sc_specfun'],
                         depends=["ufunc_extras.h", "cephes.h",
                                  "amos_wrappers.h", "toms_wrappers.h",
                                  "cdf_wrappers.h", "specfun_wrappers.h",
                                  "c_misc/misc.h", "cephes_doc.h",
                                  "cephes/mconf.h",
                                  "cephes/cephes_names.h"],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         define_macros=[],
                         libraries=['sc_specfun'])

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the c_faces C++ extension (linked with npymath)."""
    import numpy
    from numpy.distutils.misc_util import Configuration, get_info

    npymath = get_info('npymath')
    cfg = Configuration('c_faces', parent_package, top_path)
    cfg.add_extension('c_faces',
                      ['c_faces.cpp'],
                      extra_info=npymath,
                      extra_compile_args=["-Wall", "-Werror", "-O3"])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the _filters subpackage (Kalman filter variants)."""
    from numpy.distutils.misc_util import Configuration, get_info

    config = Configuration('_filters', parent_package, top_path)
    npymath = get_info("npymath")

    for mod in ('_conventional', '_univariate', '_inversions'):
        config.add_extension(mod,
                             include_dirs=['dismalpy/src'],
                             sources=[mod + '.c'],
                             extra_info=npymath)

    config.make_config_py()
    return config
def _update_extensions(self):
    """Inject numpy include dirs into every extension; add npymath link
    info to the extensions listed in EXT_REQUIRES_NUMPY_MATH_LIBS."""
    import numpy
    from numpy.distutils.misc_util import get_info

    # De-duplicated set of numpy header locations.
    numpy_includes = list({
        numpy.get_include(),
        pkg_resources.resource_filename('numpy', 'core/include'),
    })
    math_info = get_info('npymath')

    for ext in self.extensions:
        if not hasattr(ext, 'include_dirs'):
            continue
        ext.include_dirs = list(set(ext.include_dirs) | set(numpy_includes))
        if ext.name in EXT_REQUIRES_NUMPY_MATH_LIBS:
            ext.include_dirs += math_info['include_dirs']
            ext.libraries += math_info['libraries']
            ext.library_dirs += math_info['library_dirs']
def _update_extensions(self):
    """Add numpy headers to all extensions; link npymath where required."""
    import numpy
    from numpy.distutils.misc_util import get_info

    includes = [numpy.get_include(),
                pkg_resources.resource_filename('numpy', 'core/include')]
    includes = list(set(includes))
    npymath = get_info('npymath')

    for extension in self.extensions:
        if not hasattr(extension, 'include_dirs'):
            continue
        merged = set(extension.include_dirs)
        merged.update(includes)
        extension.include_dirs = list(merged)
        if extension.name not in EXT_REQUIRES_NUMPY_MATH_LIBS:
            continue
        extension.include_dirs += npymath['include_dirs']
        extension.libraries += npymath['libraries']
        extension.library_dirs += npymath['library_dirs']
def configuration(parent_package='', top_path=None):
    """Configuration for the cNeuralReluplex C++ extension (GLPK + npymath)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.misc_util import get_info

    npymath = get_info('npymath')
    cfg = Configuration('', parent_package, top_path)
    cfg.add_extension(
        'cNeuralReluplex',
        sources=['reluplex/Reluplex2py.cpp'],
        include_dirs=[common_inc, glpk_inc, reluplex_inc, src_inc],
        library_dirs=[glpk_lib],
        # Keep the GLPK lib dir on the runtime search path as well.
        runtime_library_dirs=[glpk_lib],
        libraries=["glpk"],
        extra_compile_args=['--std=c++14'],
        language='c++',
        extra_info=npymath)
    return cfg
def update_extension(extension, requires_math=True):
    """Attach numpy headers (and optionally npymath link info) to extension."""
    import numpy  # noqa: F811
    from numpy.distutils.log import set_verbosity
    from numpy.distutils.misc_util import get_info

    set_verbosity(1)

    include_candidates = {
        numpy.get_include(),
        pkg_resources.resource_filename("numpy", "core/include"),
    }
    npymath = get_info("npymath")

    # Objects without include_dirs cannot be extended.
    if not hasattr(extension, "include_dirs"):
        return

    extension.include_dirs = list(set(extension.include_dirs)
                                  | include_candidates)
    if requires_math:
        extension.include_dirs += npymath["include_dirs"]
        extension.libraries += npymath["libraries"]
        extension.library_dirs += npymath["library_dirs"]
def configuration(parent_package='', top_path=''):
    """Configure the 'cpython' subpackage with the math_aux_cpy extension."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.misc_util import get_info

    config = Configuration('cpython', parent_package, top_path)

    # Opt in to the non-deprecated numpy C API.
    numpy_nodepr_api = [('NPY_NO_DEPRECATED_API', 'NPY_1_9_API_VERSION')]
    headers = glob.glob('*.h')

    # Add CPYTHON extension
    sources = ['math_aux_cpy.c']
    config.add_extension('math_aux_cpy',
                         sources=sources,
                         depends=headers + sources,
                         extra_info=get_info('npymath'),
                         define_macros=numpy_nodepr_api,
                         )
    return config
# Base runtime requirements; test and doc extras build on top of them.
install_requires = ["numpy", "scikit-learn>=0.16"]
tests_require = install_requires + ["pytest"]
docs_require = install_requires + [
    "Sphinx", "sphinx-gallery", "numpydoc", "Pillow", "matplotlib"
]

# setup() keyword arguments.  The _hmmc extension is expanded with the
# npymath build info (include/library dirs, libraries, macros) so it can
# link against numpy's npymath static library.
setup_options = dict(name="hmmlearn",
                     version=VERSION,
                     description=DESCRIPTION,
                     long_description=LONG_DESCRIPTION,
                     maintainer=MAINTAINER,
                     maintainer_email=MAINTAINER_EMAIL,
                     license=LICENSE,
                     url="https://github.com/hmmlearn/hmmlearn",
                     packages=["hmmlearn", "hmmlearn.tests"],
                     classifiers=CLASSIFIERS,
                     ext_modules=[
                         Extension("hmmlearn._hmmc", ["hmmlearn/_hmmc.c"],
                                   extra_compile_args=["-O3"],
                                   **get_info("npymath"))
                     ],
                     install_requires=install_requires,
                     tests_require=tests_require,
                     extras_require={
                         "tests": tests_require,
                         "docs": docs_require
                     })

if __name__ == "__main__":
    setup(**setup_options)
def configuration(parent_package='', top_path=None):
    # Build configuration for scipy.special: static C/Fortran helper
    # libraries plus the specfun, _ufuncs and _ufuncs_cxx extensions.
    from numpy.distutils.misc_util import Configuration

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))

    # C libraries
    config.add_library('sc_c_misc', sources=[join('c_misc', '*.c')])
    config.add_library('sc_cephes', sources=[join('cephes', '*.c')],
                       include_dirs=[get_python_inc(),
                                     get_numpy_include_dirs()],
                       macros=define_macros)

    # Fortran libraries
    # mach is compiled without optimization (machine-constant routines).
    config.add_library('sc_mach', sources=[join('mach', '*.f')],
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_toms', sources=[join('amos', '*.f')])
    config.add_library('sc_amos', sources=[join('toms', '*.f')])
    config.add_library('sc_cdf', sources=[join('cdflib', '*.f')])
    config.add_library('sc_specfun', sources=[join('specfun', '*.f')])

    # Extension _cephes
    sources = ['_cephesmodule.c', 'amos_wrappers.c', 'specfun_wrappers.c',
               'toms_wrappers.c', 'cdf_wrappers.c', 'ufunc_extras.c']
    config.add_extension('_cephes',
                         sources=sources,
                         libraries=['sc_amos', 'sc_toms', 'sc_c_misc',
                                    'sc_cephes', 'sc_mach', 'sc_cdf',
                                    'sc_specfun'],
                         depends=["ufunc_extras.h", "cephes.h",
                                  "amos_wrappers.h", "toms_wrappers.h",
                                  "cdf_wrappers.h", "specfun_wrappers.h",
                                  "c_misc/misc.h", "cephes_doc.h",
                                  "cephes/mconf.h",
                                  "cephes/cephes_names.h"],
                         define_macros=define_macros,
                         extra_info=get_info("npymath")
                         )

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         define_macros=[],
                         libraries=['sc_specfun'])

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    # scipy.special build configuration: static helper libraries, the main
    # ufunc extensions, the Cython API and a couple of test helpers.
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info as get_system_info

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    # The platform-specific include dir can differ from the generic one.
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.insert(0, get_numpy_include_dirs())

    # C libraries
    c_misc_src = [join('c_misc', '*.c')]
    c_misc_hdr = [join('c_misc', '*.h')]
    cephes_src = [join('cephes', '*.c')]
    cephes_hdr = [join('cephes', '*.h')]
    config.add_library('sc_c_misc', sources=c_misc_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + cephes_src + c_misc_hdr
                                + cephes_hdr + ['*.h']),
                       macros=define_macros)
    config.add_library('sc_cephes', sources=cephes_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + ['*.h']),
                       macros=define_macros)

    # Fortran/C++ libraries
    mach_src = [join('mach', '*.f')]
    amos_src = [join('amos', '*.f')]
    cdf_src = [join('cdflib', '*.f')]
    specfun_src = [join('specfun', '*.f')]
    # mach is compiled without optimization (machine-constant routines).
    config.add_library('sc_mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=amos_src)
    config.add_library('sc_cdf', sources=cdf_src)
    config.add_library('sc_specfun', sources=specfun_src)

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         depends=specfun_src,
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs
    headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')]
    ufuncs_src = [
        '_ufuncs.c', 'sf_error.c', '_logit.c.src', "amos_wrappers.c",
        "cdf_wrappers.c", "specfun_wrappers.c"
    ]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src +
                  cephes_src + mach_src + cdf_src + specfun_src)
    # Start from the LAPACK build info and extend it with local dirs/libs.
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs
                                              + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend([
        'sc_amos', 'sc_c_misc', 'sc_cephes', 'sc_mach', 'sc_cdf', 'sc_specfun'
    ])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('_ufuncs',
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = [
        '_ufuncs_cxx.cxx', 'sf_error.c', '_faddeeva.cxx', 'Faddeeva.cc',
        '_wright.cxx', 'wright.cc'
    ]
    ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh'])
    config.add_extension('_ufuncs_cxx',
                         sources=ufuncs_cxx_src,
                         depends=ufuncs_cxx_dep,
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    cfg = dict(get_system_info('lapack_opt'))
    config.add_extension('_ellip_harm_2',
                         sources=[
                             '_ellip_harm_2.c',
                             'sf_error.c',
                         ],
                         **cfg)

    # Cython API
    config.add_data_files('cython_special.pxd')
    cython_special_src = [
        'cython_special.c', 'sf_error.c', '_logit.c.src', "amos_wrappers.c",
        "cdf_wrappers.c", "specfun_wrappers.c"
    ]
    cython_special_dep = (headers + ufuncs_src + ufuncs_cxx_src + amos_src +
                          c_misc_src + cephes_src + mach_src + cdf_src +
                          specfun_src)
    # NOTE(review): this keeps extending the same lapack_opt dict created
    # for _ellip_harm_2 above (no fresh dict here) — confirm intentional.
    cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs
                                              + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend([
        'sc_amos', 'sc_c_misc', 'sc_cephes', 'sc_mach', 'sc_cdf', 'sc_specfun'
    ])
    cfg.setdefault('define_macros', []).extend(define_macros
                                               + [('CYTHON_SPECIAL', 1)])
    config.add_extension('cython_special',
                         depends=cython_special_dep,
                         sources=cython_special_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # combinatorics
    config.add_extension('_comb', sources=['_comb.c'])

    # testing for _round.h
    config.add_extension('_test_round',
                         sources=['_test_round.c'],
                         depends=['_round.h', 'c_misc/double2.h'],
                         include_dirs=[numpy.get_include()],
                         extra_info=get_info('npymath'))

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')

    config.add_subpackage('_precompute')
    return config
foo, _ = subprocess.Popen(["curl-config", "--libs"], stdout=subprocess.PIPE).communicate() except: sys.exit( "Either libcurl isn't installed, it didn't come with curl-config, or curl-config isn't in your $PATH. This must be corrected before installing pyBigWig!\n" ) foo = foo.strip().split() for v in foo: if v[0:2] == "-L": additional_libs.append(v[2:]) include_dirs = ["libBigWig", sysconfig.get_config_var("INCLUDEPY")] defines = [] if WITHNUMPY is True: defines.extend([("WITHNUMPY", None), ("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")]) extra_info = get_info("npymath") include_dirs.extend(extra_info["include_dirs"]) libs.extend(extra_info["libraries"]) extra_info["library_dirs"].extend(additional_libs) additional_libs = extra_info["library_dirs"] module1 = Extension( "pyBigWig", sources=srcs, libraries=libs, library_dirs=additional_libs, define_macros=defines, include_dirs=include_dirs, ) setup(
def configuration(parent_package='', top_path=None):
    # scipy.special configuration: helper libraries plus the specfun,
    # _ufuncs, _ufuncs_cxx and _ellip_harm_2 extensions.
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info as get_system_info

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    # The platform-specific include dir can differ from the generic one.
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.insert(0, get_numpy_include_dirs())

    # C libraries
    c_misc_src = [join('c_misc', '*.c')]
    c_misc_hdr = [join('c_misc', '*.h')]
    cephes_src = [join('cephes', '*.c')]
    cephes_hdr = [join('cephes', '*.h')]
    config.add_library('sc_c_misc', sources=c_misc_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + cephes_src + c_misc_hdr
                                + cephes_hdr + ['*.h']),
                       macros=define_macros)
    config.add_library('sc_cephes', sources=cephes_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + ['*.h']),
                       macros=define_macros)

    # Fortran/C++ libraries
    mach_src = [join('mach', '*.f')]
    amos_src = [join('amos', '*.f')]
    cdf_src = [join('cdflib', '*.f')]
    specfun_src = [join('specfun', '*.f')]
    # mach is compiled without optimization (machine-constant routines).
    config.add_library('sc_mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=amos_src)
    config.add_library('sc_cdf', sources=cdf_src)
    config.add_library('sc_specfun', sources=specfun_src)

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         depends=specfun_src,
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs
    headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')]
    ufuncs_src = ['_ufuncs.c', 'sf_error.c', '_logit.c.src',
                  "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src +
                  cephes_src + mach_src + cdf_src + specfun_src)
    # Start from the LAPACK build info and extend it with local dirs/libs.
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs
                                              + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend(['sc_amos', 'sc_c_misc',
                                            'sc_cephes', 'sc_mach',
                                            'sc_cdf', 'sc_specfun'])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('_ufuncs',
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = ['_ufuncs_cxx.cxx', 'sf_error.c',
                      '_faddeeva.cxx', 'Faddeeva.cc']
    ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh'])
    config.add_extension('_ufuncs_cxx',
                         sources=ufuncs_cxx_src,
                         depends=ufuncs_cxx_dep,
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    cfg = dict(get_system_info('lapack_opt'))
    config.add_extension('_ellip_harm_2',
                         sources=['_ellip_harm_2.c', 'sf_error.c', ],
                         **cfg
                         )

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    # scipy.special configuration (variant without LAPACK info): helper
    # libraries plus the specfun, _ufuncs and _ufuncs_cxx extensions.
    from numpy.distutils.misc_util import Configuration

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    # The platform-specific include dir can differ from the generic one.
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.insert(0, get_numpy_include_dirs())

    # C libraries
    c_misc_src = [join('c_misc', '*.c')]
    c_misc_hdr = [join('c_misc', '*.h')]
    cephes_src = [join('cephes', '*.c')]
    cephes_hdr = [join('cephes', '*.h')]
    config.add_library('sc_c_misc', sources=c_misc_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + cephes_src + c_misc_hdr
                                + cephes_hdr + ['*.h']),
                       macros=define_macros)
    config.add_library('sc_cephes', sources=cephes_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + ['*.h']),
                       macros=define_macros)

    # Fortran/C++ libraries
    mach_src = [join('mach', '*.f')]
    amos_src = [join('amos', '*.f')]
    cdf_src = [join('cdflib', '*.f')]
    specfun_src = [join('specfun', '*.f')]
    # mach is compiled without optimization (machine-constant routines).
    config.add_library('sc_mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=amos_src)
    config.add_library('sc_cdf', sources=cdf_src)
    config.add_library('sc_specfun', sources=specfun_src)

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         depends=specfun_src,
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs
    headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')]
    ufuncs_src = ['_ufuncs.c', 'sf_error.c', '_logit.c.src',
                  "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src +
                  cephes_src + mach_src + cdf_src + specfun_src)
    config.add_extension('_ufuncs',
                         libraries=['sc_amos', 'sc_c_misc', 'sc_cephes',
                                    'sc_mach', 'sc_cdf', 'sc_specfun'],
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         include_dirs=[curdir] + inc_dirs,
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = ['_ufuncs_cxx.cxx', 'sf_error.c',
                      '_faddeeva.cxx', 'Faddeeva.cc']
    ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh'])
    config.add_extension('_ufuncs_cxx',
                         sources=ufuncs_cxx_src,
                         depends=ufuncs_cxx_dep,
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    # scipy.special configuration: helper libraries, ufunc extensions,
    # the Cython API, and regeneration of the packed test-data archives.
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info as get_system_info

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    # The platform-specific include dir can differ from the generic one.
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.insert(0, get_numpy_include_dirs())
    # Shared headers from the sibling _lib package.
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))

    # C libraries
    c_misc_src = [join('c_misc', '*.c')]
    c_misc_hdr = [join('c_misc', '*.h')]
    cephes_src = [join('cephes', '*.c')]
    cephes_hdr = [join('cephes', '*.h')]
    config.add_library('sc_c_misc', sources=c_misc_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + cephes_src + c_misc_hdr
                                + cephes_hdr + ['*.h']),
                       macros=define_macros)
    config.add_library('sc_cephes', sources=cephes_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + ['*.h']),
                       macros=define_macros)

    # Fortran/C++ libraries
    mach_src = [join('mach', '*.f')]
    amos_src = [join('amos', '*.f')]
    cdf_src = [join('cdflib', '*.f')]
    specfun_src = [join('specfun', '*.f')]
    # mach is compiled without optimization (machine-constant routines).
    config.add_library('sc_mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=amos_src)
    config.add_library('sc_cdf', sources=cdf_src)
    config.add_library('sc_specfun', sources=specfun_src)

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         depends=specfun_src,
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs
    headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')]
    ufuncs_src = ['_ufuncs.c', 'sf_error.c', '_logit.c.src',
                  "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src +
                  cephes_src + mach_src + cdf_src + specfun_src)
    # Start from the LAPACK build info and extend it with local dirs/libs.
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs
                                              + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend(['sc_amos', 'sc_c_misc',
                                            'sc_cephes', 'sc_mach',
                                            'sc_cdf', 'sc_specfun'])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('_ufuncs',
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = ['_ufuncs_cxx.cxx', 'sf_error.c',
                      '_faddeeva.cxx', 'Faddeeva.cc',
                      '_wright.cxx', 'wright.cc']
    ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh'])
    config.add_extension('_ufuncs_cxx',
                         sources=ufuncs_cxx_src,
                         depends=ufuncs_cxx_dep,
                         include_dirs=[curdir] + inc_dirs,
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    cfg = dict(get_system_info('lapack_opt'))
    config.add_extension('_ellip_harm_2',
                         sources=['_ellip_harm_2.c', 'sf_error.c', ],
                         **cfg
                         )

    # Cython API
    config.add_data_files('cython_special.pxd')
    cython_special_src = ['cython_special.c', 'sf_error.c', '_logit.c.src',
                          "amos_wrappers.c", "cdf_wrappers.c",
                          "specfun_wrappers.c"]
    cython_special_dep = (headers + ufuncs_src + ufuncs_cxx_src + amos_src +
                          c_misc_src + cephes_src + mach_src + cdf_src +
                          specfun_src)
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs
                                              + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend(['sc_amos', 'sc_c_misc',
                                            'sc_cephes', 'sc_mach',
                                            'sc_cdf', 'sc_specfun'])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('cython_special',
                         depends=cython_special_dep,
                         sources=cython_special_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # combinatorics
    config.add_extension('_comb', sources=['_comb.c'])

    # testing for _round.h
    config.add_extension('_test_round',
                         sources=['_test_round.c'],
                         depends=['_round.h', 'c_misc/double2.h'],
                         include_dirs=[numpy.get_include()] + inc_dirs,
                         extra_info=get_info('npymath'))

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')

    # regenerate npz data files before packaging them
    makenpz = os.path.join(os.path.dirname(__file__),
                           'utils', 'makenpz.py')
    data_dir = os.path.join(os.path.dirname(__file__),
                            'tests', 'data')
    for name in ['boost', 'gsl', 'local']:
        subprocess.check_call([sys.executable, makenpz,
                               '--use-timestamp',
                               os.path.join(data_dir, name)])

    config.add_data_files('tests/data/*.npz')

    config.add_subpackage('_precompute')
    return config
extra_setuptools_args = {} from distutils.command import install # Import distutils _after_ potential setuptools import above, and after removing # MANIFEST from distutils.core import setup from distutils.extension import Extension from distutils.command import build_py, build_ext from cythexts import cyproc_exts, get_pyx_sdist, derror_maker from setup_helpers import install_scripts_bat, add_flag_checking, check_npymath # Define extensions EXTS = [] # Add flags for linking to npymath library ext_kwargs = get_info('npymath') ext_kwargs['include_dirs'].append('src') for modulename, other_sources, language in ( ('dipy.reconst.recspeed', [], 'c'), ('dipy.reconst.vec_val_sum', [], 'c'), ('dipy.reconst.quick_squash', [], 'c'), ('dipy.tracking.distances', [], 'c'), ('dipy.tracking.streamlinespeed', [], 'c'), ('dipy.tracking.vox2track', [], 'c'), ('dipy.tracking.propspeed', [], 'c'), ('dipy.denoise.denspeed', [], 'c'), ('dipy.align.vector_fields', [], 'c'), ('dipy.align.sumsqdiff', [], 'c'), ('dipy.align.expectmax', [], 'c'), ('dipy.align.crosscorr', [], 'c'), ('dipy.align.bundlemin', [], 'c')):
# Extra C compile flags for the extensions below (empty by default).
CFLAGS = []

# Python 2.6 on OS X needs an explicit libstdc++ link for C++ extensions.
if sys.platform == "darwin" and sys.version_info[:2] == (2, 6):
    cpp_link_args = ["-lstdc++"]
else:
    cpp_link_args = []

install_name_tool_fixer = []
if sys.platform == "darwin":
    # Pads Mach-O headers so install names can be rewritten post-link.
    install_name_tool_fixer += ["-headerpad_max_install_names"]

# Build info (include/library dirs, libraries, macros) for numpy's npymath.
npymath_info = np_misc.get_info("npymath")

ext_dynfunc = Extension(
    name="numba._dynfunc",
    sources=["numba/_dynfunc.c"],
    extra_compile_args=CFLAGS,
    depends=["numba/_pymodule.h"]
)

# Exposes npymath symbols; linked explicitly against the npymath library.
ext_npymath_exports = Extension(
    name="numba._npymath_exports",
    sources=["numba/_npymath_exports.c"],
    include_dirs=npymath_info["include_dirs"],
    libraries=npymath_info["libraries"],
    library_dirs=npymath_info["library_dirs"],
    define_macros=npymath_info["define_macros"],
)
"Sphinx", "sphinx-gallery", "numpydoc", "Pillow", "matplotlib" ] setup_options = dict( name="hmmlearn", version=VERSION, description=DESCRIPTION, long_description=LONG_DESCRIPTION, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, license=LICENSE, url="https://github.com/hmmlearn/hmmlearn", packages=["hmmlearn", "hmmlearn.tests"], classifiers=CLASSIFIERS, ext_modules=[ Extension("hmmlearn._hmmc", ["hmmlearn/_hmmc.c"], extra_compile_args=["-O3"], **get_info("npymath")) ], install_requires=install_requires, tests_require=tests_require, extras_require={ "tests": tests_require, "docs": docs_require } ) if __name__ == "__main__": setup(**setup_options)
def get_ext_modules():
    """
    Return a list of Extension instances for the setup() call.
    """
    # Note we don't import Numpy at the toplevel, since setup.py
    # should be able to run without Numpy for pip to discover the
    # build dependencies
    import numpy.distutils.misc_util as np_misc

    # Inject required options for extensions compiled against the Numpy
    # C API (include dirs, library dirs etc.)
    np_compile_args = np_misc.get_info('npymath')

    ext_dynfunc = Extension(name='numba._dynfunc',
                            sources=['numba/_dynfuncmod.c'],
                            extra_compile_args=CFLAGS,
                            depends=['numba/_pymodule.h',
                                     'numba/_dynfunc.c'])

    ext_dispatcher = Extension(name="numba._dispatcher",
                               sources=['numba/_dispatcher.c',
                                        'numba/_typeof.c',
                                        'numba/_hashtable.c',
                                        'numba/_dispatcherimpl.cpp',
                                        'numba/typeconv/typeconv.cpp'],
                               depends=["numba/_pymodule.h",
                                        "numba/_dispatcher.h",
                                        "numba/_typeof.h",
                                        "numba/_hashtable.h"],
                               **np_compile_args)

    ext_helperlib = Extension(name="numba._helperlib",
                              sources=["numba/_helpermod.c",
                                       "numba/_math_c99.c"],
                              extra_compile_args=CFLAGS,
                              extra_link_args=install_name_tool_fixer,
                              depends=["numba/_pymodule.h",
                                       "numba/_math_c99.h",
                                       "numba/_helperlib.c",
                                       "numba/_lapack.c",
                                       "numba/_npymath_exports.c",
                                       "numba/_random.c",
                                       "numba/mathnames.inc"],
                              **np_compile_args)

    ext_typeconv = Extension(name="numba.typeconv._typeconv",
                             sources=["numba/typeconv/typeconv.cpp",
                                      "numba/typeconv/_typeconv.cpp"],
                             depends=["numba/_pymodule.h"],
                             )

    ext_npyufunc_ufunc = Extension(name="numba.npyufunc._internal",
                                   sources=["numba/npyufunc/_internal.c"],
                                   depends=["numba/npyufunc/_ufunc.c",
                                            "numba/npyufunc/_internal.h",
                                            "numba/_pymodule.h"],
                                   **np_compile_args)

    # Work-queue backends are accumulated here; which ones are appended
    # depends on the TBB / OpenMP detection below.
    ext_npyufunc_workqueue_impls = []

    def check_file_at_path(path2file):
        """
        Takes a list as a path, a single glob (*) is permitted as an entry
        which indicates that expansion at this location is required
        (i.e. version might not be known).
        """
        found = None
        # Candidate roots: the Python installation prefix plus conda prefixes.
        path2check = [os.path.split(os.path.split(sys.executable)[0])[0]]
        path2check += [os.getenv(n, '') for n in ['CONDA_PREFIX', 'PREFIX']]
        if sys.platform.startswith('win'):
            path2check += [os.path.join(p, 'Library') for p in path2check]
        for p in path2check:
            if p:
                if '*' in path2file:
                    # Expand the single glob entry by listing the directory
                    # above it and probing each child for the remaining path.
                    globloc = path2file.index('*')
                    searchroot = os.path.join(*path2file[:globloc])
                    try:
                        potential_locs = os.listdir(os.path.join(p, searchroot))
                    except BaseException:
                        continue
                    searchfor = path2file[globloc + 1:]
                    for x in potential_locs:
                        potpath = os.path.join(p, searchroot, x, *searchfor)
                        if os.path.isfile(potpath):
                            found = p  # the latest is used
                elif os.path.isfile(os.path.join(p, *path2file)):
                    found = p  # the latest is used
        return found

    # Search for Intel TBB, first check env var TBBROOT then conda locations
    tbb_root = os.getenv('TBBROOT')
    if not tbb_root:
        tbb_root = check_file_at_path(['include', 'tbb', 'tbb.h'])

    # Set various flags for use in TBB and openmp. On OSX, also find OpenMP!
    have_openmp = True
    if sys.platform.startswith('win'):
        cpp11flags = []
        ompcompileflags = ['-openmp']
        omplinkflags = []
    elif sys.platform.startswith('darwin'):
        cpp11flags = ['-std=c++11']
        # This is a bit unusual but necessary...
        # llvm (clang) OpenMP is used for headers etc at compile time
        # Intel OpenMP (libiomp5) provides the link library.
        # They are binary compatible and may not safely coexist in a process,
        # as libiomp5 is more prevalent and often linked in for NumPy it is
        # used here!
        ompcompileflags = ['-fopenmp']
        omplinkflags = ['-fopenmp=libiomp5']
        omppath = ['lib', 'clang', '*', 'include', 'omp.h']
        have_openmp = check_file_at_path(omppath)
    else:
        cpp11flags = ['-std=c++11']
        ompcompileflags = ['-fopenmp']
        # NOTE(review): both branches assign the same flags, so the ppc64le
        # special case is currently a no-op.
        if platform.machine() == 'ppc64le':
            omplinkflags = ['-fopenmp']
        else:
            omplinkflags = ['-fopenmp']

    if tbb_root:
        print("Using Intel TBB from:", tbb_root)
        ext_npyufunc_tbb_workqueue = Extension(
            name='numba.npyufunc.tbbpool',
            sources=['numba/npyufunc/tbbpool.cpp',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'],
            include_dirs=[os.path.join(tbb_root, 'include')],
            extra_compile_args=cpp11flags,
            libraries=['tbb'],  # TODO: if --debug or -g, use 'tbb_debug'
            library_dirs=[
                os.path.join(tbb_root, 'lib', 'intel64', 'gcc4.4'),  # for Linux
                os.path.join(tbb_root, 'lib'),  # for MacOS
                os.path.join(tbb_root, 'lib', 'intel64', 'vc_mt'),  # for Windows
            ],
        )
        ext_npyufunc_workqueue_impls.append(ext_npyufunc_tbb_workqueue)
    else:
        print("TBB not found")

    # Disable OpenMP if we are building a wheel or
    # forced by user with NUMBA_NO_OPENMP=1
    if is_building_wheel() or os.getenv('NUMBA_NO_OPENMP'):
        print("OpenMP disabled")
    elif have_openmp:
        print("Using OpenMP from:", have_openmp)
        # OpenMP backed work queue
        ext_npyufunc_omppool = Extension(
            name='numba.npyufunc.omppool',
            sources=['numba/npyufunc/omppool.cpp',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'],
            extra_compile_args=ompcompileflags + cpp11flags,
            extra_link_args=omplinkflags)
        ext_npyufunc_workqueue_impls.append(ext_npyufunc_omppool)
    else:
        print("OpenMP not found")

    # Build the Numba workqueue implementation irrespective of whether the TBB
    # version is built. Users can select a backend via env vars.
    ext_npyufunc_workqueue = Extension(
        name='numba.npyufunc.workqueue',
        sources=['numba/npyufunc/workqueue.c',
                 'numba/npyufunc/gufunc_scheduler.cpp'],
        depends=['numba/npyufunc/workqueue.h'])
    ext_npyufunc_workqueue_impls.append(ext_npyufunc_workqueue)

    ext_mviewbuf = Extension(name='numba.mviewbuf',
                             extra_link_args=install_name_tool_fixer,
                             sources=['numba/mviewbuf.c'])

    ext_nrt_python = Extension(name='numba.runtime._nrt_python',
                               sources=['numba/runtime/_nrt_pythonmod.c',
                                        'numba/runtime/nrt.c'],
                               depends=['numba/runtime/nrt.h',
                                        'numba/_pymodule.h',
                                        'numba/runtime/_nrt_python.c'],
                               **np_compile_args)

    ext_jitclass_box = Extension(name='numba.jitclass._box',
                                 sources=['numba/jitclass/_box.c'],
                                 depends=['numba/_pymodule.h'],
                                 )

    ext_cuda_extras = Extension(name='numba.cuda.cudadrv._extras',
                                sources=['numba/cuda/cudadrv/_extras.c'],
                                depends=['numba/_pymodule.h'],
                                include_dirs=["numba"])

    ext_modules = [ext_dynfunc, ext_dispatcher, ext_helperlib, ext_typeconv,
                   ext_npyufunc_ufunc, ext_mviewbuf, ext_nrt_python,
                   ext_jitclass_box, ext_cuda_extras]
    # Append whichever work-queue backends were selected above.
    ext_modules += ext_npyufunc_workqueue_impls

    return ext_modules
def configuration(parent_package="", top_path=None):
    """numpy.distutils build configuration for the ``special`` subpackage.

    Registers the C/Fortran support libraries (cephes, amos, cdflib,
    specfun, mach, c_misc) and the ``specfun``, ``_ufuncs`` and
    ``_ufuncs_cxx`` extensions, then returns the Configuration object.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration("special", parent_package, top_path)

    define_macros = []
    if sys.platform == "win32":
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(("_USE_MATH_DEFINES", None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    # Include the Python headers; add the platform-specific header dir only
    # when it differs from the generic one.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())

    # C libraries
    config.add_library(
        "sc_c_misc", sources=[join("c_misc", "*.c")],
        include_dirs=[curdir] + inc_dirs, macros=define_macros
    )
    config.add_library(
        "sc_cephes", sources=[join("cephes", "*.c")],
        include_dirs=[curdir] + inc_dirs, macros=define_macros
    )

    # Fortran/C++ libraries
    config.add_library("sc_mach", sources=[join("mach", "*.f")],
                       config_fc={"noopt": (__file__, 1)})
    config.add_library("sc_amos", sources=[join("amos", "*.f")])
    config.add_library("sc_cdf", sources=[join("cdflib", "*.f")])
    config.add_library("sc_specfun", sources=[join("specfun", "*.f")])

    # Extension specfun
    config.add_extension(
        "specfun",
        sources=["specfun.pyf"],
        f2py_options=["--no-wrap-functions"],
        define_macros=[],
        libraries=["sc_specfun"],
    )

    # Extension _ufuncs: links all the support libraries and npymath.
    config.add_extension(
        "_ufuncs",
        libraries=["sc_amos", "sc_c_misc", "sc_cephes", "sc_mach",
                   "sc_cdf", "sc_specfun"],
        depends=[
            "_logit.h",
            "cephes.h",
            "amos_wrappers.h",
            "cdf_wrappers.h",
            "specfun_wrappers.h",
            "c_misc/misc.h",
            "cephes/mconf.h",
            "cephes/cephes_names.h",
        ],
        sources=["_ufuncs.c", "sf_error.c", "_logit.c.src",
                 "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"],
        include_dirs=[curdir],
        define_macros=define_macros,
        extra_info=get_info("npymath"),
    )

    # Extension _ufuncs_cxx
    config.add_extension(
        "_ufuncs_cxx",
        sources=["_ufuncs_cxx.cxx", "sf_error.c",
                 "_faddeeva.cxx", "Faddeeva.cc"],
        libraries=["sc_cephes"],
        include_dirs=[curdir],
        define_macros=define_macros,
        extra_info=get_info("npymath"),
    )

    # Ship test files and data with the package.
    config.add_data_files("tests/*.py")
    config.add_data_files("tests/data/README")
    config.add_data_files("tests/data/*.npz")

    return config
# Standalone setup script: builds the `_distance_wrap` C extension against
# NumPy's npymath library and ships the pure-Python `distance` module.
from distutils.core import setup, Extension
import numpy.distutils.misc_util as npy_util

# Build options (include dirs, libraries, library dirs) for npymath,
# splatted directly into the Extension below.
npy_info = npy_util.get_info('npymath')

distance_wrap = Extension('_distance_wrap',
                          sources = ['distance_wrap.c'],
                          **npy_info)

setup(name = 'simple-distance-cpy',
      version = '0.21',
      description = 'I want euclidean cdist and I want it now',
      py_modules = ['distance'],
      ext_modules = [distance_wrap],
      url='https://github.com/tocisz/simple-distance-cpy',
      author='Tomasz Cichocki',
      author_email='cichymail at gmail dot com')
# Standalone setup script (PEP8-spaced variant of the same build): compiles
# `_distance_wrap` against NumPy's npymath library.
from distutils.core import setup, Extension
import numpy.distutils.misc_util as npy_util

# Build options (include dirs, libraries, library dirs) for npymath.
npy_info = npy_util.get_info('npymath')

distance_wrap = Extension('_distance_wrap',
                          sources=['distance_wrap.c'],
                          **npy_info)

setup(name='simple-distance-cpy',
      version='0.21',
      description='I want euclidean cdist and I want it now',
      py_modules=['distance'],
      ext_modules=[distance_wrap],
      url='https://github.com/tocisz/simple-distance-cpy',
      author='Tomasz Cichocki',
      author_email='cichymail at gmail dot com')
def test_installed_npymath_ini():
    # Regression test for gh-7707. If npymath.ini wasn't installed, then this
    # will give an error.
    info = get_info('npymath')
    # Fetching alone is not enough: the original body asserted nothing, so a
    # malformed result could never fail the test. Verify the lookup returns a
    # usable build-info dict with the expected key.
    assert isinstance(info, dict)
    assert "define_macros" in info
if not lapack_info: # No LAPACK in NumPy print('### Warning: Using unoptimized blas/lapack @@@') MODULE_SOURCES.extend(lapack_lite_files[:-1]) # all but f2c.h MODULE_DEPENDENCIES.extend(lapack_lite_files) else: if sys.platform == 'win32': print('### Warning: python.xerbla.c is disabled ###') else: MODULE_SOURCES.extend(lapack_lite_files[:1]) # python_xerbla.c MODULE_DEPENDENCIES.extend(lapack_lite_files[:1]) npymath_info = np_misc_util.get_info('npymath') extra_opts = copy.deepcopy(lapack_info) for key, val in npymath_info.items(): if extra_opts.get(key): extra_opts[key].extend(val) else: extra_opts[key] = copy.deepcopy(val) gufunc_module = Extension('gulinalg._impl', sources = MODULE_SOURCES, depends = MODULE_DEPENDENCIES, **extra_opts) packages = [
def get_ext_modules():
    """
    Return a list of Extension instances for the setup() call.
    """
    # Note we don't import Numpy at the toplevel, since setup.py
    # should be able to run without Numpy for pip to discover the
    # build dependencies
    import numpy.distutils.misc_util as np_misc

    # Inject required options for extensions compiled against the Numpy
    # C API (include dirs, library dirs etc.)
    np_compile_args = np_misc.get_info("npymath")

    ext_dynfunc = Extension(
        name="numba._dynfunc",
        sources=["numba/_dynfuncmod.c"],
        extra_compile_args=CFLAGS,
        depends=["numba/_pymodule.h", "numba/_dynfunc.c"],
    )

    ext_dispatcher = Extension(
        name="numba._dispatcher",
        sources=[
            "numba/_dispatcher.c",
            "numba/_typeof.c",
            "numba/_hashtable.c",
            "numba/_dispatcherimpl.cpp",
            "numba/typeconv/typeconv.cpp",
        ],
        depends=["numba/_pymodule.h",
                 "numba/_dispatcher.h",
                 "numba/_typeof.h",
                 "numba/_hashtable.h"],
        **np_compile_args
    )

    ext_helperlib = Extension(
        name="numba._helperlib",
        sources=["numba/_helpermod.c", "numba/_math_c99.c"],
        extra_compile_args=CFLAGS,
        extra_link_args=install_name_tool_fixer,
        depends=[
            "numba/_pymodule.h",
            "numba/_math_c99.h",
            "numba/_helperlib.c",
            "numba/_lapack.c",
            "numba/_npymath_exports.c",
            "numba/_random.c",
            "numba/mathnames.inc",
        ],
        **np_compile_args
    )

    ext_typeconv = Extension(
        name="numba.typeconv._typeconv",
        sources=["numba/typeconv/typeconv.cpp",
                 "numba/typeconv/_typeconv.cpp"],
        depends=["numba/_pymodule.h"],
    )

    ext_npyufunc_ufunc = Extension(
        name="numba.npyufunc._internal",
        sources=["numba/npyufunc/_internal.c"],
        depends=["numba/npyufunc/_ufunc.c",
                 "numba/npyufunc/_internal.h",
                 "numba/_pymodule.h"],
        **np_compile_args
    )

    # Plain (non-TBB) work queue backend.
    ext_npyufunc_workqueue = Extension(
        name="numba.npyufunc.workqueue",
        sources=["numba/npyufunc/workqueue.c"],
        depends=["numba/npyufunc/workqueue.h"]
    )

    ext_mviewbuf = Extension(name="numba.mviewbuf",
                             sources=["numba/mviewbuf.c"])

    ext_nrt_python = Extension(
        name="numba.runtime._nrt_python",
        sources=["numba/runtime/_nrt_pythonmod.c", "numba/runtime/nrt.c"],
        depends=["numba/runtime/nrt.h",
                 "numba/_pymodule.h",
                 "numba/runtime/_nrt_python.c"],
        **np_compile_args
    )

    ext_jitclass_box = Extension(
        name="numba.jitclass._box",
        sources=["numba/jitclass/_box.c"],
        depends=["numba/_pymodule.h"]
    )

    ext_modules = [
        ext_dynfunc,
        ext_dispatcher,
        ext_helperlib,
        ext_typeconv,
        ext_npyufunc_ufunc,
        ext_npyufunc_workqueue,
        ext_mviewbuf,
        ext_nrt_python,
        ext_jitclass_box,
    ]

    return ext_modules
def configuration(parent_package='', top_path=None):
    """numpy.distutils build configuration for the ``stats`` subpackage.

    Registers the statlib Fortran library, several C/C++/Fortran extensions
    (including the optional Pythran-compiled ``_hypotests_pythran`` and the
    BiasedUrn C++ module) and returns the Configuration object.
    """
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    import numpy as np
    config = Configuration('stats', parent_package, top_path)

    statlib_src = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_src)

    # add statlib module
    config.add_extension('statlib',
                         sources=['statlib.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         libraries=['statlib'],
                         depends=statlib_src)

    # add _stats module
    config.add_extension('_stats', sources=['_stats.c'])

    # add mvn module
    config.add_extension('mvn', sources=['mvn.pyf', 'mvndst.f'])

    # add _sobol module
    config.add_extension('_sobol', sources=['_sobol.c'])
    config.add_data_files('_sobol_direction_numbers.npz')

    # add _qmc_cy module
    ext = config.add_extension('_qmc_cy', sources=['_qmc_cy.cxx'])
    ext._pre_build_hook = set_cxx_flags_hook

    # Pythran build is opt-out: enabled unless SCIPY_USE_PYTHRAN=0.
    if int(os.environ.get('SCIPY_USE_PYTHRAN', 1)):
        import pythran
        ext = pythran.dist.PythranExtension(
            'scipy.stats._hypotests_pythran',
            sources=["scipy/stats/_hypotests_pythran.py"],
            config=['compiler.blas=none'])
        config.ext_modules.append(ext)

    # add BiasedUrn module
    config.add_data_files('biasedurn.pxd')
    from _generate_pyx import isNPY_OLD  # type: ignore[import]
    NPY_OLD = isNPY_OLD()
    # Newer NumPy requires linking npyrandom/npymath explicitly.
    if NPY_OLD:
        biasedurn_libs = []
        biasedurn_libdirs = []
    else:
        biasedurn_libs = ['npyrandom', 'npymath']
        biasedurn_libdirs = [
            join(np.get_include(), '..', '..', 'random', 'lib')
        ]
        biasedurn_libdirs += get_info('npymath')['library_dirs']

    ext = config.add_extension(
        'biasedurn',
        sources=[
            'biasedurn.cxx',
            'biasedurn/impls.cpp',
            'biasedurn/fnchyppr.cpp',
            'biasedurn/wnchyppr.cpp',
            'biasedurn/stoc1.cpp',
            'biasedurn/stoc3.cpp'
        ],
        include_dirs=[np.get_include()],
        library_dirs=biasedurn_libdirs,
        libraries=biasedurn_libs,
        define_macros=[('R_BUILD', None)],
        language='c++',
        depends=['biasedurn/stocR.h'],
    )
    ext._pre_build_hook = pre_build_hook

    # add boost stats distributions
    config.add_subpackage('_boost')

    # Type stubs
    config.add_data_files('*.pyi')

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils build configuration for the ``special`` subpackage.

    Single-quote-style variant: registers the C/Fortran support libraries
    and the ``specfun``, ``_ufuncs`` and ``_ufuncs_cxx`` extensions.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    # Python headers; add the platform-specific dir only when distinct.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())

    # C libraries
    config.add_library('sc_c_misc', sources=[join('c_misc', '*.c')],
                       include_dirs=[curdir] + inc_dirs,
                       macros=define_macros)
    config.add_library('sc_cephes', sources=[join('cephes', '*.c')],
                       include_dirs=[curdir] + inc_dirs,
                       macros=define_macros)

    # Fortran/C++ libraries
    config.add_library('sc_mach', sources=[join('mach', '*.f')],
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=[join('amos', '*.f')])
    config.add_library('sc_cdf', sources=[join('cdflib', '*.f')])
    config.add_library('sc_specfun', sources=[join('specfun', '*.f')])

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs: links all support libraries plus npymath.
    config.add_extension('_ufuncs',
                         libraries=[
                             'sc_amos', 'sc_c_misc', 'sc_cephes', 'sc_mach',
                             'sc_cdf', 'sc_specfun'
                         ],
                         depends=[
                             "_logit.h", "cephes.h", "amos_wrappers.h",
                             "cdf_wrappers.h", "specfun_wrappers.h",
                             "c_misc/misc.h", "cephes/mconf.h",
                             "cephes/cephes_names.h"
                         ],
                         sources=[
                             '_ufuncs.c', 'sf_error.c', '_logit.c.src',
                             "amos_wrappers.c", "cdf_wrappers.c",
                             "specfun_wrappers.c"
                         ],
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    # Extension _ufuncs_cxx
    config.add_extension('_ufuncs_cxx',
                         sources=[
                             '_ufuncs_cxx.cxx', 'sf_error.c',
                             '_faddeeva.cxx', 'Faddeeva.cc',
                         ],
                         libraries=['sc_cephes'],
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    # Ship tests and test data with the package.
    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')

    return config
versioneer.parentdir_prefix = 'numba-' cmdclass = versioneer.get_cmdclass() setup_args = { 'long_description': open('README.md').read(), } GCCFLAGS = ["-std=c89", "-Wdeclaration-after-statement", "-Werror"] if os.environ.get("NUMBA_GCC_FLAGS"): CFLAGS = GCCFLAGS else: CFLAGS = [] npymath_info = np_misc.get_info('npymath') ext_dynfunc = Extension(name='numba._dynfunc', sources=['numba/_dynfunc.c'], extra_compile_args=CFLAGS, depends=["numba/_pymodule.h"]) ext_numpyadapt = Extension(name='numba._numpyadapt', sources=['numba/_numpyadapt.c'], include_dirs=[numpy.get_include()], extra_compile_args=CFLAGS, depends=["numba/_pymodule.h"]) ext_npymath_exports = Extension(name='numba._npymath_exports', sources=['numba/_npymath_exports.c'], include_dirs=npymath_info['include_dirs'], libraries=npymath_info['libraries'],
def get_ext_modules():
    """
    Return a list of Extension instances for the setup() call.
    """
    # Note we don't import Numpy at the toplevel, since setup.py
    # should be able to run without Numpy for pip to discover the
    # build dependencies
    import numpy.distutils.misc_util as np_misc

    # Inject required options for extensions compiled against the Numpy
    # C API (include dirs, library dirs etc.)
    np_compile_args = np_misc.get_info('npymath')

    ext_dynfunc = Extension(name='numba._dynfunc',
                            sources=['numba/_dynfuncmod.c'],
                            extra_compile_args=CFLAGS,
                            depends=['numba/_pymodule.h',
                                     'numba/_dynfunc.c'])

    ext_dispatcher = Extension(name="numba._dispatcher",
                               sources=['numba/_dispatcher.c',
                                        'numba/_typeof.c',
                                        'numba/_hashtable.c',
                                        'numba/_dispatcherimpl.cpp',
                                        'numba/typeconv/typeconv.cpp'],
                               depends=["numba/_pymodule.h",
                                        "numba/_dispatcher.h",
                                        "numba/_typeof.h",
                                        "numba/_hashtable.h"],
                               **np_compile_args)

    ext_helperlib = Extension(name="numba._helperlib",
                              sources=["numba/_helpermod.c",
                                       "numba/_math_c99.c"],
                              extra_compile_args=CFLAGS,
                              extra_link_args=install_name_tool_fixer,
                              depends=["numba/_pymodule.h",
                                       "numba/_math_c99.h",
                                       "numba/_helperlib.c",
                                       "numba/_lapack.c",
                                       "numba/_npymath_exports.c",
                                       "numba/_random.c",
                                       "numba/mathnames.inc"],
                              **np_compile_args)

    ext_typeconv = Extension(name="numba.typeconv._typeconv",
                             sources=["numba/typeconv/typeconv.cpp",
                                      "numba/typeconv/_typeconv.cpp"],
                             depends=["numba/_pymodule.h"],
                             )

    ext_npyufunc_ufunc = Extension(name="numba.npyufunc._internal",
                                   sources=["numba/npyufunc/_internal.c"],
                                   depends=["numba/npyufunc/_ufunc.c",
                                            "numba/npyufunc/_internal.h",
                                            "numba/_pymodule.h"],
                                   **np_compile_args)

    # The work-queue backend is TBB when TBBROOT is set, otherwise the
    # plain C implementation; both expose the same module name.
    tbb_root = os.getenv('TBBROOT')
    if tbb_root:
        print("Using TBBROOT=", tbb_root)
        ext_npyufunc_workqueue = Extension(
            name='numba.npyufunc.workqueue',
            sources=['numba/npyufunc/tbbpool.cpp',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'],
            include_dirs=[os.path.join(tbb_root, 'include')],
            # MSVC does not take -std=c++11; only pass it off Windows.
            extra_compile_args=[] if sys.platform.startswith('win') else
            ['-std=c++11'],
            libraries=['tbb'],
            library_dirs=[
                os.path.join(tbb_root, 'lib', 'intel64', 'gcc4.4'),  # for Linux
                os.path.join(tbb_root, 'lib'),  # for MacOS
                os.path.join(tbb_root, 'lib', 'intel64', 'vc_mt'),  # for Windows
            ],
        )
    else:
        ext_npyufunc_workqueue = Extension(
            name='numba.npyufunc.workqueue',
            sources=['numba/npyufunc/workqueue.c',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'])

    ext_mviewbuf = Extension(name='numba.mviewbuf',
                             extra_link_args=install_name_tool_fixer,
                             sources=['numba/mviewbuf.c'])

    ext_nrt_python = Extension(name='numba.runtime._nrt_python',
                               sources=['numba/runtime/_nrt_pythonmod.c',
                                        'numba/runtime/nrt.c'],
                               depends=['numba/runtime/nrt.h',
                                        'numba/_pymodule.h',
                                        'numba/runtime/_nrt_python.c'],
                               **np_compile_args)

    ext_jitclass_box = Extension(name='numba.jitclass._box',
                                 sources=['numba/jitclass/_box.c'],
                                 depends=['numba/_pymodule.h'],
                                 )

    ext_cuda_extras = Extension(name='numba.cuda.cudadrv._extras',
                                sources=['numba/cuda/cudadrv/_extras.c'],
                                depends=['numba/_pymodule.h'],
                                include_dirs=["numba"])

    ext_modules = [ext_dynfunc, ext_dispatcher, ext_helperlib, ext_typeconv,
                   ext_npyufunc_ufunc, ext_npyufunc_workqueue, ext_mviewbuf,
                   ext_nrt_python, ext_jitclass_box, ext_cuda_extras]

    return ext_modules
cmdclass = {'clean': CleanCommand, 'build': build} cmdclass["build_src"] = DummyBuildSrc cmdclass["build_ext"] = CheckingBuildExt # some linux distros require it #NOTE: we are not currently using this but add it to Extension, if needed. # libraries = ['m'] if 'win32' not in sys.platform else [] from numpy.distutils.misc_util import get_info # Reset the cython exclusions file init_cython_exclusion(CYTHON_EXCLUSION_FILE) npymath_info = get_info("npymath") ext_data = dict(kalman_loglike={ "name": "statsmodels/tsa/kalmanf/kalman_loglike.c", "depends": ["statsmodels/src/capsule.h"], "include_dirs": ["statsmodels/src"], "sources": [] }, _hamilton_filter={ "name": "statsmodels/tsa/regime_switching/_hamilton_filter.c", "depends": [], "include_dirs": [], "sources": [] }, _kim_smoother={ "name": "statsmodels/tsa/regime_switching/_kim_smoother.c",
depends=tseries_depends + ['pandas/src/numpy_helper.h'], sources=[srcpath('tseries', suffix=suffix), 'pandas/src/period.c', 'pandas/src/datetime/np_datetime.c', 'pandas/src/datetime/np_datetime_strings.c'], include_dirs=[np.get_include()], # pyrex_gdb=True, # extra_compile_args=['-Wconversion'] ) sparse_ext = Extension('pandas._sparse', sources=[srcpath('sparse', suffix=suffix)], include_dirs=[np.get_include()]) npymath_info = get_info('npymath') npymath_libdir = npymath_info['library_dirs'][0] npymath_libdir = npymath_libdir.replace('\\\\', '\\') ujson_ext = Extension('pandas._ujson', sources=['pandas/src/ujson/python/ujson.c', 'pandas/src/ujson/python/objToJSON.c', 'pandas/src/ujson/python/JSONtoObj.c', 'pandas/src/ujson/lib/ultrajsonenc.c', 'pandas/src/ujson/lib/ultrajsondec.c', 'pandas/src/datetime/np_datetime.c' ], include_dirs=['pandas/src/ujson/python', 'pandas/src/ujson/lib', 'pandas/src/datetime',
def configuration(parent_package='', top_path=None):
    """numpy.distutils build configuration for the ``stats`` subpackage.

    Variant without the Pythran extension: registers statlib and the
    C/C++/Fortran extensions including the BiasedUrn C++ module.
    """
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    import numpy as np
    config = Configuration('stats', parent_package, top_path)

    config.add_data_dir('tests')

    statlib_src = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_src)

    # add statlib module
    config.add_extension('statlib',
                         sources=['statlib.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         libraries=['statlib'],
                         depends=statlib_src)

    # add _stats module
    config.add_extension('_stats', sources=['_stats.c'])

    # add mvn module
    config.add_extension('mvn', sources=['mvn.pyf', 'mvndst.f'])

    # add _sobol module
    config.add_extension('_sobol', sources=['_sobol.c'])
    config.add_data_files('_sobol_direction_numbers.npz')

    # add _qmc_cy module
    ext = config.add_extension('_qmc_cy', sources=['_qmc_cy.cxx'])
    ext._pre_build_hook = set_cxx_flags_hook

    # add BiasedUrn module
    config.add_data_files('biasedurn.pxd')
    from _generate_pyx import isNPY_OLD  # type: ignore[import]
    NPY_OLD = isNPY_OLD()
    # Newer NumPy requires linking npyrandom/npymath explicitly.
    if NPY_OLD:
        biasedurn_libs = []
        biasedurn_libdirs = []
    else:
        biasedurn_libs = ['npyrandom', 'npymath']
        biasedurn_libdirs = [
            join(np.get_include(), '..', '..', 'random', 'lib')
        ]
        biasedurn_libdirs += get_info('npymath')['library_dirs']

    ext = config.add_extension(
        'biasedurn',
        sources=[
            'biasedurn.cxx',
            'biasedurn/impls.cpp',
            'biasedurn/fnchyppr.cpp',
            'biasedurn/wnchyppr.cpp',
            'biasedurn/stoc1.cpp',
            'biasedurn/stoc3.cpp'
        ],
        include_dirs=[np.get_include()],
        library_dirs=biasedurn_libdirs,
        libraries=biasedurn_libs,
        define_macros=[('R_BUILD', None)],
        language='c++',
        # Clang on macOS needs narrowing warnings silenced for this code.
        extra_compile_args=['-Wno-narrowing'] if system() == 'Darwin' else [],
        depends=['biasedurn/stocR.h'],
    )
    ext._pre_build_hook = pre_build_hook

    # add boost stats distributions
    config.add_subpackage('_boost')

    # Type stubs
    config.add_data_files('*.pyi')

    return config
cmdclass = {'clean': CleanCommand, 'build': build} cmdclass["build_src"] = DummyBuildSrc cmdclass["build_ext"] = CheckingBuildExt # some linux distros require it #NOTE: we are not currently using this but add it to Extension, if needed. # libraries = ['m'] if 'win32' not in sys.platform else [] from numpy.distutils.misc_util import get_info npymath_info = get_info("npymath") ext_data = dict( kalman_loglike = {"name" : "statsmodels/tsa/kalmanf/kalman_loglike.c", "depends" : ["statsmodels/src/capsule.h"], "include_dirs": ["statsmodels/src"], "sources" : []}, _statespace = {"name" : "statsmodels/tsa/statespace/_statespace.c", "depends" : ["statsmodels/src/capsule.h"], "include_dirs": ["statsmodels/src"] + npymath_info['include_dirs'], "libraries": npymath_info['libraries'], "library_dirs": npymath_info['library_dirs'], "sources" : []}, linbin = {"name" : "statsmodels/nonparametric/linbin.c", "depends" : [], "sources" : []}, _smoothers_lowess = {"name" : "statsmodels/nonparametric/_smoothers_lowess.c",
] ] if not lapack_info: # No LAPACK in NumPy print('### Warning: Using unoptimized blas/lapack @@@') MODULE_SOURCES.extend(lapack_lite_files[:-1]) # all but f2c.h MODULE_DEPENDENCIES.extend(lapack_lite_files) else: if sys.platform == 'win32': print('### Warning: python.xerbla.c is disabled ###') else: MODULE_SOURCES.extend(lapack_lite_files[:1]) # python_xerbla.c MODULE_DEPENDENCIES.extend(lapack_lite_files[:1]) npymath_info = np_misc_util.get_info('npymath') extra_opts = copy.deepcopy(lapack_info) for key, val in npymath_info.items(): if extra_opts.get(key): extra_opts[key].extend(val) else: extra_opts[key] = copy.deepcopy(val) # make sure the compiler can find conditional_omp.h extra_opts['include_dirs'] += [os.path.join(C_SRC_PATH)] cmdclass = versioneer.get_cmdclass() if "GULINALG_DISABLE_OPENMP" not in os.environ: # OpenMP will be disabled unless omp_test_c below compiles successfully
import pyarrow as pa

from docs.source.buildscripts.sdc_build_doc import SDCBuildDoc

# Note we don't import Numpy at the toplevel, since setup.py
# should be able to run without Numpy for pip to discover the
# build dependencies
import numpy.distutils.misc_util as np_misc

import versioneer

# String constants for Intel SDC project configuration
# This name is used for wheel package build
SDC_NAME_STR = 'sdc'

# Inject required options for extensions compiled against the Numpy
# C API (include dirs, library dirs etc.)
np_compile_args = np_misc.get_info('npymath')

# Platform switches used throughout the build.
is_win = platform.system() == 'Windows'
is_osx = platform.system() == 'Darwin'


def readme():
    """Return the contents of README.rst (used as the long description)."""
    with open('README.rst', encoding='utf-8') as f:
        return f.read()


# package environment variable is PREFIX during build time
if 'CONDA_BUILD' in os.environ:
    PREFIX_DIR = os.environ['PREFIX']
else:
    PREFIX_DIR = os.environ['CONDA_PREFIX']
def get_ext_modules():
    """
    Return a list of Extension instances for the setup() call.
    """
    # Note we don't import Numpy at the toplevel, since setup.py
    # should be able to run without Numpy for pip to discover the
    # build dependencies
    import numpy.distutils.misc_util as np_misc

    # Inject required options for extensions compiled against the Numpy
    # C API (include dirs, library dirs etc.)
    np_compile_args = np_misc.get_info('npymath')

    ext_dynfunc = Extension(name='numba._dynfunc',
                            sources=['numba/_dynfuncmod.c'],
                            extra_compile_args=CFLAGS,
                            depends=['numba/_pymodule.h',
                                     'numba/_dynfunc.c'])

    ext_dispatcher = Extension(name="numba._dispatcher",
                               sources=['numba/_dispatcher.c',
                                        'numba/_typeof.c',
                                        'numba/_hashtable.c',
                                        'numba/_dispatcherimpl.cpp',
                                        'numba/typeconv/typeconv.cpp'],
                               depends=["numba/_pymodule.h",
                                        "numba/_dispatcher.h",
                                        "numba/_typeof.h",
                                        "numba/_hashtable.h"],
                               **np_compile_args)

    ext_helperlib = Extension(name="numba._helperlib",
                              sources=["numba/_helpermod.c",
                                       "numba/_math_c99.c"],
                              extra_compile_args=CFLAGS,
                              extra_link_args=install_name_tool_fixer,
                              depends=["numba/_pymodule.h",
                                       "numba/_math_c99.h",
                                       "numba/_helperlib.c",
                                       "numba/_lapack.c",
                                       "numba/_npymath_exports.c",
                                       "numba/_random.c",
                                       "numba/_dictobject.c",
                                       "numba/mathnames.inc",],
                              **np_compile_args)

    ext_typeconv = Extension(name="numba.typeconv._typeconv",
                             sources=["numba/typeconv/typeconv.cpp",
                                      "numba/typeconv/_typeconv.cpp"],
                             depends=["numba/_pymodule.h"],
                             )

    ext_npyufunc_ufunc = Extension(name="numba.npyufunc._internal",
                                   sources=["numba/npyufunc/_internal.c"],
                                   depends=["numba/npyufunc/_ufunc.c",
                                            "numba/npyufunc/_internal.h",
                                            "numba/_pymodule.h"],
                                   **np_compile_args)

    # Work-queue backends are accumulated here; which ones are appended
    # depends on the TBB / OpenMP detection below.
    ext_npyufunc_workqueue_impls = []

    def check_file_at_path(path2file):
        """
        Takes a list as a path, a single glob (*) is permitted as an entry
        which indicates that expansion at this location is required
        (i.e. version might not be known).
        """
        found = None
        # Candidate roots: the Python installation prefix plus conda prefixes.
        path2check = [os.path.split(os.path.split(sys.executable)[0])[0]]
        path2check += [os.getenv(n, '') for n in ['CONDA_PREFIX', 'PREFIX']]
        if sys.platform.startswith('win'):
            path2check += [os.path.join(p, 'Library') for p in path2check]
        for p in path2check:
            if p:
                if '*' in path2file:
                    # Expand the single glob entry by listing the directory
                    # above it and probing each child for the remaining path.
                    globloc = path2file.index('*')
                    searchroot = os.path.join(*path2file[:globloc])
                    try:
                        potential_locs = os.listdir(os.path.join(p, searchroot))
                    except BaseException:
                        continue
                    searchfor = path2file[globloc + 1:]
                    for x in potential_locs:
                        potpath = os.path.join(p, searchroot, x, *searchfor)
                        if os.path.isfile(potpath):
                            found = p  # the latest is used
                elif os.path.isfile(os.path.join(p, *path2file)):
                    found = p  # the latest is used
        return found

    # Search for Intel TBB, first check env var TBBROOT then conda locations
    tbb_root = os.getenv('TBBROOT')
    if not tbb_root:
        tbb_root = check_file_at_path(['include', 'tbb', 'tbb.h'])

    # Set various flags for use in TBB and openmp. On OSX, also find OpenMP!
    have_openmp = True
    if sys.platform.startswith('win'):
        cpp11flags = []
        ompcompileflags = ['-openmp']
        omplinkflags = []
    elif sys.platform.startswith('darwin'):
        cpp11flags = ['-std=c++11']
        # This is a bit unusual but necessary...
        # llvm (clang) OpenMP is used for headers etc at compile time
        # Intel OpenMP (libiomp5) provides the link library.
        # They are binary compatible and may not safely coexist in a process,
        # as libiomp5 is more prevalent and often linked in for NumPy it is
        # used here!
        ompcompileflags = ['-fopenmp']
        omplinkflags = ['-fopenmp=libiomp5']
        omppath = ['lib', 'clang', '*', 'include', 'omp.h']
        have_openmp = check_file_at_path(omppath)
    else:
        cpp11flags = ['-std=c++11']
        ompcompileflags = ['-fopenmp']
        # NOTE(review): both branches assign the same flags, so the ppc64le
        # special case is currently a no-op.
        if platform.machine() == 'ppc64le':
            omplinkflags = ['-fopenmp']
        else:
            omplinkflags = ['-fopenmp']

    if tbb_root:
        print("Using Intel TBB from:", tbb_root)
        ext_npyufunc_tbb_workqueue = Extension(
            name='numba.npyufunc.tbbpool',
            sources=['numba/npyufunc/tbbpool.cpp',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'],
            include_dirs=[os.path.join(tbb_root, 'include')],
            extra_compile_args=cpp11flags,
            libraries=['tbb'],  # TODO: if --debug or -g, use 'tbb_debug'
            library_dirs=[
                os.path.join(tbb_root, 'lib', 'intel64', 'gcc4.4'),  # for Linux
                os.path.join(tbb_root, 'lib'),  # for MacOS
                os.path.join(tbb_root, 'lib', 'intel64', 'vc_mt'),  # for Windows
            ],
        )
        ext_npyufunc_workqueue_impls.append(ext_npyufunc_tbb_workqueue)
    else:
        print("TBB not found")

    # Disable OpenMP if we are building a wheel or
    # forced by user with NUMBA_NO_OPENMP=1
    if is_building_wheel() or os.getenv('NUMBA_NO_OPENMP'):
        print("OpenMP disabled")
    elif have_openmp:
        print("Using OpenMP from:", have_openmp)
        # OpenMP backed work queue
        ext_npyufunc_omppool = Extension(
            name='numba.npyufunc.omppool',
            sources=['numba/npyufunc/omppool.cpp',
                     'numba/npyufunc/gufunc_scheduler.cpp'],
            depends=['numba/npyufunc/workqueue.h'],
            extra_compile_args=ompcompileflags + cpp11flags,
            extra_link_args=omplinkflags)
        ext_npyufunc_workqueue_impls.append(ext_npyufunc_omppool)
    else:
        print("OpenMP not found")

    # Build the Numba workqueue implementation irrespective of whether the TBB
    # version is built. Users can select a backend via env vars.
    ext_npyufunc_workqueue = Extension(
        name='numba.npyufunc.workqueue',
        sources=['numba/npyufunc/workqueue.c',
                 'numba/npyufunc/gufunc_scheduler.cpp'],
        depends=['numba/npyufunc/workqueue.h'])
    ext_npyufunc_workqueue_impls.append(ext_npyufunc_workqueue)

    ext_mviewbuf = Extension(name='numba.mviewbuf',
                             extra_link_args=install_name_tool_fixer,
                             sources=['numba/mviewbuf.c'])

    ext_nrt_python = Extension(name='numba.runtime._nrt_python',
                               sources=['numba/runtime/_nrt_pythonmod.c',
                                        'numba/runtime/nrt.c'],
                               depends=['numba/runtime/nrt.h',
                                        'numba/_pymodule.h',
                                        'numba/runtime/_nrt_python.c'],
                               **np_compile_args)

    ext_jitclass_box = Extension(name='numba.jitclass._box',
                                 sources=['numba/jitclass/_box.c'],
                                 depends=['numba/_pymodule.h'],
                                 )

    ext_cuda_extras = Extension(name='numba.cuda.cudadrv._extras',
                                sources=['numba/cuda/cudadrv/_extras.c'],
                                depends=['numba/_pymodule.h'],
                                include_dirs=["numba"])

    ext_modules = [ext_dynfunc, ext_dispatcher, ext_helperlib, ext_typeconv,
                   ext_npyufunc_ufunc, ext_mviewbuf, ext_nrt_python,
                   ext_jitclass_box, ext_cuda_extras]
    # Append whichever work-queue backends were selected above.
    ext_modules += ext_npyufunc_workqueue_impls

    return ext_modules
def configuration(parent_package="", top_path=None):
    """Build configuration for the ``special`` subpackage.

    Declares the C/Fortran support libraries (cephes, c_misc, mach, amos,
    cdflib, specfun) and the extension modules (``specfun``, ``_ufuncs``,
    ``_ufuncs_cxx``, ``_ellip_harm_2``, ``_comb``) using numpy.distutils.

    Parameters
    ----------
    parent_package : str
        Name of the parent package; passed through to ``Configuration``.
    top_path : str or None
        Top of the source tree; passed through to ``Configuration``.

    Returns
    -------
    numpy.distutils.misc_util.Configuration
        The populated configuration object.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info as get_system_info

    config = Configuration("special", parent_package, top_path)

    define_macros = []
    if sys.platform == "win32":
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        # MSVC needs this for M_PI etc. in <math.h>.
        define_macros.append(("_USE_MATH_DEFINES", None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    # Add the platform-specific include dir only when it differs from the
    # generic one, to avoid a duplicate -I entry.
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.insert(0, get_numpy_include_dirs())

    # C libraries
    c_misc_src = [join("c_misc", "*.c")]
    c_misc_hdr = [join("c_misc", "*.h")]
    cephes_src = [join("cephes", "*.c")]
    cephes_hdr = [join("cephes", "*.h")]
    config.add_library(
        "sc_c_misc",
        sources=c_misc_src,
        include_dirs=[curdir] + inc_dirs,
        # FIX: the original listed `cephes_hdr` twice in this dependency
        # list; the duplicate only bloated rebuild checks and is dropped.
        depends=(cephes_hdr + cephes_src + c_misc_hdr + ["*.h"]),
        macros=define_macros,
    )
    config.add_library(
        "sc_cephes",
        sources=cephes_src,
        include_dirs=[curdir] + inc_dirs,
        depends=(cephes_hdr + ["*.h"]),
        macros=define_macros,
    )

    # Fortran/C++ libraries
    mach_src = [join("mach", "*.f")]
    amos_src = [join("amos", "*.f")]
    cdf_src = [join("cdflib", "*.f")]
    specfun_src = [join("specfun", "*.f")]
    # mach must be compiled without optimization (value-sensitive code).
    config.add_library("sc_mach", sources=mach_src,
                       config_fc={"noopt": (__file__, 1)})
    config.add_library("sc_amos", sources=amos_src)
    config.add_library("sc_cdf", sources=cdf_src)
    config.add_library("sc_specfun", sources=specfun_src)

    # Extension specfun
    config.add_extension(
        "specfun",
        sources=["specfun.pyf"],
        f2py_options=["--no-wrap-functions"],
        depends=specfun_src,
        define_macros=[],
        libraries=["sc_specfun"],
    )

    # Extension _ufuncs
    headers = ["*.h", join("c_misc", "*.h"), join("cephes", "*.h")]
    ufuncs_src = ["_ufuncs.c", "sf_error.c", "_logit.c.src",
                  "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src +
                  cephes_src + mach_src + cdf_src + specfun_src)
    cfg = dict(get_system_info("lapack_opt"))
    cfg.setdefault("include_dirs", []).extend(
        [curdir] + inc_dirs + [numpy.get_include()])
    cfg.setdefault("libraries", []).extend(
        ["sc_amos", "sc_c_misc", "sc_cephes", "sc_mach",
         "sc_cdf", "sc_specfun"])
    cfg.setdefault("define_macros", []).extend(define_macros)
    config.add_extension("_ufuncs",
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = ["_ufuncs_cxx.cxx", "sf_error.c",
                      "_faddeeva.cxx", "Faddeeva.cc"]
    ufuncs_cxx_dep = headers + ufuncs_cxx_src + cephes_src + ["*.hh"]
    config.add_extension(
        "_ufuncs_cxx",
        sources=ufuncs_cxx_src,
        depends=ufuncs_cxx_dep,
        include_dirs=[curdir],
        define_macros=define_macros,
        extra_info=get_info("npymath"),
    )

    # _ellip_harm_2 links against whatever LAPACK the build found.
    cfg = dict(get_system_info("lapack_opt"))
    config.add_extension("_ellip_harm_2",
                         sources=["_ellip_harm_2.c", "sf_error.c"],
                         **cfg)

    # combinatorics
    config.add_extension("_comb", sources=["_comb.c"])

    config.add_data_files("tests/*.py")
    config.add_data_files("tests/data/README")
    config.add_data_files("tests/data/*.npz")

    return config