def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``ssm`` state-space package."""
    from numpy.distutils.misc_util import Configuration, get_info

    config = Configuration('ssm', parent_package, top_path)

    # The four statespace extensions share the npymath info and the
    # dismalpy/src include directory.
    npymath_info = get_info("npymath")
    for module in ('_statespace', '_kalman_filter',
                   '_kalman_smoother', '_simulation_smoother'):
        config.add_extension(module,
                             include_dirs=['dismalpy/src'],
                             sources=['%s.c' % module],
                             extra_info=npymath_info)

    # _tools does not need npymath.
    config.add_extension('_tools',
                         include_dirs=['dismalpy/src'],
                         sources=['_tools.c'])

    config.add_subpackage('compat')
    config.add_data_dir('tests')
    config.add_subpackage('_filters')
    config.add_subpackage('_smoothers')
    config.make_config_py()
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for ``spatial_016`` (the cKDTree port)."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial_016', parent_package, top_path)

    # Python include dirs; add the platform-specific one only when it differs.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # FIX: get_numpy_include_dirs() returns a *list*; use extend() instead of
    # append() so inc_dirs stays a flat list of strings rather than relying on
    # numpy.distutils' implicit flattening of nested path lists.
    inc_dirs.extend(get_numpy_include_dirs())

    ckdtree_src = ['ckdtree_query.cxx',
                   'ckdtree_globals.cxx',
                   'ckdtree_cpp_exc.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = ['ckdtree_decl.h',
                       'ckdtree_exc.h',
                       'ckdtree_methods.h',
                       'ckdtree_utils.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    # Rebuild whenever any header or helper source changes.
    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=[join('ckdtree', 'ckdtree.cxx')] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``fit_interpolate`` package."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('fit_interpolate', parent_package, top_path)
    config.add_data_dir('tests')

    # Shared C utilities live one level up, in ../utilities.
    #util_dir = os.path.abspath(join(os.path.dirname(__file__),'..','utilities'))
    util_dir = join('..', 'utilities')
    util_srcs = [join(util_dir, name)
                 for name in ('quad_tree.c', 'sparse_dok.c', 'sparse_csr.c')]

    # OpenMP is unavailable with Apple's default toolchain.
    extra_args = None if sys.platform == 'darwin' else ['-fopenmp']

    config.add_extension('fitsmooth',
                         sources=['fitsmooth.c'] + util_srcs,
                         include_dirs=[util_dir],
                         extra_compile_args=extra_args,
                         extra_link_args=extra_args)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the RRTMG_LW radiation extension.

    If no Fortran compiler can be found the package is still configured,
    but the f2py extension itself is skipped.
    """
    # NOTE(review): exported as a module-level global; presumably inspected
    # by other build code -- confirm before removing.
    global config
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.fcompiler import get_default_fcompiler, CompilerNotFound
    build = True
    try:
        # figure out which compiler we're going to use
        compiler = get_default_fcompiler()
        # set some fortran compiler-dependent flags
        f90flags = []
        if compiler == 'gnu95':
            f90flags.append('-fno-range-check')
            f90flags.append('-ffree-form')
            f90flags.append('-fPIC')
        elif compiler == 'intel' or compiler == 'intelem':
            f90flags.append('-132')
        # Need zero-level optimization to avoid build problems with rrtmg_lw_k_g.f90
        #f90flags.append('-O2')
        # Suppress all compiler warnings (avoid huge CI log files)
        f90flags.append('-w')
    except CompilerNotFound:
        print('No Fortran compiler found, not building the RRTMG_LW radiation module!')
        build = False
    config = Configuration(package_name='_rrtmg_lw',
                           parent_name=parent_package,
                           top_path=top_path)
    if build:
        # rrtmg_lw_gen_source is expected to be defined at module level by
        # the surrounding setup script.
        config.add_extension(name='_rrtmg_lw',
                             sources=[rrtmg_lw_gen_source],
                             extra_f90_compile_args=f90flags,
                             f2py_options=['--quiet'],
                             )
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``utilities`` package."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('utilities', parent_package, top_path)

    config.add_data_dir('tests')
    config.add_data_dir(join('tests', 'data'))

    config.add_extension('sparse_ext', sources='sparse_ext.c')
    config.add_extension('sparse_matrix_ext',
                         sources=['sparse_matrix_ext.c', 'sparse_dok.c'])
    config.add_extension('util_ext', sources='util_ext.c')

    # OpenMP is skipped on macOS, whose default toolchain lacks it.
    extra_args = None if sys.platform == 'darwin' else ['-fopenmp']
    config.add_extension('cg_ext',
                         sources='cg_ext.c',
                         extra_compile_args=extra_args,
                         extra_link_args=extra_args)

    config.add_extension('quad_tree_ext',
                         sources=['quad_tree_ext.c', 'quad_tree.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the auto-named ``crcm`` extension."""
    import os.path as op
    from numpy.distutils.misc_util import Configuration
    from sfepy import Config

    site_config = Config()
    os_flag = {'posix': 0, 'windows': 1}

    # The package name is taken from the directory this file lives in.
    auto_dir = op.dirname(__file__)
    auto_name = op.split(auto_dir)[-1]
    config = Configuration(auto_name, parent_package, top_path)

    defines = [('__SDIR__', "'\"%s\"'" % auto_dir),
               ('SFEPY_PLATFORM', os_flag[site_config.system()])]
    if '-DDEBUG_FMF' in site_config.debug_flags():
        defines.append(('DEBUG_FMF', None))

    fem_src = [op.join('../../fem/extmods', ii)
               for ii in ['common_python.c']]

    config.add_extension('crcm',
                         sources=['crcm.pyx', 'rcm.c'] + fem_src,
                         extra_compile_args=site_config.compile_flags(),
                         extra_link_args=site_config.link_flags(),
                         include_dirs=[auto_dir, '../../fem/extmods'],
                         define_macros=defines)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``isolve`` iterative solvers."""
    from scipy._build_utils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils import get_g77_abi_wrappers

    config = Configuration('isolve', parent_package, top_path)
    lapack_opt = get_info('lapack_opt')

    # Templated Fortran sources for the iterative methods; commented-out
    # entries are methods that are deliberately not built.
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
               # 'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
               # 'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
               # 'SORREVCOM.f.src'
               ]

    sources = ['getbreak.f.src'] + methods + ['_iterative.pyf.src']
    sources = [join('iterative', name) for name in sources]
    sources += get_g77_abi_wrappers(lapack_opt)

    config.add_extension('_iterative',
                         sources=sources,
                         extra_info=lapack_opt)
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy.distutils configuration for f2py-wrapped modules.

    Relies on module-level globals set up by the surrounding script:
    ``modulename``, ``fortran_files``, ``libraries``, ``module_infos``,
    ``c_files``, ``define_macros``, ``undef_macros``, ``include_dirs``
    and ``extra_objects``.
    """
    from numpy.distutils.misc_util import Configuration

    # BUGFIX: the default used to be ``top_path=None or ''``, which always
    # evaluates to '' -- use the conventional None so numpy.distutils can
    # auto-detect the top path.
    config = Configuration('', parent_package, top_path)

    # Optional library holding the shared Fortran sources; linked first.
    flibname = modulename + '_fortran_f2py'
    if fortran_files:
        config.add_library(flibname, sources=fortran_files)
        libraries.insert(0, flibname)

    for module_info in module_infos:
        name = module_info['name']
        c_sources = module_info['c_sources']
        f_sources = module_info['f_sources']
        language = module_info['language']

        # Per-module Fortran wrapper library, linked ahead of the common ones.
        if f_sources:
            f_lib = '%s_f_wrappers_f2py' % (name,)
            config.add_library(f_lib, sources=f_sources)
            libs = [f_lib] + libraries
        else:
            libs = libraries

        config.add_extension(name,
                             sources=c_sources + c_files,
                             libraries=libs,
                             define_macros=define_macros,
                             undef_macros=undef_macros,
                             include_dirs=include_dirs,
                             extra_objects=extra_objects,
                             language=language,
                             )
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration exposing the Fortran routines extension."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('', parent_package, top_path)
    # Free-form Fortran 95 source wrapped via f2py.
    config.add_extension('fortran_routines',
                         sources=['src/fortran/fortran_routines.f95'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the WMAP 5-year likelihood wrapper."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration("wmap5Wrapper", parent_package, top_path,
                           namespace_packages=['wmap5Wrapper'],
                           version='0.3.0',
                           author='Joel Akeret',
                           author_email="*****@*****.**",
                           description="perform the wmap likelihood computation",
                           url="http://www.fhnw.ch",
                           long_description=desc)

    # WMAP likelihood Fortran sources, then the wrapper core and the f2py
    # signature file; ordering matters for Fortran module dependencies.
    wmap_sources = ['likelihood_v3/read_archive_map.f90',
                    'likelihood_v3/read_fits.f90',
                    'likelihood_v3/healpix_types.f90',
                    'likelihood_v3/br_mod_dist.f90',
                    'likelihood_v3/WMAP_5yr_options.F90',
                    'likelihood_v3/WMAP_5yr_util.f90',
                    'likelihood_v3/WMAP_5yr_gibbs.F90',
                    'likelihood_v3/WMAP_5yr_tt_pixlike.F90',
                    'likelihood_v3/WMAP_5yr_tt_beam_ptsrc_chisq.f90',
                    'likelihood_v3/WMAP_5yr_teeebb_pixlike.F90',
                    'likelihood_v3/WMAP_5yr_likelihood.F90',
                    'source/WmapWrapperCore.f90',
                    'source/WmapWrapper.f90',
                    '_wmapWrapper.pyf']

    # include_dirs, library_dirs, libraries, extra_f90_compile_args and
    # extra_link_args come from the surrounding setup script.
    config.add_extension('_wmapWrapper',
                         sources=wmap_sources,
                         include_dirs=include_dirs,
                         library_dirs=library_dirs,
                         libraries=libraries,
                         extra_f90_compile_args=extra_f90_compile_args,
                         extra_link_args=extra_link_args)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration building the SLICOT f2py extension."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('', parent_package, top_path)
    # The .pyf signature file comes first; glob collects all Fortran sources.
    slicot_sources = ['src/slicot.pyf'] + glob('src/*.f')
    config.add_extension('slicot',
                         libraries=['lapack'],
                         sources=slicot_sources)
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``sklearnef.tree`` package.

    Raises
    ------
    Exception
        When no pre-compiled ``_tree.c`` matching the installed
        scikit-learn version ships with the package.
    """
    config = Configuration("tree", parent_package, top_path)
    libraries = []
    if os.name == 'posix':
        libraries.append('m')

    # Hoisted: the package directory was previously computed twice.
    pkg_dir = os.path.dirname(os.path.realpath(__file__))
    version_dir = "{}/headers/{}".format(pkg_dir, sklearn.__version__)

    # check for pre-compiled versions for the encountered sklearn version
    if not os.path.isdir(version_dir) or \
            not os.path.isfile("{}/_tree.c".format(version_dir)):
        raise Exception(
            """sklearnef holds no pre-compiled _tree.c for your current scikit-learn version ({version}). Please download the corresponding header file from https://raw.githubusercontent.com/scikit-learn/scikit-learn/{version}/sklearn/tree/_tree.pxd, place it in sklearnef/tree/headers/sklearn/tree/ and compile _tree.pyx with cython using 'cython _tree.pyx -o headers/{version}/_tree.c -I headers/'. Then re-run the installation of sklearnef.""".format(version=sklearn.__version__))

    config.add_extension("_diffentropy",
                         sources=["headers/_diffentropy.c"],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries + ['lapack', 'blas'],
                         extra_compile_args=["-O3"])
    # Build the version-matched pre-compiled tree module (relative path,
    # as required by numpy.distutils source resolution).
    config.add_extension("_tree",
                         sources=["headers/{version}/_tree.c".format(
                             version=sklearn.__version__)],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries,
                         extra_compile_args=["-O3"])
    config.add_subpackage("tests")
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration wrapping the C library via f2py."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration("", parent_package, top_path)
    # The .pyf signature file drives the f2py wrapping of c_functions.c.
    config.add_extension("c_library",
                         sources=["c_library.pyf", "c_functions.c"])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the isogeometric ``igac`` extension."""
    import os.path as op
    from numpy.distutils.misc_util import Configuration
    from sfepy import Config

    site_config = Config()
    system = site_config.system()

    # Package name comes from the directory this file lives in.
    auto_dir = op.dirname(__file__)
    config = Configuration(op.split(auto_dir)[-1], parent_package, top_path)

    # MSVC spells the inline keyword differently.
    defines = [('SFEPY_PLATFORM', {'posix': 0, 'windows': 1}[system]),
               ('inline', 'inline' if system == 'posix' else '__inline')]
    if '-DDEBUG_FMF' in site_config.debug_flags():
        defines.append(('DEBUG_FMF', None))

    common_path = '../../common/extmods'
    common_src = [op.join(common_path, name)
                  for name in ('fmfield.c', 'geommech.c', 'common_python.c')]

    config.add_extension('igac',
                         sources=['igac.pyx', 'nurbs.c'] + common_src,
                         depends=common_src,
                         extra_compile_args=site_config.compile_flags(),
                         extra_link_args=site_config.link_flags(),
                         include_dirs=[auto_dir, common_path],
                         define_macros=defines)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``pyrex_ext`` example package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('pyrex_ext', parent_package, top_path)
    # Single Pyrex/Cython source.
    config.add_extension('primes', ['primes.pyx'])
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``graph`` package."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('graph', parent_package, top_path)
    config.add_data_dir('tests')

    # We need this because libcstat.a is linked to lapack, which can
    # be a fortran library, and the linker needs this information.
    lapack_info = get_info('lapack_opt', 0)
    if 'libraries' not in lapack_info:
        # But on OSX that may not give us what we need, so try with 'lapack'
        # instead. NOTE: scipy.linalg uses lapack_opt, not 'lapack'...
        lapack_info = get_info('lapack', 0)

    # Both extensions link against libcstat and share the lapack info.
    for ext_name, src in (('_graph', 'graph.c'), ('_field', 'field.c')):
        config.add_extension(ext_name,
                             sources=[src],
                             libraries=['cstat'],
                             extra_info=lapack_info)
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``pyxit`` package."""
    # Configuration and numpy come from the surrounding setup script.
    config = Configuration("pyxit", parent_package, top_path)
    config.add_extension("_estimator",
                         sources=["_estimator.c"],
                         include_dirs=[numpy.get_include()])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``isolve`` iterative solvers."""
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration

    config = Configuration('isolve', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # iterative methods; commented-out entries are deliberately not built
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
               # 'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
               # 'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
               # 'SORREVCOM.f.src'
               ]

    sources = ['STOPTEST2.f.src', 'getbreak.f.src'] + methods
    sources.append('_iterative.pyf.src')

    config.add_extension('_iterative',
                         sources=[join('iterative', name) for name in sources],
                         extra_info=lapack_opt)
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``statistics`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('statistics', parent_package, top_path)
    config.add_data_dir('tests')
    # Cython source; numpy headers are needed for the ndarray C API.
    config.add_extension('intvol', 'intvol.pyx',
                         include_dirs=[np.get_include()])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``odr`` package."""
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration('odr', parent_package, top_path)

    blas_info = get_info('blas_opt')
    libodr_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f']
    if blas_info:
        libodr_files.append('d_lpk.f')
    else:
        # Fall back to the bundled reference BLAS routines.
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append('d_lpkbls.f')

    odrpack_src = [join('odrpack', x) for x in libodr_files]
    config.add_library('odrpack', sources=odrpack_src)

    # pop() keeps these keys out of the **blas_info splat below.
    libraries = ['odrpack'] + blas_info.pop('libraries', [])
    include_dirs = ['.'] + blas_info.pop('include_dirs', [])

    config.add_extension('__odrpack',
                         sources=['__odrpack.c'],
                         libraries=libraries,
                         include_dirs=include_dirs,
                         depends=(['odrpack.h'] + odrpack_src),
                         **blas_info)

    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``spline2`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('spline2', parent_package, top_path)
    # C implementation plus its SWIG-style wrapper.
    config.add_extension('spline2c',
                         sources=['spline2_nonint.c', 'spline2_wrap.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the f2py test module ``m``."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('', parent_package, top_path)
    # The .pyf signature drives the f2py wrapping of foo.c.
    config.add_extension('m', sources=['m.pyf', 'foo.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``filter`` package.

    Cythonizes the .pyx sources in place (via the module-level ``cython``
    helper and ``base_path``) and registers the resulting C extensions.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('filter', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_dir('rank/tests')

    # Regenerate the C sources from Cython before registering them.
    for pyx in ('_ctmf.pyx', 'rank/core_cy.pyx', 'rank/generic_cy.pyx',
                'rank/percentile_cy.pyx', 'rank/bilateral_cy.pyx'):
        cython([pyx], working_path=base_path)

    # FIX: pass the include-dir list directly instead of wrapping it in
    # another list -- [get_numpy_include_dirs()] nested list-in-list and only
    # worked because numpy.distutils happens to flatten path arguments.
    np_includes = get_numpy_include_dirs()

    config.add_extension('_ctmf',
                         sources=['_ctmf.c'],
                         include_dirs=np_includes)
    for name in ('core_cy', 'generic_cy', 'percentile_cy', 'bilateral_cy'):
        config.add_extension('rank.' + name,
                             sources=['rank/%s.c' % name],
                             include_dirs=np_includes)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``iced`` package."""
    from numpy.distutils.misc_util import Configuration

    # libm is only linked explicitly on POSIX platforms.
    libraries = ['m'] if os.name == 'posix' else []

    config = Configuration('iced', parent_package, top_path)
    config.add_subpackage('utils')
    config.add_subpackage("io")

    # Both C extensions share the bundled cblas headers and numpy includes.
    cblas_includes = [join('..', 'src', 'cblas'), numpy.get_include()]
    for ext_name, src in (('_normalization_', '_normalization_.c'),
                          ('_filter_', '_filter_.c')):
        config.add_extension(ext_name,
                             libraries=libraries,
                             sources=[src],
                             include_dirs=cblas_includes)
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``random`` (mtrand) package."""
    from numpy.distutils.misc_util import Configuration, get_mathlibs

    config = Configuration("random", parent_package, top_path)

    def generate_libraries(ext, build_dir):
        # Deferred build hook: resolve the math libraries (and, on Windows,
        # Advapi32 for the crypto API) at build time and attach them to the
        # extension.
        config_cmd = config.get_config_cmd()
        if top_path is None:
            libs = get_mathlibs()
        else:
            path = join(split(build_dir)[0], "core")
            libs = get_mathlibs(path)
        tc = testcode_wincrypt()
        if config_cmd.try_run(tc):
            libs.append("Advapi32")
        ext.libraries.extend(libs)
        return None

    # Configure mtrand
    mtrand_c = [join("mtrand", name)
                for name in ("mtrand.c", "randomkit.c",
                             "initarray.c", "distributions.c")]
    config.add_extension("mtrand",
                         sources=mtrand_c + [generate_libraries],
                         libraries=[],
                         depends=[join("mtrand", "*.h"),
                                  join("mtrand", "*.pyx"),
                                  join("mtrand", "*.pxi")])

    config.add_data_files((".", join("mtrand", "randomkit.h")))
    config.add_data_dir("tests")
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``odr`` package."""
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration("odr", parent_package, top_path)

    blas_info = get_info("blas_opt")
    libodr_files = ["d_odr.f", "d_mprec.f", "dlunoc.f"]
    if blas_info:
        libodr_files.append("d_lpk.f")
    else:
        # Fall back to the bundled reference BLAS sources.
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append("d_lpkbls.f")

    config.add_library("odrpack",
                       sources=[join("odrpack", x) for x in libodr_files])

    # pop() keeps these keys out of the **blas_info splat below.
    libraries = ["odrpack"] + blas_info.pop("libraries", [])
    include_dirs = ["."] + blas_info.pop("include_dirs", [])

    config.add_extension("__odrpack",
                         sources=["__odrpack.c"],
                         libraries=libraries,
                         include_dirs=include_dirs,
                         depends=["odrpack.h"],
                         **blas_info)

    config.add_data_dir("tests")
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``vista_directory`` package."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration("vista_directory", parent_package, top_path)
    config.add_extension("vista", ["vista.c"])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for ``gufunc_sampler``."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('gufunc_sampler', parent_package, top_path)
    # .c.src is a numpy templated-C source, expanded at build time.
    config.add_extension('_gs_kernels', ['_gs_kernels.c.src'])
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``f2py_ext`` example."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration("f2py_ext", parent_package, top_path)
    # Signature file plus the fixed-form Fortran implementation.
    config.add_extension("fib2", ["src/fib2.pyf", "src/fib1.f"])
    config.add_data_dir("tests")
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``cluster`` package."""
    from numpy.distutils.misc_util import Configuration

    cblas_libs, blas_info = get_blas_info()

    libraries = []
    if os.name == 'posix':
        cblas_libs.append('m')
        libraries.append('m')

    config = Configuration('cluster', parent_package, top_path)

    config.add_extension('_hierarchical',
                         sources=['_hierarchical.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)

    # FIX: concatenate the BLAS include dirs instead of placing the popped
    # list as a single (nested) element of include_dirs; pop() also keeps
    # 'include_dirs' out of the **blas_info splat below.
    config.add_extension(
        '_k_means',
        libraries=cblas_libs,
        sources=['_k_means.c'],
        include_dirs=([join('..', 'src', 'cblas'), numpy.get_include()]
                      + blas_info.pop('include_dirs', [])),
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info
    )
    return config
def configuration(parent_package='', top_path=None): config = Configuration('models', parent_package, top_path) # newrand wrappers config.add_extension( '_newrand', sources=['_newrand.pyx'], include_dirs=[numpy.get_include(), join('src', 'newrand')], depends=[join('src', 'newrand', 'newrand.h')], language='c++', # Use C++11 random number generator fix extra_compile_args=['-std=c++11']) # liblinear module libraries = [] if os.name == 'posix': libraries.append('m') # precompile liblinear to use C++11 flag config.add_library( 'liblinear-skl', sources=[ join('src', 'liblinear', 'linear.cpp'), join('src', 'liblinear', 'tron.cpp') ], depends=[ join('src', 'liblinear', 'linear.h'), join('src', 'liblinear', 'tron.h'), join('src', 'newrand', 'newrand.h') ], # Force C++ linking in case gcc is picked up instead # of g++ under windows with some versions of MinGW extra_link_args=['-lstdc++'], # Use C++11 to use the random number generator fix extra_compiler_args=['-std=c++11'], ) liblinear_sources = ['_liblinear.pyx'] liblinear_depends = [ join('src', 'liblinear', '*.h'), join('src', 'newrand', 'newrand.h'), join('src', 'liblinear', 'liblinear_helper.c') ] config.add_extension( '_liblinear', sources=liblinear_sources, libraries=['liblinear-skl'] + libraries, include_dirs=[ join('.', 'src', 'liblinear'), join('.', 'src', 'newrand'), # join('.'), numpy.get_include() ], depends=liblinear_depends, # extra_compile_args=['-O0 -fno-inline'], ) # end liblinear module config.ext_modules = cythonize(config.ext_modules, compiler_directives={'language_level': 3}, annotate=True) return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy.distutils configuration for the scikit-learn package."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    import numpy

    # needs to be called during build otherwise show_version may fail sometimes
    get_info('blas_opt', 0)

    libraries = ['m'] if os.name == 'posix' else []

    config = Configuration('sklearn', parent_package, top_path)

    # submodules with build utilities
    config.add_subpackage('__check_build')
    config.add_subpackage('_build_utils')

    # submodules which do not have their own setup.py;
    # we must manually add sub-submodules & tests
    for pkg in ('compose', 'covariance', 'cross_decomposition',
                'feature_selection', 'gaussian_process', 'inspection',
                'mixture', 'model_selection', 'neural_network',
                'preprocessing', 'semi_supervised'):
        config.add_subpackage(pkg)
        config.add_subpackage(pkg + '/tests')

    # submodules which have their own setup.py
    for pkg in ('cluster', 'datasets', 'decomposition', 'ensemble',
                'externals', 'feature_extraction', 'manifold', 'metrics',
                'neighbors', 'tree', 'utils', 'svm', 'linear_model'):
        config.add_subpackage(pkg)

    # add cython extension module for isotonic regression
    config.add_extension('_isotonic',
                         sources=['_isotonic.pyx'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)

    # add the test directory
    config.add_subpackage('tests')

    maybe_cythonize_extensions(top_path, config)
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``numpy.core`` package.

    Defines code-generation hooks (config.h, _numpyconfig.h, the numpy/ufunc
    C APIs) and registers the core C extensions (multiarray, umath,
    scalarmath, _dotblas, test modules) plus the npymath/npysort libraries.
    Relies on many module-level helpers defined elsewhere in this file
    (check_mathlib, CallOnceOnly, is_npy_no_signal, ...).
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # On released versions, a C API mismatch is a hard error instead of a
    # warning.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py as a module under a flattened
    # name (NOTE(review): `imp` and the 'U' open mode are deprecated APIs).
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches expensive configure-style checks so they run only once even
    # though several generator hooks below need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        # Generate (or reuse) <build_dir>/include/numpy/config.h.
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                         headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError(
                        "Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            # Echo the generated header into the build log.
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Header is up to date; recover MATHLIB from the existing file so
            # the extension still links against the right math libraries.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Factory: returns a build hook that runs one of the
        # code_generators/*.py scripts to emit a C API header + doc file.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.numpy_include_dirs.extend(config.paths('include'))

    # Common dependency list shared by several extensions below.
    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir, 'genapi.py'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api])

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    config.add_installed_library('npymath',
                                 sources=[join('src', 'npymath',
                                               'npy_math.c.src'),
                                          join('src', 'npymath',
                                               'ieee754.c.src'),
                                          join('src', 'npymath',
                                               'npy_math_complex.c.src'),
                                          join('src', 'npymath',
                                               'halffloat.c'),
                                          get_mathlib_info],
                                 install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
                              subst_dict)

    #######################################################################
    #                          npysort library                            #
    #######################################################################

    # This library is created for the build but it is not installed
    config.add_library('npysort',
                       sources=[join('src', 'npysort', 'sort.c.src')])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src'),
                   join(local_dir, subpath, 'nditer_templ.c.src'),
                   join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
                   join(local_dir, subpath, 'einsum.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'array_assign.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'numpyos.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'private', 'lowlevel_strided_loops.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'oldnumeric.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_deprecated_api.h'),
        join('include', 'numpy', '_numpyconfig.h.in'),
        ]

    multiarray_src = [
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_assign.c'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'numpyos.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'ucsnarrow.c')]

    if not ENABLE_SEPARATE_COMPILATION:
        # Single-translation-unit build: compile everything via the one-file
        # wrapper and track the real sources only as dependencies.
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [join('src', 'multiarray',
                               'multiarraymodule_onefile.c')]
        multiarray_src.append(generate_multiarray_templated_sources)

    config.add_extension('multiarray',
                         sources=multiarray_src +
                         [generate_config_h,
                          generate_numpyconfig_h,
                          generate_numpy_api,
                          join(codegen_dir, 'generate_numpy_api.py'),
                          join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'])

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        # NOTE: For manual template conversion of loops.h.src, read the note
        # in that file.
        sources = [join(local_dir, subpath, 'loops.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        # Regenerate __umath_generated.c from generate_umath.py when stale.
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'ufunc_type_resolution.c')]

    umath_deps = [
        generate_umath_py,
        join(codegen_dir, 'generate_ufunc_api.py')]

    if not ENABLE_SEPARATE_COMPILATION:
        # Same one-file trick as for multiarray above.
        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))

    config.add_extension('umath',
                         sources=umath_src +
                         [generate_config_h,
                          generate_numpyconfig_h,
                          generate_umath_c,
                          generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                        scalarmath module                            #
    #######################################################################

    config.add_extension('scalarmath',
                         sources=[join('src', 'scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         depends=deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                          _dotblas module                            #
    #######################################################################

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)
    #blas_info = {}

    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[join('blasdot', '_dotblas.c'),
                                  join('blasdot', 'cblas.h'),
                                  ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                         sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                         sources=[join('src', 'multiarray',
                                       'multiarray_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package="", top_path=None):
    """Build the numpy.distutils configuration for ``sklearn.ensemble``.

    Registers the classic gradient-boosting Cython extension plus the
    histogram-based gradient-boosting extensions living under
    ``_hist_gradient_boosting``.
    """
    config = Configuration("ensemble", parent_package, top_path)

    numpy_include = numpy.get_include()

    config.add_extension("_gradient_boosting",
                         sources=["_gradient_boosting.pyx"],
                         include_dirs=[numpy_include])

    # Histogram-based gradient boosting files
    for submodule in ("_gradient_boosting", "histogram", "splitting",
                      "_binning", "_predictor", "_loss", "common", "utils"):
        config.add_extension(
            "_hist_gradient_boosting." + submodule,
            sources=["_hist_gradient_boosting/" + submodule + ".pyx"],
            include_dirs=[numpy_include])

    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``scipy.spatial`` package.

    Registers the qhull, ckdtree and _distance_wrap extensions and the test
    data directory.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from numpy.distutils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # qhull
    qhull_src = list(glob.glob(join(dirname(__file__), 'qhull',
                                    'src', '*.c')))

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # NOTE(review): this appends a *list* as one element; numpy.distutils
    # flattens nested include_dirs sequences, so this works as intended.
    inc_dirs.append(get_numpy_include_dirs())

    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)

    def get_qhull_misc_config(ext, build_dir):
        # Generate a header file containing defines
        config_cmd = config.get_config_cmd()
        defines = []
        if config_cmd.check_func('open_memstream', decl=True, call=True):
            defines.append(('HAVE_OPEN_MEMSTREAM', '1'))
        target = join(dirname(__file__), 'qhull_misc_config.h')
        with open(target, 'w') as f:
            for name, value in defines:
                f.write('#define {0} {1}\n'.format(name, value))

    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src +
                         [get_qhull_misc_config],
                         **cfg)

    # cKDTree
    ckdtree_src = ['query.cxx',
                   'build.cxx',
                   'globals.cxx',
                   'cpp_exc.cxx',
                   'query_pairs.cxx',
                   'count_neighbors.cxx',
                   'query_ball_point.cxx',
                   'query_ball_tree.cxx',
                   'sparse_distances.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = ['ckdtree_decl.h',
                       'cpp_exc.h',
                       'ckdtree_methods.h',
                       'cpp_utils.h',
                       'rectangle.h',
                       'distance.h',
                       'distance_box.h',
                       'ordered_pair.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=['ckdtree.cxx'] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])

    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))

    return config
def configuration(parent_package="", top_path=None):
    """Build configuration for ``scipy.stats._unuran``.

    Verifies the vendored UNU.RAN git submodule is present, then registers the
    ``unuran_wrapper`` extension built from the wrapper plus all UNU.RAN C
    sources, with the configure-style macros UNU.RAN expects.
    """
    from numpy.distutils.misc_util import Configuration
    from scipy._lib._unuran_utils import _unuran_dir

    # _unuran_dir(ret_path=True) returns a pathlib.Path to the submodule;
    # a missing README.md means the submodule was never checked out.
    if not os.path.exists(_unuran_dir(ret_path=True) / 'README.md'):
        raise RuntimeError("Missing the `unuran` submodule! Run `git "
                           "submodule update --init` to fix this.")

    config = Configuration("_unuran", parent_package, top_path)

    # UNU.RAN info
    UNURAN_DIR = _unuran_dir(ret_path=True).resolve()
    UNURAN_VERSION = _get_version(UNURAN_DIR, "unuran/configure.ac",
                                  "AM_INIT_AUTOMAKE")

    # Pre-baked results of UNU.RAN's autoconf checks (normally produced by
    # ./configure); assumed valid for every platform scipy builds on.
    DEFINE_MACROS = [
        ("HAVE_ALARM", "1"),
        ("HAVE_DECL_ALARM", "1"),
        ("HAVE_DECL_HUGE_VAL", "1"),
        ("HAVE_DECL_INFINITY", "1"),
        ("HAVE_DECL_ISFINITE", "1"),
        ("HAVE_DECL_ISINF", "1"),
        ("HAVE_DECL_ISNAN", "1"),
        ("HAVE_DECL_LOG1P", "1"),
        ("HAVE_DECL_SIGNAL", "1"),
        ("HAVE_DECL_SNPRINTF", "1"),
        ("HAVE_DECL_VSNPRINTF", "1"),
        ("HAVE_FLOAT_H", "1"),
        ("HAVE_FLOOR", "1"),
        ("HAVE_IEEE_COMPARISONS", "1"),
        ("HAVE_INTTYPES_H", "1"),
        ("HAVE_LIBM", "1"),
        ("HAVE_LIMITS_H", "1"),
        ("HAVE_POW", "1"),
        ("HAVE_SIGNAL", "1"),
        ("HAVE_SQRT", "1"),
        ("HAVE_STDINT_H", "1"),
        ("HAVE_STDLIB_H", "1"),
        ("HAVE_STRCHR", "1"),
        ("HAVE_STRING_H", "1"),
        ("HAVE_STRTOL", "1"),
        ("HAVE_STRTOUL", "1"),
        ("LT_OBJDIR", '".libs/"'),
        ("PACKAGE", '"unuran"'),
        ("PACKAGE_BUGREPORT", '"*****@*****.**"'),
        ("PACKAGE_NAME", '"unuran"'),
        ("PACKAGE_STRING", '"unuran %s"' % UNURAN_VERSION),
        ("PACKAGE_TARNAME", '"unuran"'),
        ("PACKAGE_URL", '""'),
        ("PACKAGE_VERSION", '"%s"' % UNURAN_VERSION),
        ("STDC_HEADERS", "1"),
        ("UNUR_ENABLE_INFO", "1"),
        ("VERSION", '"%s"' % UNURAN_VERSION),
        ("HAVE_CONFIG_H", "1"),
        ("_ISOC99_SOURCE", "1"),
    ]

    UNURAN_DIRS = [
        os.path.join("unuran", "src"),
        os.path.join("unuran", "src", "distr"),
        os.path.join("unuran", "src", "distributions"),
        os.path.join("unuran", "src", "methods"),
        os.path.join("unuran", "src", "parser"),
        os.path.join("unuran", "src", "specfunct"),
        os.path.join("unuran", "src", "urng"),
        os.path.join("unuran", "src", "utils"),
        os.path.join("unuran", "src", "tests"),
    ]

    UNURAN_SOURCE_DIRS = [UNURAN_DIR / dir_ for dir_ in UNURAN_DIRS]

    # [1:] skips the top-level "unuran/src" directory, which contains no
    # compilable sources of its own.
    sources = _get_sources(UNURAN_SOURCE_DIRS[1:])

    ext = config.add_extension(
        "unuran_wrapper",
        sources=["unuran_wrapper.c"] + sources,
        libraries=[],
        include_dirs=[str(dir_.resolve()) for dir_ in UNURAN_SOURCE_DIRS]
        + [os.path.join(os.path.dirname(__file__), "..", "..",
                        "_lib", "src")]
        + [os.path.dirname(__file__)],
        language="c",
        define_macros=DEFINE_MACROS,
    )
    # Hook defined elsewhere in this file; runs before compilation.
    ext.pre_build_hook = unuran_pre_build_hook

    config.add_data_files("*.pxd")
    config.add_data_files("*.pyi")

    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``random`` package (randomgen-era layout).

    Registers one extension per bit generator (entropy, dsfmt, mt19937,
    philox, threefry, xoshiro*, pcg*), the shared ``common`` module, the
    Generator/bounded-integers modules and the legacy ``mtrand`` module.
    Relies on module-level ``is_msvc`` / ``platform_bits`` defined elsewhere
    in this file.
    """
    from numpy.distutils.misc_util import Configuration, get_mathlibs
    config = Configuration('random', parent_package, top_path)

    def generate_libraries(ext, build_dir):
        # Build hook: append the platform math libraries (plus the Windows
        # crypto/system DLL import libs needed by the entropy sources).
        config_cmd = config.get_config_cmd()
        libs = get_mathlibs()
        if sys.platform == 'win32':
            libs.extend(['Advapi32', 'Kernel32'])
        ext.libraries.extend(libs)
        return None

    # enable unix large file support on 32 bit systems
    # (64 bit off_t, lseek -> lseek64 etc.)
    if sys.platform[:3] == "aix":
        defs = [('_LARGE_FILES', None)]
    else:
        defs = [('_FILE_OFFSET_BITS', '64'),
                ('_LARGEFILE_SOURCE', '1'),
                ('_LARGEFILE64_SOURCE', '1')]

    defs.append(('NPY_NO_DEPRECATED_API', 0))
    config.add_data_dir('tests')

    EXTRA_LINK_ARGS = []
    # Math lib
    EXTRA_LIBRARIES = ['m'] if os.name != 'nt' else []
    # Some bit generators exclude GCC inlining
    EXTRA_COMPILE_ARGS = ['-U__GNUC_GNU_INLINE__']
    if is_msvc and platform_bits == 32:
        # 32-bit windows requires explicit sse2 option
        EXTRA_COMPILE_ARGS += ['/arch:SSE2']
    elif not is_msvc:
        # Some bit generators require c99
        EXTRA_COMPILE_ARGS += ['-std=c99']
        INTEL_LIKE = any([val in k.lower() for k in platform.uname()
                          for val in ('x86', 'i686', 'i386', 'amd64')])
        if INTEL_LIKE:
            # Assumes GCC or GCC-like compiler
            EXTRA_COMPILE_ARGS += ['-msse2']

    # Use legacy integer variable sizes
    LEGACY_DEFS = [('NP_RANDOM_LEGACY', '1')]

    # Required defined for DSFMT size and to allow it to detect SSE2 using
    # config file information
    DSFMT_DEFS = [('DSFMT_MEXP', '19937'), ("HAVE_NPY_CONFIG_H", "1")]

    PCG64_DEFS = []
    # NOTE(review): the leading `1 or` makes this condition always true, so
    # emulated 128-bit math is forced on every platform (appears deliberate,
    # per the comment below).
    if 1 or sys.maxsize < 2 ** 32 or os.name == 'nt':
        # Force emulated mode here
        PCG64_DEFS += [('PCG_FORCE_EMULATED_128BIT_MATH', '1')]

    config.add_extension('entropy',
                         sources=['entropy.c', 'src/entropy/entropy.c'] +
                                 [generate_libraries],
                         libraries=EXTRA_LIBRARIES,
                         extra_compile_args=EXTRA_COMPILE_ARGS,
                         extra_link_args=EXTRA_LINK_ARGS,
                         depends=[join('src', 'splitmix64', 'splitmix.h'),
                                  join('src', 'entropy', 'entropy.h'),
                                  'entropy.pyx',
                                  ],
                         define_macros=defs,
                         )
    config.add_extension('dsfmt',
                         sources=['dsfmt.c', 'src/dsfmt/dSFMT.c',
                                  'src/dsfmt/dSFMT-jump.c',
                                  'src/aligned_malloc/aligned_malloc.c'],
                         include_dirs=['.', 'src', join('src', 'dsfmt')],
                         libraries=EXTRA_LIBRARIES,
                         extra_compile_args=EXTRA_COMPILE_ARGS,
                         extra_link_args=EXTRA_LINK_ARGS,
                         depends=[join('src', 'dsfmt', 'dsfmt.h'),
                                  'dsfmt.pyx',
                                  ],
                         define_macros=defs + DSFMT_DEFS,
                         )
    for gen in ['mt19937']:
        # gen.pyx, src/gen/gen.c, src/gen/gen-jump.c
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen),
                                      'src/{0}/{0}.c'.format(gen),
                                      'src/{0}/{0}-jump.c'.format(gen)],
                             include_dirs=['.', 'src', join('src', gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['%s.pyx' % gen],
                             define_macros=defs,
                             )
    for gen in ['philox', 'threefry', 'xoshiro256', 'xoshiro512',
                'pcg64', 'pcg32']:
        # gen.pyx, src/gen/gen.c
        _defs = defs + PCG64_DEFS if gen == 'pcg64' else defs
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen),
                                      'src/{0}/{0}.c'.format(gen)],
                             include_dirs=['.', 'src', join('src', gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['%s.pyx' % gen],
                             define_macros=_defs,
                             )
    for gen in ['common']:
        # gen.pyx
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             include_dirs=['.', 'src'],
                             depends=['%s.pyx' % gen],
                             define_macros=defs,
                             )
    for gen in ['generator', 'bounded_integers']:
        # gen.pyx, src/distributions/distributions.c
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen),
                                      join('src', 'distributions',
                                           'distributions.c')],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             include_dirs=['.', 'src'],
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['%s.pyx' % gen],
                             define_macros=defs,
                             )
    config.add_extension('mtrand',
                         sources=['mtrand.c',
                                  'src/legacy/distributions-boxmuller.c',
                                  'src/distributions/distributions.c'],
                         include_dirs=['.', 'src', 'src/legacy'],
                         libraries=EXTRA_LIBRARIES,
                         extra_compile_args=EXTRA_COMPILE_ARGS,
                         extra_link_args=EXTRA_LINK_ARGS,
                         depends=['mtrand.pyx'],
                         define_macros=defs + DSFMT_DEFS + LEGACY_DEFS,
                         )
    return config
def configuration(parent_package="", top_path=None):
    """Build configuration for the TFCE statistics package.

    Registers data/subpackages and three C++ Cython extensions, then
    monkey-patches numpy.distutils' build_src so .pyx files are cythonized
    to C++ with this project's options. PACKAGE_NAME, numpy, Cython,
    build_src and mkpath come from module level elsewhere in this file.
    """
    from numpy.distutils.misc_util import Configuration
    CONFIG = Configuration(PACKAGE_NAME,
                           parent_name=parent_package,
                           top_path=top_path)
    CONFIG.add_data_dir("adjacency_sets")
    CONFIG.add_subpackage("tmanalysis")
    CONFIG.add_subpackage("tools")
    CONFIG.add_subpackage("tm_multisurface")
    CONFIG.add_extension(
        "tfce",
        sources=["tfce.pyx"],
        include_dirs=["lib", numpy.get_include()],
        language="c++",
        extra_compile_args=["-std=c++11", "-Wno-unused", "-g"])
    CONFIG.add_extension(
        "cynumstats",
        sources=["cynumstats.pyx"],
        include_dirs=[numpy.get_include()],
        language="c++",
        extra_compile_args=["-std=c++11", "-Wno-unused", "-g"])
    CONFIG.add_extension(
        "adjacency",
        sources=["adjacency.pyx"],
        include_dirs=["lib", numpy.get_include()],
        language="c++",
        extra_compile_args=["-std=c++11", "-Wno-unused", "-g"])

    # Replacement for build_src.generate_a_pyrex_source: compiles every .pyx
    # source of `extension` to C++ under the build temp dir.
    # NOTE(review): the `base` and `source` parameters are immediately
    # shadowed by the loop variables below, so their incoming values are
    # never used — confirm this matches the patched method's call signature.
    def cythonize(self, base, ext_name, source, extension):
        target_ext = '.cpp'
        target_dir = CONFIG.get_build_temp_dir()
        target_dir = os.path.join(target_dir, "pyrex")
        # Mirror the dotted package path in the output directory layout.
        for package_name in extension.name.split('.')[:-1]:
            target_dir = os.path.join(target_dir, package_name)
        new_sources = []
        cython_targets = {}
        for source in extension.sources:
            (base, ext) = os.path.splitext(os.path.basename(source))
            new_sources.append(os.path.join(target_dir, base + target_ext))
            cython_targets[source] = new_sources[-1]
        module_name = extension.name
        for source in extension.sources:
            target = cython_targets[source]
            mkpath(os.path.dirname(target))
            options = Cython.Compiler.Main.CompilationOptions(
                defaults=Cython.Compiler.Main.default_options,
                include_path=extension.include_dirs,
                output_file=target,
                verbose=True,
                cplus=True)
            # NOTE(review): the compile result is not checked for errors.
            result = Cython.Compiler.Main.compile([source], options=options,
                                                  full_module_name=module_name)
        if len(new_sources) == 1:
            return new_sources[0]
        return new_sources

    # Monkey-patch numpy.distutils' build_src command class globally.
    build_src.build_src.generate_a_pyrex_source = cythonize
    CONFIG.make_config_py()
    return CONFIG
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``random`` subpackage.

    Builds the bit-generator extensions (mt19937, philox, pcg64, sfc64),
    the common/bit_generator helpers, the Generator machinery and the
    legacy ``mtrand`` module, all sharing one set of compile/link flags.

    Relies on module-level names: sys, os, platform, join, is_msvc,
    platform_bits.
    """
    from numpy.distutils.misc_util import Configuration, get_mathlibs
    config = Configuration('random', parent_package, top_path)

    def generate_libraries(ext, build_dir):
        # Append the platform math libraries (plus Win32 system libs) to
        # the extension at build time.
        config_cmd = config.get_config_cmd()
        libs = get_mathlibs()
        if sys.platform == 'win32':
            libs.extend(['Advapi32', 'Kernel32'])
        ext.libraries.extend(libs)
        return None

    # enable unix large file support on 32 bit systems
    # (64 bit off_t, lseek -> lseek64 etc.)
    if sys.platform[:3] == "aix":
        defs = [('_LARGE_FILES', None)]
    else:
        defs = [('_FILE_OFFSET_BITS', '64'),
                ('_LARGEFILE_SOURCE', '1'),
                ('_LARGEFILE64_SOURCE', '1')]
    defs.append(('NPY_NO_DEPRECATED_API', 0))
    config.add_data_dir('tests')
    EXTRA_LINK_ARGS = []
    # Math lib
    EXTRA_LIBRARIES = ['m'] if os.name != 'nt' else []
    # Some bit generators exclude GCC inlining
    EXTRA_COMPILE_ARGS = ['-U__GNUC_GNU_INLINE__']
    if is_msvc and platform_bits == 32:
        # 32-bit windows requires explicit sse2 option
        EXTRA_COMPILE_ARGS += ['/arch:SSE2']
    elif not is_msvc:
        # Some bit generators require c99
        EXTRA_COMPILE_ARGS += ['-std=c99']
        INTEL_LIKE = any(arch in platform.machine()
                         for arch in ('x86', 'i686', 'i386', 'amd64'))
        if INTEL_LIKE:
            # Assumes GCC or GCC-like compiler
            EXTRA_COMPILE_ARGS += ['-msse2']
    # Use legacy integer variable sizes
    LEGACY_DEFS = [('NP_RANDOM_LEGACY', '1')]
    PCG64_DEFS = []
    # One can force emulated 128-bit arithmetic if one wants.
    # PCG64_DEFS += [('PCG_FORCE_EMULATED_128BIT_MATH', '1')]
    for gen in ['mt19937']:
        # gen.pyx, src/gen/gen.c, src/gen/gen-jump.c
        config.add_extension('_{0}'.format(gen),
                             sources=['_{0}.c'.format(gen),
                                      'src/{0}/{0}.c'.format(gen),
                                      'src/{0}/{0}-jump.c'.format(gen)],
                             include_dirs=['.', 'src', join('src', gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['_%s.pyx' % gen],
                             define_macros=defs,
                             )
    for gen in ['philox', 'pcg64', 'sfc64']:
        # gen.pyx, src/gen/gen.c
        # pcg64 additionally picks up the (optional) emulation macros.
        _defs = defs + PCG64_DEFS if gen == 'pcg64' else defs
        config.add_extension('_{0}'.format(gen),
                             sources=['_{0}.c'.format(gen),
                                      'src/{0}/{0}.c'.format(gen)],
                             include_dirs=['.', 'src', join('src', gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['_%s.pyx' % gen,
                                      'bit_generator.pyx', 'bit_generator.pxd'],
                             define_macros=_defs,
                             )
    for gen in ['_common', '_bit_generator']:
        # gen.pyx
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen)],
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             extra_link_args=EXTRA_LINK_ARGS,
                             include_dirs=['.', 'src'],
                             depends=['%s.pyx' % gen, '%s.pxd' % gen,],
                             define_macros=defs,
                             )
    # Shared distribution sources used by the Generator-facing extensions.
    other_srcs = [
        'src/distributions/logfactorial.c',
        'src/distributions/distributions.c',
        'src/distributions/random_mvhg_count.c',
        'src/distributions/random_mvhg_marginals.c',
        'src/distributions/random_hypergeometric.c',
    ]
    for gen in ['_generator', '_bounded_integers']:
        # gen.pyx, src/distributions/distributions.c
        config.add_extension(gen,
                             sources=['{0}.c'.format(gen)] + other_srcs,
                             libraries=EXTRA_LIBRARIES,
                             extra_compile_args=EXTRA_COMPILE_ARGS,
                             include_dirs=['.', 'src'],
                             extra_link_args=EXTRA_LINK_ARGS,
                             depends=['%s.pyx' % gen],
                             define_macros=defs,
                             )
    # Legacy RandomState interface; adds the legacy distribution sources
    # and the NP_RANDOM_LEGACY macro on top of the common defs.
    config.add_extension('mtrand',
                         sources=['mtrand.c',
                                  'src/legacy/legacy-distributions.c',
                                  'src/distributions/logfactorial.c',
                                  'src/distributions/distributions.c'],
                         include_dirs=['.', 'src', 'src/legacy'],
                         libraries=EXTRA_LIBRARIES,
                         extra_compile_args=EXTRA_COMPILE_ARGS,
                         extra_link_args=EXTRA_LINK_ARGS,
                         depends=['mtrand.pyx'],
                         define_macros=defs + LEGACY_DEFS,
                         )
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``signal`` subpackage.

    Builds sigtools and the Cython-generated helpers; when the
    SCIPY_USE_PYTHRAN env var is truthy (the default), _max_len_seq_inner
    and _spectral are built as Pythran extensions instead of plain C.

    Relies on module-level names: os, numpy_nodepr_api.
    """
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.compiler_helper import set_c_flags_hook
    config = Configuration('signal', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_subpackage('windows')
    sigtools = config.add_extension('sigtools',
                                    sources=['sigtoolsmodule.c', 'firfilter.c',
                                             'medianfilter.c', 'lfilter.c.src',
                                             'correlate_nd.c.src'],
                                    depends=['sigtools.h'],
                                    include_dirs=['.'],
                                    **numpy_nodepr_api)
    # Hook adjusts C flags at build time (e.g. optimization level).
    sigtools._pre_build_hook = set_c_flags_hook
    if int(os.environ.get('SCIPY_USE_PYTHRAN', 1)):
        import pythran
        # Pythran extensions carry their full dotted name and repo-relative
        # sources, and are appended directly to ext_modules.
        ext = pythran.dist.PythranExtension(
            'scipy.signal._max_len_seq_inner',
            sources=["scipy/signal/_max_len_seq_inner.py"],
            config=['compiler.blas=none'])
        config.ext_modules.append(ext)
        ext = pythran.dist.PythranExtension(
            'scipy.signal._spectral',
            sources=["scipy/signal/_spectral.py"],
            config=['compiler.blas=none'])
        config.ext_modules.append(ext)
    else:
        # Fall back to the pre-generated C versions.
        config.add_extension('_spectral', sources=['_spectral.c'])
        config.add_extension('_max_len_seq_inner',
                             sources=['_max_len_seq_inner.c'])
    config.add_extension('_peak_finding_utils',
                         sources=['_peak_finding_utils.c'])
    config.add_extension('_sosfilt', sources=['_sosfilt.c'])
    config.add_extension('_upfirdn_apply', sources=['_upfirdn_apply.c'])
    spline_src = ['splinemodule.c', 'S_bspline_util.c', 'D_bspline_util.c',
                  'C_bspline_util.c', 'Z_bspline_util.c', 'bspline_util.c']
    config.add_extension('spline', sources=spline_src, **numpy_nodepr_api)
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``special`` subpackage.

    Registers the C helper libraries (c_misc, cephes), the Fortran
    libraries (mach, toms, amos, cdflib, specfun) and the extension
    modules that link against them (_cephes, specfun, orthogonal_eval,
    lambertw, _logit).

    Relies on module-level names: sys, join, get_python_inc,
    get_numpy_include_dirs, get_info.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('special', parent_package, top_path)
    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))
    # C libraries
    config.add_library('sc_c_misc',
                       sources=[join('c_misc', '*.c')],
                       include_dirs=[get_python_inc(),
                                     get_numpy_include_dirs()],
                       macros=define_macros)
    config.add_library('sc_cephes',
                       sources=[join('cephes', '*.c')],
                       include_dirs=[get_python_inc(),
                                     get_numpy_include_dirs()],
                       macros=define_macros)
    # Fortran libraries
    config.add_library('sc_mach', sources=[join('mach', '*.f')],
                       config_fc={'noopt': (__file__, 1)})
    # BUG FIX: the toms/amos source globs were swapped — sc_toms was built
    # from amos/*.f and sc_amos from toms/*.f.  Each library now compiles
    # the directory matching its name.
    config.add_library('sc_toms', sources=[join('toms', '*.f')])
    config.add_library('sc_amos', sources=[join('amos', '*.f')])
    config.add_library('sc_cdf', sources=[join('cdflib', '*.f')])
    config.add_library('sc_specfun', sources=[join('specfun', '*.f')])
    # Extension _cephes: the main ufunc module, linked against everything.
    sources = ['_cephesmodule.c', 'amos_wrappers.c', 'specfun_wrappers.c',
               'toms_wrappers.c', 'cdf_wrappers.c', 'ufunc_extras.c']
    config.add_extension('_cephes',
                         sources=sources,
                         libraries=['sc_amos', 'sc_toms', 'sc_c_misc',
                                    'sc_cephes', 'sc_mach', 'sc_cdf',
                                    'sc_specfun'],
                         depends=["ufunc_extras.h", "cephes.h",
                                  "amos_wrappers.h", "toms_wrappers.h",
                                  "cdf_wrappers.h", "specfun_wrappers.h",
                                  "c_misc/misc.h", "cephes_doc.h",
                                  "cephes/mconf.h", "cephes/cephes_names.h"],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))
    # Extension specfun (f2py-wrapped Fortran)
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         define_macros=[],
                         libraries=['sc_specfun'])
    # Extension orthogonal_eval
    config.add_extension('orthogonal_eval',
                         sources=['orthogonal_eval.c'],
                         define_macros=[],
                         extra_info=get_info("npymath"))
    # Extension lambertw
    config.add_extension('lambertw',
                         sources=['lambertw.c'],
                         define_macros=[],
                         extra_info=get_info("npymath"))
    # Extension _logit
    config.add_extension('_logit',
                         sources=['_logit.c.src'],
                         extra_info=get_info("npymath"))
    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``feature`` subpackage.

    Cythonizes each .pyx module and then registers the corresponding
    generated C extension.  Relies on module-level names: cython,
    base_path.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    config = Configuration('feature', parent_package, top_path)
    config.add_data_dir('tests')

    # Table of (module name, extra include dirs beyond numpy's headers).
    modules = [
        ('corner_cy', []),
        ('censure_cy', []),
        ('orb_cy', []),
        ('brief_cy', []),
        ('_texture', ['../_shared']),
        ('_hessian_det_appx', []),
        ('_hoghistogram', ['../_shared']),
    ]

    # Generate all C sources first, then register the extensions.
    for mod_name, _ in modules:
        cython([mod_name + '.pyx'], working_path=base_path)
    for mod_name, extra_includes in modules:
        config.add_extension(
            mod_name,
            sources=[mod_name + '.c'],
            include_dirs=[get_numpy_include_dirs()] + extra_includes)
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the top-level ``sklearn`` package.

    Registers all subpackages, the _hmmc Cython extension, and a bundled
    cblas library when no optimized BLAS with a C interface is found.

    Relies on module-level names: os, join, warnings.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    import numpy
    libraries = []
    if os.name == 'posix':
        # Link libm on POSIX platforms.
        libraries.append('m')
    config = Configuration('sklearn', parent_package, top_path)
    config.add_subpackage('__check_build')
    config.add_subpackage('svm')
    config.add_subpackage('datasets')
    config.add_subpackage('datasets/tests')
    config.add_subpackage('feature_extraction')
    config.add_subpackage('feature_extraction/tests')
    config.add_subpackage('cluster')
    config.add_subpackage('cluster/tests')
    config.add_subpackage('covariance')
    config.add_subpackage('covariance/tests')
    config.add_subpackage('decomposition')
    config.add_subpackage('decomposition/tests')
    config.add_subpackage("ensemble")
    config.add_subpackage("ensemble/tests")
    config.add_subpackage('feature_selection')
    config.add_subpackage('feature_selection/tests')
    # NOTE(review): 'utils' is added again after the cblas block below —
    # looks like this first add is redundant; confirm before removing.
    config.add_subpackage('utils')
    config.add_subpackage('utils/tests')
    config.add_subpackage('externals')
    config.add_subpackage('mixture')
    config.add_subpackage('mixture/tests')
    config.add_subpackage('gaussian_process')
    config.add_subpackage('gaussian_process/tests')
    config.add_subpackage('neighbors')
    config.add_subpackage('manifold')
    config.add_subpackage('metrics')
    config.add_subpackage('semi_supervised')
    config.add_subpackage("tree")
    config.add_subpackage("tree/tests")
    config.add_subpackage('metrics/tests')
    config.add_subpackage('metrics/cluster')
    config.add_subpackage('metrics/cluster/tests')
    # add cython extension module for hmm
    config.add_extension(
        '_hmmc',
        sources=['_hmmc.c'],
        include_dirs=[numpy.get_include()],
        libraries=libraries,
    )
    # some libs needs cblas, fortran-compiled BLAS will not be sufficient
    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        # Fall back to the bundled reference cblas sources.
        config.add_library('cblas', sources=[join('src', 'cblas', '*.c')])
        warnings.warn(BlasNotFoundError.__doc__)
    # the following packages depend on cblas, so they have to be build
    # after the above.
    config.add_subpackage('linear_model')
    config.add_subpackage('utils')
    # add the test directory
    config.add_subpackage('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``morphology`` subpackage.

    Cythonizes the .pyx sources (the C++ skeletonize_3d one separately)
    and registers the generated C/C++ extensions plus precomputed
    footprint decomposition data.

    Relies on module-level names: cython, base_path.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    config = Configuration('morphology', parent_package, top_path)
    # BUG FIX: '_extrema_cy.pyx' was cythonized twice (once in this batch
    # and once in a redundant standalone call below); the duplicate call
    # has been removed.
    cython([
        '_skeletonize_cy.pyx',
        '_convex_hull.pyx',
        '_grayreconstruct.pyx',
        '_extrema_cy.pyx'
    ], working_path=base_path)
    # _skeletonize_3d uses c++, so it must be cythonized separately
    cython(['_skeletonize_3d_cy.pyx.in'], working_path=base_path)
    cython(['_flood_fill_cy.pyx'], working_path=base_path)
    cython(['_max_tree.pyx'], working_path=base_path)
    config.add_extension('_skeletonize_cy',
                         sources=['_skeletonize_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_convex_hull',
                         sources=['_convex_hull.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_grayreconstruct',
                         sources=['_grayreconstruct.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_max_tree',
                         sources=['_max_tree.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_skeletonize_3d_cy',
                         sources=['_skeletonize_3d_cy.cpp'],
                         include_dirs=[get_numpy_include_dirs()],
                         language='c++')
    config.add_extension('_extrema_cy',
                         sources=['_extrema_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_flood_fill_cy',
                         sources=['_flood_fill_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    # add precomputed footprint decomposition data
    config.add_data_files('ball_decompositions.npy',
                          'disk_decompositions.npy')
    return config
def configuration(parent_package="", top_path=None): # noqa config = Configuration("tree", parent_package, top_path) libraries = [] if os.name == "posix": libraries.append("m") config.add_extension( "_oblique_tree", sources=["_oblique_tree.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], language="c++", ) config.add_extension( "_oblique_splitter", sources=["_oblique_splitter.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], language="c++", ) config.add_extension( "_tree", sources=["_tree.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], language="c++", ) config.add_extension( "_splitter", sources=["_splitter.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], language="c++", ) config.add_extension( "_criterion", sources=["_criterion.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], ) config.add_extension( "_utils", sources=["_utils.pyx"], include_dirs=[numpy.get_include()], libraries=libraries, extra_compile_args=["-O3"], ) config.add_subpackage("tests") config.add_data_files("_splitter.pxd") config.add_data_files("_tree.pxd") config.add_data_files("_criterion.pxd") config.add_data_files("_utils.pxd") config.add_data_files("_oblique_splitter.pxd") config.add_data_files("_oblique_tree.pxd") return config
# Patch MSVCCompiler so its library_dir_option uses the project-supplied
# _lib_dir_option (defined elsewhere in this script).
setattr(MSVCCompiler, 'library_dir_option',
        types.MethodType(_lib_dir_option, None, MSVCCompiler))
# Pick up Windows SDK headers/libs when the SDK env var is set.
sdkdir = os.environ.get('WindowsSdkDir')
if sdkdir:
    include_dirs.append(os.path.join(sdkdir, 'Include'))
    library_dirs.append(os.path.join(sdkdir, 'Lib'))
    # make sure we have mt.exe available in case we need it
    path = os.environ['PATH'].split(';')
    path.append(os.path.join(sdkdir, 'bin'))
    os.environ['PATH'] = ';'.join(path)
config = Configuration(name='slsqp')
# f2py-wrapped Fortran SLSQP optimizer.
config.add_extension('slsqp',
                     sources=['*.f', 'f2py/slsqp.pyf'],
                     include_dirs=include_dirs,
                     library_dirs=library_dirs)
config.add_data_files('LICENSE', 'README')
kwds = {'install_requires': ['numpy'],
        'version': '1.0.1',
        'zip_safe': False,
        'license': 'permissive open source',
        # NOTE: we use 'url' here, but it really translates to 'home-page'
        # in the metadata. Go figure.
        'url': 'http://www.pyopt.org',
        'package_data': {'openmdao.main': ['*.html']},
        }
# Merge the distutils configuration into the setup keyword arguments.
kwds.update(config.todict())
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``pyrex_ext`` subpackage.

    Registers the ``primes`` Pyrex/Cython extension and the tests
    data directory.
    """
    from numpy.distutils.misc_util import Configuration
    cfg = Configuration('pyrex_ext', parent_package, top_path)
    cfg.add_extension('primes', ['primes.pyx'])
    cfg.add_data_dir('tests')
    return cfg
def configuration(parent_package='',top_path=None):
    """Build configuration for the ``integrate`` subpackage.

    Registers the Fortran libraries (mach, quadpack, odepack, dop) and
    the extension modules that wrap them, all linked against LAPACK.

    Relies on module-level names: join, numpy_nodepr_api, __file__.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('integrate', parent_package, top_path)

    # Get a local copy of lapack_opt_info
    lapack_opt = dict(get_info('lapack_opt',notfound_action=2))
    # Pop off the libraries list so it can be combined with
    # additional required libraries
    lapack_libs = lapack_opt.pop('libraries', [])

    mach_src = [join('mach','*.f')]
    quadpack_src = [join('quadpack','*.f')]
    odepack_src = [join('odepack','*.f')]
    dop_src = [join('dop','*.f')]
    quadpack_test_src = [join('tests','_test_multivariate.c')]
    odeint_banded_test_src = [join('tests', 'banded5x5.f')]

    # mach is built without optimization (see config_fc noopt flag).
    config.add_library('mach', sources=mach_src,
                       config_fc={'noopt':(__file__,1)})
    config.add_library('quadpack', sources=quadpack_src)
    config.add_library('odepack', sources=odepack_src)
    config.add_library('dop', sources=dop_src)

    # Extensions
    # quadpack:
    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         libraries=(['quadpack', 'mach'] + lapack_libs),
                         depends=(['quadpack.h','__quadpack.h']
                                  + quadpack_src + mach_src),
                         **lapack_opt)
    # odepack
    odepack_libs = ['odepack','mach'] + lapack_libs
    odepack_opts = lapack_opt.copy()
    odepack_opts.update(numpy_nodepr_api)
    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **odepack_opts)
    # vode
    config.add_extension('vode',
                         sources=['vode.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)
    # lsoda
    config.add_extension('lsoda',
                         sources=['lsoda.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)
    # dop
    config.add_extension('_dop',
                         sources=['dop.pyf'],
                         libraries=['dop'],
                         depends=dop_src)
    config.add_extension('_test_multivariate',
                         sources=quadpack_test_src)
    # Fortran+f2py extension module for testing odeint.
    config.add_extension('_test_odeint_banded',
                         sources=odeint_banded_test_src,
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)

    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``qutipf90mc`` subpackage.

    Assembles the Fortran Monte-Carlo trajectory solver: zvode static
    library plus the qutraj_* sources, with LAPACK/BLAS linked in when
    numpy's build config reports them available.

    Relies on module-level names: join, np.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, NotFoundError
    config = Configuration('qutipf90mc', parent_package, top_path)
    sources = [
        'qutraj_run.pyf',
        'qutraj_precision.f90',
        'mt19937.f90',
        'linked_list.f90',
        'qutraj_general.f90',
        'qutraj_hilbert.f90',
        'qutraj_evolve.f90',
    ]
    libs = ['zvode']
    config.add_library('zvode', sources=[join('zvode', '*.f')])
    extra_compile_args = []
    extra_link_args = []
    #
    # LAPACK?
    #
    # Reads lapack info recorded in numpy's own build configuration.
    lapack_opt = np.__config__.lapack_opt_info
    if not lapack_opt:
        # raise NotFoundError,'no lapack resources found'
        # Best-effort: degrade to the no-linalg variant instead of failing.
        print("Warning: No lapack resource found. Linear algebra routines" +
              " like 'eigenvalues' and 'entropy' will not be available.")
        sources.append('qutraj_nolinalg.f90')
    else:
        sources.append('qutraj_linalg.f90')
        libs.extend(lapack_opt['libraries'])
    #
    # BLAS
    #
    if not lapack_opt:
        blas_opt = np.__config__.blas_opt_info
    else:
        # lapack_opt already carries the BLAS settings in this case.
        blas_opt = lapack_opt
    # Remove libraries key from blas_opt
    if 'libraries' in blas_opt:  # key doesn't exist on OS X ...
        libs.extend(blas_opt['libraries'])
    # Copy everything except 'libraries' (already merged into libs above)
    # so it can be splatted into add_extension.
    newblas = {}
    for key in blas_opt.keys():
        if key == 'libraries':
            continue
        newblas[key] = blas_opt[key]
    # Add this last
    sources.append('qutraj_run.f90')
    config.add_extension('qutraj_run',
                         sources=sources,
                         extra_compile_args=extra_compile_args,
                         extra_link_args=extra_link_args,
                         libraries=libs,
                         **newblas
                         )
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``special`` subpackage.

    Registers the C helper libraries (c_misc, cephes), the Fortran
    libraries (mach, amos, cdflib, specfun) and the extensions specfun,
    _ufuncs and _ufuncs_cxx.

    Relies on module-level names: sys, os, join, get_python_inc,
    get_numpy_include_dirs, get_info.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('special', parent_package, top_path)
    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))
    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # NOTE(review): get_numpy_include_dirs() returns a list, so this
    # inserts a nested list at position 0 — presumably numpy.distutils
    # flattens it downstream; confirm before changing.
    inc_dirs.insert(0, get_numpy_include_dirs())
    # C libraries
    config.add_library('sc_c_misc', sources=[join('c_misc', '*.c')],
                       include_dirs=[curdir] + inc_dirs,
                       macros=define_macros)
    config.add_library('sc_cephes', sources=[join('cephes', '*.c')],
                       include_dirs=[curdir] + inc_dirs,
                       macros=define_macros)
    # Fortran/C++ libraries
    config.add_library('sc_mach', sources=[join('mach', '*.f')],
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=[join('amos', '*.f')])
    config.add_library('sc_cdf', sources=[join('cdflib', '*.f')])
    config.add_library('sc_specfun', sources=[join('specfun', '*.f')])
    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         define_macros=[],
                         libraries=['sc_specfun'])
    # Extension _ufuncs: the main special-function ufunc module, linked
    # against all the helper libraries above.
    config.add_extension('_ufuncs',
                         libraries=['sc_amos', 'sc_c_misc', 'sc_cephes',
                                    'sc_mach', 'sc_cdf', 'sc_specfun'],
                         depends=["_logit.h", "cephes.h", "amos_wrappers.h",
                                  "cdf_wrappers.h", "specfun_wrappers.h",
                                  "c_misc/misc.h", "cephes/mconf.h",
                                  "cephes/cephes_names.h"],
                         sources=['_ufuncs.c', 'sf_error.c', '_logit.c.src',
                                  "amos_wrappers.c", "cdf_wrappers.c",
                                  "specfun_wrappers.c"],
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))
    # Extension _ufuncs_cxx: C++ parts (Faddeeva) kept in their own module.
    config.add_extension('_ufuncs_cxx',
                         sources=['_ufuncs_cxx.cxx', 'sf_error.c',
                                  '_faddeeva.cxx', 'Faddeeva.cc', ],
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))
    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')
    return config
def configuration(parent_package='', top_path=None):
    """Configure all packages that need to be built.

    Builds the SHTOOLS Fortran library from src/ (excluding the two
    wrapper files), then the f2py-wrapped pyshtools._SHTOOLS extension
    linked against it plus FFTW, BLAS/LAPACK and (non-Windows) libm.

    Relies on module-level names: os, sys, sysconfig, FCompiler,
    get_default_fcompiler, Configuration, get_info, dict_append.
    """
    config = Configuration('', parent_package, top_path)
    kwargs = {'libraries': [], 'include_dirs': [], 'library_dirs': []}
    # numpy.distutils.fcompiler.FCompiler doesn't support .F95 extension
    compiler = FCompiler(get_default_fcompiler())
    compiler.src_extensions.append('.F95')
    compiler.language_map['.F95'] = 'f90'
    # collect all Fortran sources
    files = os.listdir('src')
    exclude_sources = ['PlanetsConstants.f95', 'PythonWrapper.f95']
    sources = [os.path.join('src', file) for file in files
               if file.lower().endswith(('.f95', '.c'))
               and file not in exclude_sources]
    # (from http://stackoverflow.com/questions/14320220/
    # testing-python-c-libraries-get-build-path)):
    # Reconstruct the platform-specific build temp dir name so the
    # compiled SHTOOLS library can be found at link time.
    build_lib_dir = "{dirname}.{platform}-{version[0]}.{version[1]}"
    dirparams = {'dirname': 'temp',
                 'platform': sysconfig.get_platform(),
                 'version': sys.version_info}
    libdir = os.path.join('build', build_lib_dir.format(**dirparams))
    print('searching SHTOOLS in:', libdir)
    # Fortran compilation
    config.add_library('SHTOOLS', sources=sources)
    # SHTOOLS
    kwargs['libraries'].extend(['SHTOOLS'])
    kwargs['include_dirs'].extend([libdir])
    kwargs['library_dirs'].extend([libdir])
    kwargs['f2py_options'] = ['--quiet']
    # FFTW info
    fftw_info = get_info('fftw', notfound_action=2)
    dict_append(kwargs, **fftw_info)
    if sys.platform != 'win32':
        kwargs['libraries'].extend(['m'])
    # BLAS / Lapack info
    lapack_info = get_info('lapack_opt', notfound_action=2)
    blas_info = get_info('blas_opt', notfound_action=2)
    dict_append(kwargs, **blas_info)
    dict_append(kwargs, **lapack_info)
    if sys.platform == 'win32':
        # runtime_library_dirs is not supported by MSVC.
        kwargs['runtime_library_dirs'] = []
    config.add_extension('pyshtools._SHTOOLS',
                         sources=['src/pyshtools.pyf',
                                  'src/PythonWrapper.f95'],
                         **kwargs)
    return config
# Setup script for the MBI package: builds the f2py-wrapped Fortran
# extension MBIlib from the src/ sources and installs with gnu95.
import setuptools
from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration

sources = [
    'src/basis.f90',
    'src/evaluate.f90',
    'src/jacobian.f90',
    'src/knots.f90',
    'src/paramuni.f90',
]
config = Configuration('MBI')
config.add_extension('MBIlib',
                     sources=sources)  # , extra_compile_args=['--fcompiler=gnu95'])
kwds = {
    'install_requires': ['numpy', 'scipy'],
    'version': '0.1',
    'zip_safe': False,
    'license': 'LGPL',
    'packages': ['MBI'],
    # Force build+install with the gnu95 Fortran compiler.
    'script_args': ['build', '--fcompiler=gnu95', 'install'],
}
# Merge the distutils configuration into the setup keyword arguments.
kwds.update(config.todict())
setup(**kwds)
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``linalg`` subpackage.

    Detects LAPACK/BLAS (failing hard if absent), computes the set of
    single-precision routine names to skip for the installed ATLAS
    version, and registers the fblas/cblas/flapack/clapack wrappers plus
    the _flinalg, calc_lwork and atlas_version extensions.

    Relies on module-level names: os, join, newer, newer_group,
    skip_single_routines, using_lapack_blas, needs_cblas_wrapper.
    """
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from interface_gen import generate_interface
    config = Configuration('linalg', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # Extract the ATLAS version from the ATLAS_INFO define, if present.
    atlas_version = ([v[3:-3] for k, v in lapack_opt.get('define_macros', [])
                      if k == 'ATLAS_INFO'] + [None])[0]
    if atlas_version:
        print('ATLAS version: %s' % atlas_version)

    target_dir = ''
    skip_names = {'clapack': [], 'flapack': [], 'cblas': [], 'fblas': []}
    if skip_single_routines:
        target_dir = 'dbl'
        skip_names['clapack'].extend(
            'sgesv cgesv sgetrf cgetrf sgetrs cgetrs sgetri cgetri'
            ' sposv cposv spotrf cpotrf spotrs cpotrs spotri cpotri'
            ' slauum clauum strtri ctrtri'.split())
        skip_names['flapack'].extend(skip_names['clapack'])
        skip_names['flapack'].extend(
            'sgesdd cgesdd sgelss cgelss sgeqrf cgeqrf sgeev cgeev'
            ' sgegv cgegv ssyev cheev slaswp claswp sgees cgees'
            ' sggev cggev'.split())
        skip_names['cblas'].extend('saxpy caxpy'.split())
        skip_names['fblas'].extend(skip_names['cblas'])
        skip_names['fblas'].extend(
            'srotg crotg srotmg srot csrot srotm sswap cswap sscal cscal'
            ' csscal scopy ccopy sdot cdotu cdotc snrm2 scnrm2 sasum scasum'
            ' isamax icamax sgemv cgemv chemv ssymv strmv ctrmv'
            ' sgemm cgemm'.split())
    if using_lapack_blas:
        target_dir = join(target_dir, 'blas')
        skip_names['fblas'].extend('drotmg srotmg drotm srotm'.split())
    if atlas_version == '3.2.1_pre3.3.6':
        target_dir = join(target_dir, 'atlas321')
        skip_names['clapack'].extend(
            'sgetri dgetri cgetri zgetri spotri dpotri cpotri zpotri'
            ' slauum dlauum clauum zlauum strtri dtrtri ctrtri ztrtri'.split())
    elif atlas_version and atlas_version > '3.4.0' and atlas_version <= '3.5.12':
        skip_names['clapack'].extend('cpotrf zpotrf'.split())

    def generate_pyf(extension, build_dir):
        """Generate the .pyf interface for a c-prefixed (ATLAS) wrapper.

        Without ATLAS, write an empty stub module; otherwise regenerate
        the interface when dependencies are newer than the target.
        """
        name = extension.name.split('.')[-1]
        target = join(build_dir, target_dir, name + '.pyf')
        if name[0] == 'c' and atlas_version is None and newer(__file__, target):
            f = open(target, 'w')
            f.write('python module ' + name + '\n')
            f.write('usercode void empty_module(void) {}\n')
            f.write('interface\n')
            f.write('subroutine empty_module()\n')
            f.write('intent(c) empty_module\n')
            f.write('end subroutine empty_module\n')
            f.write('end interface\nend python module' + name + '\n')
            f.close()
            return target
        if newer_group(extension.depends, target):
            generate_interface(name, extension.depends[0], target,
                               skip_names[name])
        return target

    # fblas:
    if needs_cblas_wrapper(lapack_opt):
        # BUG FIX: a stray trailing comma previously made ``sources`` a
        # one-element tuple containing the list instead of the list itself.
        sources = ['fblas.pyf.src', join('src', 'fblaswrap_veclib_c.c')]
    else:
        sources = ['fblas.pyf.src', join('src', 'fblaswrap.f')]
    # Note: `depends` needs to include fblaswrap(_veclib) for both files to be
    # included by "python setup.py sdist"
    config.add_extension('fblas',
                         sources=sources,
                         depends=['fblas_l?.pyf.src',
                                  join('src', 'fblaswrap_veclib_c.c'),
                                  join('src', 'fblaswrap.f')],
                         extra_info=lapack_opt)
    # cblas:
    config.add_extension('cblas',
                         sources=[generate_pyf],
                         depends=['generic_cblas.pyf', 'generic_cblas1.pyf',
                                  'interface_gen.py'],
                         extra_info=lapack_opt)
    # flapack:
    config.add_extension('flapack',
                         sources=['flapack.pyf.src'],
                         depends=['flapack_user.pyf.src'],
                         extra_info=lapack_opt)
    # clapack:
    config.add_extension('clapack',
                         sources=[generate_pyf],
                         depends=['generic_clapack.pyf', 'interface_gen.py'],
                         extra_info=lapack_opt)
    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'), join('src', 'lu.f')],
                         extra_info=lapack_opt)
    # calc_lwork:
    config.add_extension('calc_lwork',
                         [join('src', 'calc_lwork.f')],
                         extra_info=lapack_opt)
    # atlas_version:
    if os.name == 'nt' and 'FPATH' in os.environ:
        define_macros = [('NO_ATLAS_INFO', 1)]
    else:
        define_macros = []
    config.add_extension('atlas_version',
                         ['atlas_version.c'],
                         extra_info=lapack_opt,
                         define_macros=define_macros)
    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    return config
def configuration(parent_package='',top_path=None):
    """Build configuration for the ``optimize`` subpackage.

    Registers the minpack and Zeros helper libraries and the optimizer
    extension modules (_minpack, _zeros, _lbfgsb, moduleTNC, _cobyla,
    minpack2, _slsqp, _nnls, _group_columns).

    Relies on module-level names: join, numpy_nodepr_api.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('optimize',parent_package, top_path)

    minpack_src = [join('minpack','*f')]
    config.add_library('minpack',sources=minpack_src)
    config.add_extension('_minpack',
                         sources=['_minpackmodule.c'],
                         libraries=['minpack'],
                         depends=(["minpack.h","__minpack.h"]
                                  + minpack_src),
                         **numpy_nodepr_api)

    rootfind_src = [join('Zeros','*.c')]
    rootfind_hdr = [join('Zeros','zeros.h')]
    config.add_library('rootfind',
                       sources=rootfind_src,
                       headers=rootfind_hdr,
                       **numpy_nodepr_api)
    config.add_extension('_zeros',
                         sources=['zeros.c'],
                         libraries=['rootfind'],
                         depends=(rootfind_src + rootfind_hdr),
                         **numpy_nodepr_api)

    lapack = get_info('lapack_opt')
    # Merge the deprecation-suppression macros into the lapack info so
    # extensions built with **lapack also pick them up.
    if 'define_macros' in numpy_nodepr_api:
        if ('define_macros' in lapack) and (lapack['define_macros'] is not None):
            lapack['define_macros'] = (lapack['define_macros'] +
                                       numpy_nodepr_api['define_macros'])
        else:
            lapack['define_macros'] = numpy_nodepr_api['define_macros']
    sources = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
    config.add_extension('_lbfgsb',
                         sources=[join('lbfgsb',x) for x in sources],
                         **lapack)

    sources = ['moduleTNC.c','tnc.c']
    config.add_extension('moduleTNC',
                         sources=[join('tnc',x) for x in sources],
                         depends=[join('tnc','tnc.h')],
                         **numpy_nodepr_api)

    config.add_extension('_cobyla',
                         sources=[join('cobyla',x) for x in
                                  ['cobyla.pyf', 'cobyla2.f', 'trstlp.f']],
                         **numpy_nodepr_api)

    sources = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
    config.add_extension('minpack2',
                         sources=[join('minpack2',x) for x in sources],
                         **numpy_nodepr_api)

    sources = ['slsqp.pyf', 'slsqp_optmz.f']
    config.add_extension('_slsqp',
                         sources=[join('slsqp', x) for x in sources],
                         **numpy_nodepr_api)

    config.add_extension('_nnls',
                         sources=[join('nnls', x)
                                  for x in ["nnls.f","nnls.pyf"]],
                         **numpy_nodepr_api)

    config.add_extension('_group_columns',
                         sources=['_group_columns.c'],)

    config.add_subpackage('_lsq')
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``spatial`` subpackage.

    Registers the qhull wrapper, the cKDTree C++ extension, the distance
    modules (C wrapper plus pybind11), Voronoi/Hausdorff helpers, the
    transform subpackage, and license/stub data files.

    Relies on module-level names: glob, join, dirname, pre_build_hook.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from scipy._build_utils.system_info import get_info
    from scipy._build_utils import combine_dict, uses_blas64, numpy_nodepr_api
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    from distutils.sysconfig import get_python_inc
    import pybind11
    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')
    # spatial.transform
    config.add_subpackage('transform')
    # qhull
    qhull_src = sorted(glob.glob(join(dirname(__file__),
                                      'qhull_src', 'src', '*.c')))
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))
    inc_dirs.append(join(dirname(dirname(__file__)), '_build_utils', 'src'))
    # Choose the 64-bit-index LAPACK when the build is configured for it.
    if uses_blas64():
        lapack_opt = get_info('lapack_ilp64_opt')
    else:
        lapack_opt = get_info('lapack_opt')
    cfg = combine_dict(lapack_opt, include_dirs=inc_dirs)
    config.add_extension('_qhull',
                         sources=['_qhull.c', 'qhull_misc.c'] + qhull_src,
                         **cfg)
    # cKDTree
    ckdtree_src = ['query.cxx', 'build.cxx', 'query_pairs.cxx',
                   'count_neighbors.cxx', 'query_ball_point.cxx',
                   'query_ball_tree.cxx', 'sparse_distances.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]
    ckdtree_headers = ['ckdtree_decl.h', 'coo_entries.h', 'distance_base.h',
                       'distance.h', 'ordered_pair.h', 'rectangle.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]
    ckdtree_dep = ['_ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    ext = config.add_extension('_ckdtree',
                               sources=['_ckdtree.cxx'] + ckdtree_src,
                               depends=ckdtree_dep,
                               include_dirs=inc_dirs
                                            + [join('ckdtree', 'src')])
    # C++ flags hook (e.g. sets the C++ standard).
    ext._pre_build_hook = set_cxx_flags_hook
    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs(),
                                       join(dirname(dirname(__file__)),
                                            '_lib')],
                         extra_info=get_misc_info("npymath"),
                         **numpy_nodepr_api)
    distance_pybind_includes = [pybind11.get_include(True),
                                pybind11.get_include(False),
                                get_numpy_include_dirs()]
    ext = config.add_extension('_distance_pybind',
                               sources=[join('src', 'distance_pybind.cpp')],
                               depends=[join('src', 'function_ref.h'),
                                        join('src', 'views.h'),
                                        join('src', 'distance_metrics.h')],
                               include_dirs=distance_pybind_includes,
                               language='c++',
                               **numpy_nodepr_api)
    # pre_build_hook is defined at module level for the pybind11 extension.
    ext._pre_build_hook = pre_build_hook
    config.add_extension('_voronoi', sources=['_voronoi.c'])
    config.add_extension('_hausdorff', sources=['_hausdorff.c'])
    # Add license files
    config.add_data_files('qhull_src/COPYING.txt')
    # Type stubs
    config.add_data_files('*.pyi')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``special`` subpackage.

    Declares the C helper libraries (cephes, c_misc), the Fortran
    libraries (amos, cdflib, mach, specfun), and the ufunc/Cython
    extension modules built on top of them.

    Relies on module-level names imported elsewhere in this file:
    ``sys``, ``os``, ``join``, ``numpy``, ``get_python_inc``,
    ``get_numpy_include_dirs`` and a plain ``get_info`` (used for the
    "npymath" extra info).
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info as get_system_info

    config = Configuration('special', parent_package, top_path)

    define_macros = []
    if sys.platform == 'win32':
        # define_macros.append(('NOINFINITIES',None))
        # define_macros.append(('NONANS',None))
        define_macros.append(('_USE_MATH_DEFINES', None))

    curdir = os.path.abspath(os.path.dirname(__file__))
    inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # NOTE(review): this inserts the *list* returned by
    # get_numpy_include_dirs() as a single element (nested list) rather
    # than extending inc_dirs — numpy.distutils appears to tolerate it,
    # but confirm this is intended.
    inc_dirs.insert(0, get_numpy_include_dirs())

    # C libraries
    c_misc_src = [join('c_misc', '*.c')]
    c_misc_hdr = [join('c_misc', '*.h')]
    cephes_src = [join('cephes', '*.c')]
    cephes_hdr = [join('cephes', '*.h')]
    # NOTE(review): the depends list repeats cephes_hdr and does not list
    # c_misc_src — looks odd; verify against upstream before changing.
    config.add_library('sc_c_misc', sources=c_misc_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + cephes_src
                                + c_misc_hdr + cephes_hdr
                                + ['*.h']),
                       macros=define_macros)
    config.add_library('sc_cephes', sources=cephes_src,
                       include_dirs=[curdir] + inc_dirs,
                       depends=(cephes_hdr + ['*.h']),
                       macros=define_macros)

    # Fortran/C++ libraries
    mach_src = [join('mach', '*.f')]
    amos_src = [join('amos', '*.f')]
    cdf_src = [join('cdflib', '*.f')]
    specfun_src = [join('specfun', '*.f')]
    # mach is built without Fortran optimization (noopt).
    config.add_library('sc_mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('sc_amos', sources=amos_src)
    config.add_library('sc_cdf', sources=cdf_src)
    config.add_library('sc_specfun', sources=specfun_src)

    # Extension specfun
    config.add_extension('specfun',
                         sources=['specfun.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         depends=specfun_src,
                         define_macros=[],
                         libraries=['sc_specfun'])

    # Extension _ufuncs
    headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')]
    ufuncs_src = ['_ufuncs.c', 'sf_error.c', '_logit.c.src',
                  "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"]
    ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src
                  + cephes_src + mach_src + cdf_src + specfun_src)
    # Copy the lapack_opt info so the setdefault/extend calls below do not
    # mutate the dict cached by system_info.
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(
        [curdir] + inc_dirs + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend(
        ['sc_amos', 'sc_c_misc', 'sc_cephes', 'sc_mach',
         'sc_cdf', 'sc_specfun'])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('_ufuncs',
                         depends=ufuncs_dep,
                         sources=ufuncs_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # Extension _ufuncs_cxx
    ufuncs_cxx_src = ['_ufuncs_cxx.cxx', 'sf_error.c',
                      '_faddeeva.cxx', 'Faddeeva.cc',
                      '_wright.cxx', 'wright.cc']
    ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh'])
    config.add_extension('_ufuncs_cxx',
                         sources=ufuncs_cxx_src,
                         depends=ufuncs_cxx_dep,
                         include_dirs=[curdir],
                         define_macros=define_macros,
                         extra_info=get_info("npymath"))

    # _ellip_harm_2 only needs LAPACK; fresh copy of the info dict.
    cfg = dict(get_system_info('lapack_opt'))
    config.add_extension('_ellip_harm_2',
                         sources=['_ellip_harm_2.c', 'sf_error.c', ],
                         **cfg)

    # Cython API
    config.add_data_files('cython_special.pxd')

    cython_special_src = ['cython_special.c', 'sf_error.c', '_logit.c.src',
                          "amos_wrappers.c", "cdf_wrappers.c",
                          "specfun_wrappers.c"]
    cython_special_dep = (headers + ufuncs_src + ufuncs_cxx_src + amos_src
                          + c_misc_src + cephes_src + mach_src + cdf_src
                          + specfun_src)
    cfg = dict(get_system_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(
        [curdir] + inc_dirs + [numpy.get_include()])
    cfg.setdefault('libraries', []).extend(
        ['sc_amos', 'sc_c_misc', 'sc_cephes', 'sc_mach',
         'sc_cdf', 'sc_specfun'])
    cfg.setdefault('define_macros', []).extend(define_macros)
    config.add_extension('cython_special',
                         depends=cython_special_dep,
                         sources=cython_special_src,
                         extra_info=get_info("npymath"),
                         **cfg)

    # combinatorics
    config.add_extension('_comb', sources=['_comb.c'])

    # testing for _round.h
    config.add_extension('_test_round',
                         sources=['_test_round.c'],
                         depends=['_round.h', 'c_misc/double2.h'],
                         include_dirs=[numpy.get_include()],
                         extra_info=get_info('npymath'))

    config.add_data_files('tests/*.py')
    config.add_data_files('tests/data/README')
    config.add_data_files('tests/data/*.npz')

    config.add_subpackage('_precompute')

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``csgraph`` subpackage."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('csgraph', parent_package, top_path)
    config.add_data_dir('tests')

    # Every module is a single Cython-generated C file that only needs the
    # numpy headers, so declare them all the same way.
    numpy_inc = [numpy.get_include()]
    for module in ('_shortest_path', '_traversal', '_min_spanning_tree',
                   '_matching', '_flow', '_reordering', '_tools'):
        config.add_extension(module,
                             sources=['%s.c' % module],
                             include_dirs=numpy_inc)

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` subpackage.

    Registers code generators for the configuration headers
    (``config.h``, ``_numpyconfig.h``) and the generated C APIs,
    declares the npymath / npysort helper libraries, and sets up the
    ``_multiarray_umath`` extension plus several small test extensions.

    Relies on many helpers defined elsewhere in this file (e.g.
    ``is_released``, ``check_api_version``, ``npy_load_module``,
    ``CallOnceOnly``, ``check_mathlib``, ``check_math_capabilities``,
    ``is_npy_no_signal``, ``win32_checks``, ``uses_accelerate_framework``)
    and on module-level imports (``os``, ``sys``, ``join``, ``warnings``,
    ``textwrap``, ``platform``, ``log``, ``newer`` ...).
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # For released versions, a C API mismatch becomes a hard error.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py as a module so the umath
    # generator below can call into it.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches expensive configure-style probes so the two header generators
    # below don't run them twice.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        # Build-time generator for the private config.h header.
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        # Only regenerate when this setup file is newer than the target.
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

            if check_for_right_shift_internal_compiler_error(config_cmd):
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift')

            # Generate the config.h file from moredefs
            with open(target, 'w') as target_f:
                for d in moredefs:
                    # Entries are either bare names or (name, value) pairs.
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # define inline to our keyword, or nothing
                target_f.write('#ifndef __cplusplus\n')
                if inline == 'inline':
                    target_f.write('/* #undef inline */\n')
                else:
                    target_f.write('#define inline %s\n' % inline)
                target_f.write('#endif\n')

                # add the guard to make sure config.h is never included directly,
                # but always through npy_config.h
                target_f.write(textwrap.dedent("""
                    #ifndef _NPY_NPY_CONFIG_H_
                    #error config.h should never be included directly, include npy_config.h instead
                    #endif
                """))

            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        else:
            # Header is up to date: recover MATHLIB from the existing file so
            # the libraries fix-up below still works.
            mathlibs = []
            with open(target) as target_f:
                for line in target_f:
                    s = '#define MATHLIB'
                    if line.startswith(s):
                        value = line[len(s):].strip()
                        if value:
                            mathlibs.extend(value.split(','))

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # Define __STDC_FORMAT_MACROS
                target_f.write(textwrap.dedent("""
                    #ifndef __STDC_FORMAT_MACROS
                    #define __STDC_FORMAT_MACROS 1
                    #endif
                """))

            # Dump the numpyconfig.h header to stdout
            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Factory returning a build-time generator that runs the named
        # script from code_generators and registers its output headers.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_dir('include/numpy')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("NPY_INTERNAL_BUILD", "1")])  # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    # Large-file support macros: AIX spells this differently.
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            # rerun the failing command in verbose mode
            config_cmd.compiler.verbose = True
            config_cmd.try_link('int main(void) { return 0;}')
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
                       join('src', 'npymath', 'npy_math.c'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    # Intel and Clang also don't seem happy with /GL
    is_msvc = (platform.platform().startswith('Windows') and
               platform.python_compiler().startswith('MS'))
    config.add_installed_library(
        'npymath',
        sources=npymath_sources + [get_mathlib_info],
        install_dir='lib',
        build_info={
            'include_dirs': [],  # empty list required for creating npy_math_internal.h
            'extra_compiler_args': (['/GL-'] if is_msvc else []),
        })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
                              subst_dict)

    #######################################################################
    #                          npysort library                            #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'common', 'npy_sort.h.src'),
                       join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'timsort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'npysort', 'radixsort.c.src'),
                       join('src', 'common', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'common', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                       multiarray_tests module                       #
    #######################################################################

    config.add_extension('_multiarray_tests',
                         sources=[join('src', 'multiarray',
                                       '_multiarray_tests.c.src'),
                                  join('src', 'common', 'mem_overlap.c')],
                         depends=[join('src', 'common', 'mem_overlap.h'),
                                  join('src', 'common', 'npy_extint128.h')],
                         libraries=['npymath'])

    #######################################################################
    #              _multiarray_umath module - common part                 #
    #######################################################################

    common_deps = [
        join('src', 'common', 'array_assign.h'),
        join('src', 'common', 'binop_override.h'),
        join('src', 'common', 'cblasfuncs.h'),
        join('src', 'common', 'lowlevel_strided_loops.h'),
        join('src', 'common', 'mem_overlap.h'),
        join('src', 'common', 'npy_cblas.h'),
        join('src', 'common', 'npy_config.h'),
        join('src', 'common', 'npy_ctypes.h'),
        join('src', 'common', 'npy_extint128.h'),
        join('src', 'common', 'npy_import.h'),
        join('src', 'common', 'npy_longdouble.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.h'),
        join('src', 'common', 'ufunc_override.h'),
        join('src', 'common', 'umathmodule.h'),
        join('src', 'common', 'numpyos.h'),
        ]

    common_src = [
        join('src', 'common', 'array_assign.c'),
        join('src', 'common', 'mem_overlap.c'),
        join('src', 'common', 'npy_longdouble.c'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.c'),
        join('src', 'common', 'ufunc_override.c'),
        join('src', 'common', 'numpyos.c'),
        ]

    # Optional CBLAS acceleration; ILP64 variant selected by env var.
    if os.environ.get('NPY_USE_BLAS_ILP64', "0") != "0":
        blas_info = get_info('blas_ilp64_opt', 2)
    else:
        blas_info = get_info('blas_opt', 0)

    have_blas = blas_info and ('HAVE_CBLAS', None) in blas_info.get(
        'define_macros', [])

    if have_blas:
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([join('src', 'common', 'cblasfuncs.c'),
                           join('src', 'common', 'python_xerbla.c'),
                           ])
        if uses_accelerate_framework(blas_info):
            common_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'arrayfunction_override.h'),
        join('src', 'multiarray', 'npy_buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'dragon4.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'strfuncs.h'),
        join('src', 'multiarray', 'typeinfo.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'multiarray', 'vdot.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
        # add library sources as distuils does not consider libraries
        # dependencies
        ] + npysort_sources + npymath_sources

    multiarray_src = [
        join('src', 'multiarray', 'alloc.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'arrayfunction_override.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'compiled_base.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dragon4.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'strfuncs.c'),
        join('src', 'multiarray', 'temp_elide.c'),
        join('src', 'multiarray', 'typeinfo.c'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'vdot.c'),
        ]

    #######################################################################
    #               _multiarray_umath module - umath part                 #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        # Build-time generator writing __umath_generated.c from the
        # definitions in code_generators/generate_umath.py.
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            with open(target, 'w') as f:
                f.write(generate_umath.make_code(generate_umath.defdict,
                                                 generate_umath.__file__))
        return []

    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'loops.h.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'matmul.h.src'),
        join('src', 'umath', 'matmul.c.src'),
        join('src', 'umath', 'clip.h.src'),
        join('src', 'umath', 'clip.c.src'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'extobj.c'),
        join('src', 'umath', 'cpuid.c'),
        join('src', 'umath', 'scalarmath.c.src'),
        join('src', 'umath', 'ufunc_type_resolution.c'),
        join('src', 'umath', 'override.c'),
        ]

    umath_deps = [
        generate_umath_py,
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'override.h'),
        join(codegen_dir, 'generate_ufunc_api.py'),
        ]

    config.add_extension('_multiarray_umath',
                         sources=multiarray_src + umath_src +
                                 npymath_sources + common_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  ],
                         depends=deps + multiarray_deps + umath_deps +
                                 common_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('_umath_tests',
                         sources=[join('src', 'umath', '_umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('_rational_tests',
                         sources=[join('src', 'umath',
                                       '_rational_tests.c.src')])

    #######################################################################
    #                      struct_ufunc_test module                       #
    #######################################################################

    config.add_extension('_struct_ufunc_tests',
                         sources=[join('src', 'umath',
                                       '_struct_ufunc_tests.c.src')])

    #######################################################################
    #                     operand_flag_tests module                       #
    #######################################################################

    config.add_extension('_operand_flag_tests',
                         sources=[join('src', 'umath',
                                       '_operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``utils`` subpackage."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('utils', parent_package, top_path)
    config.add_subpackage('sparsetools')

    # cd fast needs CBLAS
    blas_info = get_info('blas_opt', 0)
    atlas_unusable = ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])
    if not blas_info or atlas_unusable:
        # fall back to a system-wide cblas library
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])

    config.add_extension('arraybuilder', sources=['arraybuilder.c'])
    config.add_extension('sparsefuncs', sources=['sparsefuncs.c'])

    config.add_extension(
        'arrayfuncs',
        sources=['arrayfuncs.c'],
        depends=[join('src', 'cholesky_delete.c')],
        libraries=cblas_libs,
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include(),
                      blas_info.pop('include_dirs', [])],
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info)

    config.add_extension(
        'murmurhash',
        sources=['murmurhash.c', join('src', 'MurmurHash3.cpp')],
        include_dirs=['src'])

    # Cython-generated modules that only need the numpy headers.
    for module in ('graph_shortest_path', 'seq_dataset', 'weight_vector'):
        config.add_extension(module,
                             sources=['%s.c' % module],
                             include_dirs=[numpy.get_include()])

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``spatial`` subpackage."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from numpy.distutils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')

    # qhull: bundled sources, compiled with the qh_QHpointer variant.
    qhull_names = ['geom2.c', 'geom.c', 'global.c', 'io.c', 'libqhull.c',
                   'mem.c', 'merge.c', 'poly2.c', 'poly.c', 'qset.c',
                   'random.c', 'rboxlib.c', 'stat.c', 'user.c', 'usermem.c',
                   'userprintf.c', 'userprintf_rbox.c']
    qhull_sources = [join('qhull', 'src', name) for name in qhull_names]

    # Platform-specific Python include dir is appended only when distinct.
    include_dirs = [get_python_inc()]
    plat_inc = get_python_inc(plat_specific=1)
    if include_dirs[0] != plat_inc:
        include_dirs.append(plat_inc)
    include_dirs.append(get_numpy_include_dirs())

    qhull_cfg = dict(get_sys_info('lapack_opt'))
    qhull_cfg.setdefault('include_dirs', []).extend(include_dirs)
    qhull_cfg.setdefault('define_macros', []).append(('qh_QHpointer', '1'))
    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_sources,
                         **qhull_cfg)

    # cKDTree
    kdtree_root = join('ckdtree', 'src')
    kdtree_sources = [join(kdtree_root, name) for name in (
        'query.cxx', 'build.cxx', 'globals.cxx', 'cpp_exc.cxx',
        'query_pairs.cxx', 'count_neighbors.cxx', 'query_ball_point.cxx',
        'query_ball_tree.cxx', 'sparse_distances.cxx')]
    kdtree_headers = [join(kdtree_root, name) for name in (
        'ckdtree_decl.h', 'cpp_exc.h', 'ckdtree_methods.h', 'cpp_utils.h',
        'rectangle.h', 'distance.h', 'distance_box.h', 'ordered_pair.h')]
    config.add_extension(
        'ckdtree',
        sources=['ckdtree.cxx'] + kdtree_sources,
        depends=['ckdtree.cxx'] + kdtree_headers + kdtree_sources,
        include_dirs=include_dirs + [kdtree_root])

    # _distance_wrap
    config.add_extension(
        '_distance_wrap',
        sources=[join('src', 'distance_wrap.c')],
        depends=[join('src', 'distance_impl.h')],
        include_dirs=[get_numpy_include_dirs()],
        extra_info=get_misc_info("npymath"))

    return config