def configuration(parent_package='', top_path=None, package_name=DISTNAME):
    """Build the top-level numpy.distutils configuration for audiolab."""
    # A stale MANIFEST can shadow MANIFEST.in; drop it before building.
    if os.path.exists('MANIFEST'):
        os.remove('MANIFEST')

    write_info(os.path.join("audiolab", "info.py"))
    write_version(os.path.join("audiolab", "version.py"))
    # XXX: find a way to include the doc in sdist
    if os.path.exists(os.path.join("docs", "src")):
        write_version(os.path.join("docs", "src", "audiolab_version.py"))

    # Package metadata, gathered once and passed through as keywords.
    metadata = dict(
        version=build_fverstring(),
        maintainer=MAINTAINER,
        maintainer_email=MAINTAINER_EMAIL,
        description=DESCRIPTION,
        license=LICENSE,
        url=URL,
        download_url=DOWNLOAD_URL,
        long_description=LONG_DESCRIPTION,
    )
    config = Configuration(None, parent_package, top_path, **metadata)
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=True,
    )
    config.add_subpackage('audiolab')
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the 'reference' subpackage and its tests."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('reference', parent_package, top_path)
    cfg.add_subpackage('tests')
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for fit_interpolate: builds the 'fitsmooth' extension."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    cfg = Configuration('fit_interpolate', parent_package, top_path)
    cfg.add_data_dir('tests')

    # C utilities shared with the sibling 'utilities' package.
    util_dir = join('..', 'utilities')
    util_srcs = [join(util_dir, name)
                 for name in ('quad_tree.c', 'sparse_dok.c', 'sparse_csr.c')]

    # '-fopenmp' is skipped on darwin (no extra flags there at all).
    extra_args = None if sys.platform == 'darwin' else ['-fopenmp']

    cfg.add_extension('fitsmooth',
                      sources=['fitsmooth.c'] + util_srcs,
                      include_dirs=[util_dir],
                      extra_compile_args=extra_args,
                      extra_link_args=extra_args)
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for f2py: data files plus a generated 'f2py' script."""
    config = Configuration('f2py', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_files('src/fortranobject.c',
                          'src/fortranobject.h',
                          )
    config.make_svn_version_py()

    def generate_f2py_py(build_dir):
        # Name the script after the interpreter, e.g. 'python3.9' -> 'f2py3.9'.
        f2py_exe = 'f2py' + os.path.basename(sys.executable)[6:]
        if f2py_exe[-4:] == '.exe':
            f2py_exe = f2py_exe[:-4] + '.py'
        if 'bdist_wininst' in sys.argv and f2py_exe[-3:] != '.py':
            f2py_exe = f2py_exe + '.py'
        target = os.path.join(build_dir, f2py_exe)
        if newer(__file__, target):
            log.info('Creating %s', target)
            mainloc = os.path.join(os.path.dirname(__file__), "__main__.py")
            # Use context managers so both handles are closed even when a
            # read/write raises (the original leaked the write handle on error).
            with open(target, 'w') as f:
                f.write('#!%s\n' % (sys.executable))
                with open(mainloc) as mf:
                    f.write(mf.read())
        return target

    config.add_scripts(generate_f2py_py)
    log.info('F2PY Version %s', config.get_version())
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for spatial_016: builds the 'ckdtree' C++ extension."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial_016', parent_package, top_path)

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # BUG FIX: get_numpy_include_dirs() returns a *list*; the original
    # append()ed it, nesting a list inside inc_dirs.  extend() keeps it flat.
    inc_dirs.extend(get_numpy_include_dirs())

    ckdtree_src = ['ckdtree_query.cxx',
                   'ckdtree_globals.cxx',
                   'ckdtree_cpp_exc.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = ['ckdtree_decl.h',
                       'ckdtree_exc.h',
                       'ckdtree_methods.h',
                       'ckdtree_utils.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=[join('ckdtree', 'ckdtree.cxx')] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the 'crcm' Cython extension (RCM reordering)."""
    import os.path as op
    from numpy.distutils.misc_util import Configuration
    from sfepy import Config

    site_config = Config()
    # Numeric platform flag passed to the C sources via SFEPY_PLATFORM.
    platform_flag = {'posix': 0, 'windows': 1}[site_config.system()]

    pkg_dir = op.dirname(__file__)
    config = Configuration(op.split(pkg_dir)[-1], parent_package, top_path)

    defines = [('__SDIR__', "'\"%s\"'" % pkg_dir),
               ('SFEPY_PLATFORM', platform_flag)]
    if '-DDEBUG_FMF' in site_config.debug_flags():
        defines.append(('DEBUG_FMF', None))

    fem_dir = '../../fem/extmods'
    fem_src = [op.join(fem_dir, name) for name in ['common_python.c']]

    config.add_extension('crcm',
                         sources=['crcm.pyx', 'rcm.c'] + fem_src,
                         extra_compile_args=site_config.compile_flags(),
                         extra_link_args=site_config.link_flags(),
                         include_dirs=[pkg_dir, fem_dir],
                         define_macros=defines)
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for isolve: the '_iterative' Fortran extension."""
    from scipy._build_utils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils import get_g77_abi_wrappers

    config = Configuration('isolve', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')
    # NotFoundError was imported but never used: fail early (and clearly)
    # when no LAPACK/BLAS is available, matching the sibling isolve setup.
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # iterative methods (the commented variants are deliberately not built)
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
               # 'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
               # 'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
               # 'SORREVCOM.f.src'
               ]

    Util = ['getbreak.f.src']
    sources = Util + methods + ['_iterative.pyf.src']
    sources = [join('iterative', x) for x in sources]
    sources += get_g77_abi_wrappers(lapack_opt)

    config.add_extension('_iterative',
                         sources=sources,
                         extra_info=lapack_opt)
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the model subpackage."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('model', parent_package, top_path)
    cfg.add_subpackage('kernel')
    return cfg
def configuration(parent_package='', top_path=None):
    """Top-level configuration exposing the Fortran routines extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('', parent_package, top_path)
    cfg.add_extension('fortran_routines',
                      sources=['src/fortran/fortran_routines.f95'])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the testing subpackage."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('testing', parent_package, top_path)
    cfg.add_subpackage('_private')
    cfg.add_data_dir('tests')
    return cfg
def configuration(parent_package='', top_path=None):
    """Build the 'slicot' f2py extension from the SLICOT Fortran sources."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('', parent_package, top_path)
    # The .pyf interface goes first; all Fortran sources are picked up by glob.
    fortran_sources = ['src/slicot.pyf'] + glob('src/*.f')
    cfg.add_extension('slicot',
                      libraries=['lapack'],
                      sources=fortran_sources)
    return cfg
def configuration(parent_package="", top_path=None):
    """Configuration exposing the f2py-wrapped 'c_library' extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration("", parent_package, top_path)
    cfg.add_extension("c_library",
                      sources=["c_library.pyf", "c_functions.c"])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for wmap5Wrapper: metadata plus the _wmapWrapper extension."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration("wmap5Wrapper", parent_package, top_path,
                           namespace_packages=['wmap5Wrapper'],
                           version='0.3.0',
                           author='Joel Akeret',
                           author_email="*****@*****.**",
                           description="perform the wmap likelihood computation",
                           url="http://www.fhnw.ch",
                           long_description=desc)

    # WMAP 5-year likelihood Fortran sources, then the wrapper and its
    # f2py interface file.
    wmap_sources = ['likelihood_v3/read_archive_map.f90',
                    'likelihood_v3/read_fits.f90',
                    'likelihood_v3/healpix_types.f90',
                    'likelihood_v3/br_mod_dist.f90',
                    'likelihood_v3/WMAP_5yr_options.F90',
                    'likelihood_v3/WMAP_5yr_util.f90',
                    'likelihood_v3/WMAP_5yr_gibbs.F90',
                    'likelihood_v3/WMAP_5yr_tt_pixlike.F90',
                    'likelihood_v3/WMAP_5yr_tt_beam_ptsrc_chisq.f90',
                    'likelihood_v3/WMAP_5yr_teeebb_pixlike.F90',
                    'likelihood_v3/WMAP_5yr_likelihood.F90',
                    'source/WmapWrapperCore.f90',
                    'source/WmapWrapper.f90',
                    '_wmapWrapper.pyf']

    # include_dirs / library_dirs / libraries / *args come from module scope.
    config.add_extension('_wmapWrapper',
                         sources=wmap_sources,
                         include_dirs=include_dirs,
                         library_dirs=library_dirs,
                         libraries=libraries,
                         extra_f90_compile_args=extra_f90_compile_args,
                         extra_link_args=extra_link_args)
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for pyrex_ext: the 'primes' Pyrex extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('pyrex_ext', parent_package, top_path)
    cfg.add_extension('primes', ['primes.pyx'])
    cfg.add_data_dir('tests')
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the umfpack wrappers.

    The SWIG extension itself is currently disabled (see the commented
    block below); the build-info gathering is kept so it can be re-enabled.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append
    # NOTE: the original also did 'import numpy' here; it was unused.

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)
    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        # Source generator for the (disabled) extension: only hand the SWIG
        # interface file to the build when UMFPACK was actually found.
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    #config.add_extension('__umfpack',
    #                     sources=[umfpack_i],
    #                     depends=['umfpack.i'],
    #                     **build_info)

    return config
def configuration(parent_package='', top_path=None):
    """Configuration bundling the vendored joblib under 'externals'."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('externals', parent_package, top_path)
    cfg.add_subpackage('joblib')
    cfg.add_subpackage('joblib/test')
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the 'igac' Cython extension (NURBS helpers)."""
    import os.path as op
    from numpy.distutils.misc_util import Configuration
    from sfepy import Config

    site_config = Config()
    system = site_config.system()

    pkg_dir = op.dirname(__file__)
    config = Configuration(op.split(pkg_dir)[-1], parent_package, top_path)

    # Non-posix builds get '__inline' in place of the C99 'inline' keyword.
    defines = [('SFEPY_PLATFORM', {'posix': 0, 'windows': 1}[system]),
               ('inline', 'inline' if system == 'posix' else '__inline')]
    if '-DDEBUG_FMF' in site_config.debug_flags():
        defines.append(('DEBUG_FMF', None))

    common_path = '../../common/extmods'
    common_src = [op.join(common_path, name)
                  for name in ('fmfield.c', 'geommech.c', 'common_python.c')]

    config.add_extension('igac',
                         sources=['igac.pyx', 'nurbs.c'] + common_src,
                         depends=common_src,
                         extra_compile_args=site_config.compile_flags(),
                         extra_link_args=site_config.link_flags(),
                         include_dirs=[pkg_dir, common_path],
                         define_macros=defines)
    return config
def configuration(parent_package="", top_path=None):
    """Configuration for pyxit: builds the '_estimator' C extension."""
    # Configuration and numpy are expected at module scope.
    cfg = Configuration("pyxit", parent_package, top_path)
    cfg.add_extension("_estimator",
                      sources=["_estimator.c"],
                      include_dirs=[numpy.get_include()])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for umfpack: registers the scons build script and tests.

    The direct distutils extension build was retired in favour of the
    SConstruct script; its old definition (umf_info/blas_opt gathering and
    the '__umfpack' add_extension call) stayed here only as commented-out
    reference code and is intentionally not executed.
    """
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('umfpack', parent_package, top_path)
    cfg.add_sconscript('SConstruct')
    cfg.add_data_dir('tests')
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for isolve: the '_iterative' Fortran extension."""
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration

    config = Configuration('isolve', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # Reverse-communication iterative solvers; the commented entries are
    # deliberately not built.
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
               # 'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
               # 'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
               # 'SORREVCOM.f.src'
               ]
    helpers = ['STOPTEST2.f.src', 'getbreak.f.src']
    all_sources = helpers + methods + ['_iterative.pyf.src']

    config.add_extension('_iterative',
                         sources=[join('iterative', name) for name in all_sources],
                         extra_info=lapack_opt)
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the statistics subpackage ('intvol' Cython ext)."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('statistics', parent_package, top_path)
    cfg.add_data_dir('tests')
    cfg.add_extension('intvol', 'intvol.pyx',
                      include_dirs=[np.get_include()])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the cluster subpackage (_hierarchical, _k_means)."""
    from numpy.distutils.misc_util import Configuration

    cblas_libs, blas_info = get_blas_info()

    libraries = []
    if os.name == 'posix':
        # libm must be linked explicitly on POSIX.
        cblas_libs.append('m')
        libraries.append('m')

    config = Configuration('cluster', parent_package, top_path)
    config.add_extension('_hierarchical',
                         sources=['_hierarchical.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    # BUG FIX: include_dirs must be a flat list of strings.  The original
    # embedded the list returned by blas_info.pop('include_dirs', []) as a
    # single (nested) element; concatenate it instead.
    config.add_extension(
        '_k_means',
        libraries=cblas_libs,
        sources=['_k_means.c'],
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include()] + blas_info.pop('include_dirs', []),
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info
    )
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for gufunc_sampler: the templated '_gs_kernels' ext."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('gufunc_sampler', parent_package, top_path)
    cfg.add_extension('_gs_kernels', ['_gs_kernels.c.src'])
    return cfg
def configuration(parent_package="", top_path=None):
    """Configuration for vista_directory: builds the 'vista' C extension."""
    # The original imported numpy here but never used it; dropped.
    from numpy.distutils.misc_util import Configuration

    config = Configuration("vista_directory", parent_package, top_path)
    config.add_extension("vista", ["vista.c"])
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the units subpackage."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('units', parent_package, top_path)
    cfg.add_subpackage("tests")
    cfg.make_config_py()  # installs __config__.py
    return cfg
def configuration(parent_package="", top_path=None):
    """Configuration for f2py_ext: the 'fib2' f2py demo extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration("f2py_ext", parent_package, top_path)
    cfg.add_extension("fib2", ["src/fib2.pyf", "src/fib1.f"])
    cfg.add_data_dir("tests")
    return cfg
def configuration(parent_package='', top_path=None):
    """Top-level package configuration: registers every subpackage."""
    from numpy.distutils.misc_util import Configuration
    import os.path as op

    package_name = op.split(op.dirname(__file__))[-1]
    config = Configuration(package_name, parent_package, top_path)

    for name in ('applications', 'base', 'discrete', 'mesh',
                 'homogenization', 'linalg', 'mechanics', 'parallel',
                 'physics', 'postprocess', 'solvers', 'terms'):
        config.add_subpackage(name)

    return config
def configuration(parent_package='', top_path=None):
    """Top-level configuration exposing the 'm' f2py extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('', parent_package, top_path)
    cfg.add_extension('m', sources=['m.pyf', 'foo.c'])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the _rrtmg_lw radiation module.

    The Fortran extension is only added when a Fortran compiler can be
    detected; otherwise the package is configured without it.
    """
    global config
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.fcompiler import get_default_fcompiler, CompilerNotFound

    build = True
    try:
        # figure out which compiler we're going to use
        compiler = get_default_fcompiler()
        # set some fortran compiler-dependent flags
        f90flags = []
        if compiler == 'gnu95':
            f90flags.append('-fno-range-check')
            f90flags.append('-ffree-form')
            f90flags.append('-fPIC')
        elif compiler == 'intel' or compiler == 'intelem':
            f90flags.append('-132')
        # Need zero-level optimization to avoid build problems with rrtmg_lw_k_g.f90
        #f90flags.append('-O2')
        # Suppress all compiler warnings (avoid huge CI log files)
        f90flags.append('-w')
    except CompilerNotFound:
        print('No Fortran compiler found, not building the RRTMG_LW radiation module!')
        build = False

    config = Configuration(package_name='_rrtmg_lw',
                           parent_name=parent_package,
                           top_path=top_path)
    if build:
        config.add_extension(
            name='_rrtmg_lw',
            # rrtmg_lw_gen_source is defined at module level (not shown here).
            sources=[rrtmg_lw_gen_source],
            extra_f90_compile_args=f90flags,
            f2py_options=['--quiet'],
        )
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for spline2: the 'spline2c' C extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('spline2', parent_package, top_path)
    cfg.add_extension('spline2c',
                      sources=['spline2_nonint.c', 'spline2_wrap.c'])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the calibration package."""
    # Configuration is expected to be imported at module scope.
    cfg = Configuration('calibration', parent_package, top_path)
    cfg.add_subpackage('model')
    return cfg
# Build script for the glmnet wrappers.  The original imported sys twice
# ('import os, sys' followed by 'import sys'); imports are de-duplicated
# and grouped stdlib-first here.
import os
import sys

from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info

fflags = '-fdefault-real-8 -ffixed-form'
# TODO: Fix it so that these flags are default.

config = Configuration('glmnet', parent_package=None, top_path=None)

f_sources = ['glmnet/glmnet.pyf', 'glmnet/glmnet.f']
config.add_extension(name='_glmnet', sources=f_sources)

config_dict = config.todict()

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(version='1.1-5',
          description='Python wrappers for the GLMNET package',
          author='David Warde-Farley',
          author_email='*****@*****.**',
          url='github.com/dwf/glmnet-python',
          license='GPL2',
          requires=['NumPy (>= 1.3)'],
          packages=['glmnet'],
          **config_dict)
def configuration(parent_package='', top_path=None):
    """Top-level climlab configuration: registers all subpackages."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('climlab', parent_package, top_path)
    for name in ('convection', 'domain', 'dynamics', 'model', 'process',
                 'radiation', 'solar', 'surface', 'tests', 'utils'):
        config.add_subpackage(name)
    return config
# Build script for the MBI package and its 'MBIlib' Fortran extension.
import setuptools
from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration

# Fortran sources for the MBIlib extension.
sources = [
    'src/basis.f90',
    'src/evaluate.f90',
    'src/jacobian.f90',
    'src/knots.f90',
    'src/paramuni.f90',
]

config = Configuration('MBI')
config.add_extension('MBIlib', sources=sources)

# Setup metadata; the Configuration dict is merged in last.
kwds = dict(
    install_requires=['numpy', 'scipy'],
    version='0.1',
    zip_safe=False,
    license='Apache (v2.0)',
    packages=['MBI'],
    package_data={'MBI': ['examples/*.py']},
    include_package_data=True,
)
kwds.update(config.todict())
setup(**kwds)
def configuration(parent_package='', top_path=None):
    """Configuration for the cluster subpackage (Cython extensions)."""
    from numpy.distutils.misc_util import Configuration

    libraries = []
    if os.name == 'posix':
        # libm must be linked explicitly on POSIX.
        libraries.append('m')

    config = Configuration('cluster', parent_package, top_path)
    config.add_extension('_dbscan_inner',
                         sources=['_dbscan_inner.pyx'],
                         include_dirs=[numpy.get_include()],
                         language="c++")
    config.add_extension('_hierarchical_fast',
                         sources=['_hierarchical_fast.pyx'],
                         language="c++",
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension('_k_means_elkan',
                         sources=['_k_means_elkan.pyx'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    # BUG FIX: the original passed the bare string numpy.get_include() as
    # include_dirs here; every sibling extension wraps it in a list.
    config.add_extension('_k_means_fast',
                         sources=['_k_means_fast.pyx'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_subpackage('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the optparse subpackage (no extensions or data)."""
    from numpy.distutils.misc_util import Configuration

    return Configuration('optparse', parent_package, top_path)
def configuration(parent_package='', top_path=None):
    """Configuration for scaleup; installs __config__.py."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('scaleup', parent_package, top_path)
    cfg.make_config_py()
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for _lsq: the 'givens_elimination' C extension."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('_lsq', parent_package, top_path)
    cfg.add_extension('givens_elimination',
                      sources=['givens_elimination.c'])
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for numpy.core: generated headers, API code and the
    multiarray/umath/_sort/scalarmath/_dotblas extensions.

    NOTE(review): this is legacy Python-2 code (print statements,
    'raise Exc, "msg"' syntax, the imp module); kept byte-identical.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # Load code_generators/generate_umath.py under a mangled module name so
    # it does not collide with other imports.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        # Build-time source generator: writes config.h (math library and
        # platform defines) into the build tree, re-running the probes only
        # when this setup file is newer than the existing header.
        target = join(build_dir, header_dir, 'config.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            tc = generate_testcode(target)
            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)
            result = config_cmd.try_run(tc,
                                        include_dirs=[python_include],
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError,"Failed to test configuration. "\
                      "See previous error messages for more information."

            moredefs = []

            # Determine which library (if any) provides the math functions
            # by test-linking against each candidate in turn.
            # mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[], ['m'], ['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0, mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc, libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            def check_func(func_name):
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs, decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform == 'win32' or os.name == 'nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (
                    a, os.name, sys.platform)
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod', decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            # Append the collected defines to the generated header and echo
            # it to the build log.
            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))
            target_f.close()
            print 'File:', target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        else:
            # Header already up to date: recover the MATHLIB setting from it
            # so the extension still links against the right library.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, 'numpyconfig.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)

            # NOTE(review): bare attribute access, a no-op; kept as-is.
            config.numpy_include_dirs
            result = config_cmd.try_run(testcode,
                                        include_dirs = [python_include] + \
                                                       config.numpy_include_dirs,
                                        library_dirs = default_lib_dirs)
            if not result:
                raise SystemError,"Failed to generate numpy configuration. "\
                      "See previous error messages for more information."

            print 'File: %s' % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Factory producing a source generator that runs one of the
        # code_generators/*.py scripts and registers its outputs.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_array_api = generate_api_func('generate_array_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext, build_dir):
        # Emit the generated ufunc table (__umath_generated.c) when the
        # generator script is newer than the existing output.
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')
    config.numpy_include_dirs.extend(config.paths('include'))

    # Shared dependencies for the extensions below.
    deps = [
        join('src', 'arrayobject.c'),
        join('src', 'arraymethods.c'),
        join('src', 'scalartypes.inc.src'),
        join('src', 'arraytypes.inc.src'),
        join('src', '_signbit.c'),
        join('src', '_isnan.c'),
        join('src', 'ucsnarrow.c'),
        join('include', 'numpy', '*object.h'),
        'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
        join(codegen_dir, '*.txt')
    ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('multiarray',
                         sources=[join('src', 'multiarraymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  join(codegen_dir, 'generate_array_api.py'),
                                  join('*.py')
                                  ],
                         depends=deps,
                         )

    config.add_extension('umath',
                         sources=[generate_config_h,
                                  generate_numpyconfig_h,
                                  join('src', 'umathmodule.c.src'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  ],
                         depends=[join('src', 'ufuncobject.c'),
                                  generate_umath_py,
                                  join(codegen_dir, 'generate_ufunc_api.py'),
                                  ] + deps,
                         )

    config.add_extension('_sort',
                         sources=[join('src', '_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  ],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src', 'scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  generate_ufunc_api
                                  ],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)
    #blas_info = {}

    def get_dotblas_sources(ext, build_dir):
        # Source generator: only build _dotblas when an optimized BLAS with
        # ATLAS is available; returning None skips the extension.
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[join('blasdot', '_dotblas.c'),
                                  join('blasdot', 'cblas.h'),
                                  ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """Configuration for the io subpackage."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('io', parent_package, top_path)
    cfg.add_extension('_test_fortran',
                      sources=['_test_fortran.pyf', '_test_fortran.f'])
    cfg.add_data_dir('tests')
    for sub in ('_matlab', 'arff', '_harwell_boeing'):
        cfg.add_subpackage(sub)
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for scipy.linalg: BLAS/LAPACK wrappers and helpers."""
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from scipy._build_utils import (get_sgemv_fix, get_g77_abi_wrappers,
                                    split_fortran_files)

    config = Configuration('linalg', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # Extract the ATLAS version from the define_macros, if present.
    atlas_version = ([v[3:-3] for k, v in lapack_opt.get('define_macros', [])
                      if k == 'ATLAS_INFO'] + [None])[0]
    if atlas_version:
        print('ATLAS version: %s' % atlas_version)

    # fblas:
    sources = ['fblas.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    sources += get_sgemv_fix(lapack_opt)
    config.add_extension('_fblas',
                         sources=sources,
                         depends=['fblas_l?.pyf.src'],
                         extra_info=lapack_opt)

    # flapack:
    sources = ['flapack.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    config.add_extension('_flapack',
                         sources=sources,
                         depends=['flapack_user.pyf.src'],
                         extra_info=lapack_opt)

    if atlas_version is not None:
        # cblas:
        config.add_extension('_cblas',
                             sources=['cblas.pyf.src'],
                             depends=['cblas.pyf.src', 'cblas_l1.pyf.src'],
                             extra_info=lapack_opt)
        # clapack:
        config.add_extension('_clapack',
                             sources=['clapack.pyf.src'],
                             depends=['clapack.pyf.src'],
                             extra_info=lapack_opt)

    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'), join('src', 'lu.f')],
                         extra_info=lapack_opt)

    # _interpolative: some routines must be split into separate files to
    # keep compilers happy; do that before registering the extension.
    routines_to_split = [
        'dfftb1', 'dfftf1', 'dffti1', 'dsint1', 'dzfft1',
        'id_srand', 'idd_copyints', 'idd_id2svd0', 'idd_pairsamps',
        'idd_permute', 'idd_permuter', 'idd_random_transf0',
        'idd_random_transf0_inv', 'idd_random_transf_init0',
        'idd_subselect', 'iddp_asvd0', 'iddp_rsvd0', 'iddr_asvd0',
        'iddr_rsvd0', 'idz_estrank0', 'idz_id2svd0', 'idz_permute',
        'idz_permuter', 'idz_random_transf0_inv',
        'idz_random_transf_init0', 'idz_random_transf_init00',
        'idz_realcomp', 'idz_realcomplex', 'idz_reco', 'idz_subselect',
        'idzp_aid0', 'idzp_aid1', 'idzp_asvd0', 'idzp_rsvd0',
        'idzr_asvd0', 'idzr_reco', 'idzr_rsvd0',
        'zfftb1', 'zfftf1', 'zffti1',
    ]
    print('Splitting linalg.interpolative Fortran source files')
    fnames = split_fortran_files(join(os.path.split(os.path.abspath(__file__))[0],
                                      'src', 'id_dist', 'src'),
                                 routines_to_split)
    fnames = [join('src', 'id_dist', 'src', f) for f in fnames]
    config.add_extension('_interpolative', fnames + ["interpolative.pyf"],
                         extra_info=lapack_opt)

    # _calc_lwork:
    config.add_extension('_calc_lwork',
                         [join('src', 'calc_lwork.f')],
                         extra_info=lapack_opt)

    # _solve_toeplitz:
    # BUG FIX: get_numpy_include_dirs() already returns a list of paths; the
    # original wrapped it in another list, nesting a list in include_dirs.
    config.add_extension('_solve_toeplitz',
                         sources=['_solve_toeplitz.c'],
                         include_dirs=get_numpy_include_dirs())

    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy configuration: subpackages, data dirs, __config__.py."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('numpy', parent_package, top_path)
    for name in ('compat', 'core', 'distutils', 'doc', 'f2py', 'fft',
                 'lib', 'linalg', 'ma', 'matrixlib', 'polynomial',
                 'random', 'testing'):
        config.add_subpackage(name)
    config.add_data_dir('doc')
    config.add_data_dir('tests')
    config.make_config_py()  # installs __config__.py
    return config
def configuration(parent_package='', top_path=None):
    """Configuration for flithic: 'distance' subpackage plus __config__.py."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('flithic', parent_package, top_path)
    cfg.add_subpackage('distance')
    cfg.make_config_py()
    return cfg
def configuration(parent_package='', top_path=None):
    """Configuration for the twitter subpackage (no extensions or data)."""
    from numpy.distutils.misc_util import Configuration

    return Configuration('twitter', parent_package, top_path)
def configuration(parent_package='', top_path=None):
    """Top-level configuration for the 'lightning' distribution."""
    # Remove a stale MANIFEST so distutils regenerates it from MANIFEST.in.
    if os.path.exists('MANIFEST'):
        os.remove('MANIFEST')

    from numpy.distutils.misc_util import Configuration

    config = Configuration(None, parent_package, top_path)
    config.add_subpackage('lightning')
    return config


# NOTE(review): this guard is truncated in this chunk -- its body lives
# outside the visible source.
if __name__ == "__main__":
def configuration(parent_package='', top_path=None):
    """Configuration for the neighbors subpackage (Cython tree structures)."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('neighbors', parent_package, top_path)

    libs = []
    if os.name == 'posix':
        libs.append('m')

    numpy_inc = numpy.get_include()
    # dist_metrics additionally needs the numpy/ subdirectory of the headers.
    includes_for = {
        'ball_tree': [numpy_inc],
        'kd_tree': [numpy_inc],
        'dist_metrics': [numpy_inc, os.path.join(numpy_inc, 'numpy')],
        'typedefs': [numpy_inc],
        'quad_tree': [numpy_inc],
    }
    for name in ('ball_tree', 'kd_tree', 'dist_metrics',
                 'typedefs', 'quad_tree'):
        config.add_extension(name,
                             sources=['%s.pyx' % name],
                             include_dirs=includes_for[name],
                             libraries=libs)

    config.add_subpackage('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Configure the ``_LBP`` package: regenerate the Cython C sources and
    register the local-binary-pattern extension modules."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('_LBP', parent_package, top_path)

    ext_names = ('lbp_bins', '_lbp', '_lbp_ang', '_lbp_rad',
                 '_nilbp', '_sp_neighbors', 'boundary')

    # First regenerate every C file from its Cython source ...
    for name in ext_names:
        cython(['src/%s.pyx' % name], working_path=base_path)

    # ... then register the resulting C sources as extensions.
    for name in ext_names:
        config.add_extension(name,
                             sources=['src/%s.c' % name],
                             include_dirs=[get_numpy_include_dirs()])

    return config
def configuration(parent_package='', top_path=None):
    """Configure the ``lnpy`` package.

    Registers the pure-Python subpackages and the C++ ``fast_tools``
    extension.

    Parameters
    ----------
    parent_package : str
        Name of the parent package (empty for a top-level build).
    top_path : str or None
        Path of the top-level setup script.

    Returns
    -------
    Configuration
    """
    config = Configuration('lnpy', parent_package, top_path)

    for subpackage in ('transform', 'lnp', 'linear', 'multilinear',
                       'learn', 'io'):
        config.add_subpackage(subpackage)

    # Link against libm on POSIX; it is implicit elsewhere.
    # (The original computed this identical list twice -- once suffices.)
    libraries = []
    if os.name == 'posix':
        libraries.append('m')

    # cython file with fast methods (built from the generated C++ source)
    config.add_extension('fast_tools',
                         sources=['fast_tools.cpp'],
                         libraries=libraries,
                         include_dirs=[numpy.get_include()],
                         extra_compile_args=['-O3'],
                         language='c++')
    return config
def configuration(parent_package='', top_path=None):
    """Declare the ``yt`` package tree for numpy.distutils."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('yt', parent_package, top_path)

    for pkg in ('analysis_modules', 'data_objects', 'fields', 'extern',
                'frontends', 'geometry', 'gui', 'units', 'utilities',
                'visualization'):
        config.add_subpackage(pkg)

    config.make_config_py()
    # config.make_svn_version_py()  -- VCS stamping kept disabled
    return config
# ---------------------------------------------------------------------------
# numpy.core build configuration (the big configuration() from
# numpy/core/setup.py).
# NOTE(review): this chunk arrived whitespace-mangled -- each physical line
# below packs many statements onto one line.  The code bytes are preserved
# untouched; the inserted comment lines summarise what each packed line
# contains so the block can be navigated until the file is re-formatted.
# ---------------------------------------------------------------------------
# Packed line 1: configuration() entry point.  Loads the umath code
# generator module, checks the C API version against the codegen dir, and
# opens the nested helper generate_config_h(), which probes the toolchain
# (type sizes, math library, IEEE macros, inline/restrict keywords,
# wide-unicode) and starts writing include/numpy/config.h.
def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration, dot_join from numpy.distutils.system_info import get_info config = Configuration('core', parent_package, top_path) local_dir = config.local_path codegen_dir = join(local_dir, 'code_generators') if is_released(config): warnings.simplefilter('error', MismatchCAPIWarning) # Check whether we have a mismatch between the set C API VERSION and the # actual C API VERSION check_api_version(C_API_VERSION, codegen_dir) generate_umath_py = join(codegen_dir, 'generate_umath.py') n = dot_join(config.name, 'generate_umath') generate_umath = imp.load_module('_'.join(n.split('.')), open(generate_umath_py, 'U'), generate_umath_py, ('.py', 'U', 1)) header_dir = 'include/numpy' # this is relative to config.path_in_package cocache = CallOnceOnly() def generate_config_h(ext, build_dir): target = join(build_dir, header_dir, 'config.h') d = os.path.dirname(target) if not os.path.exists(d): os.makedirs(d) if newer(__file__, target): config_cmd = config.get_config_cmd() log.info('Generating %s', target) # Check sizeof moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir) # Check math library and C99 math funcs availability mathlibs = check_mathlib(config_cmd) moredefs.append(('MATHLIB', ','.join(mathlibs))) check_math_capabilities(config_cmd, moredefs, mathlibs) moredefs.extend(cocache.check_ieee_macros(config_cmd)[0]) moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0]) # Signal check if is_npy_no_signal(): moredefs.append('__NPY_PRIVATE_NO_SIGNAL') # Windows checks if sys.platform == 'win32' or os.name == 'nt': win32_checks(moredefs) # C99 restrict keyword moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict())) # Inline check inline = config_cmd.check_inline() # Check whether we need our own wide character support if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']): PYTHON_HAS_UNICODE_WIDE = True else: PYTHON_HAS_UNICODE_WIDE = False if 
# Packed line 2: continuation of generate_config_h.  Appends the
# ENABLE_SEPARATE_COMPILATION / NPY_RELAXED_STRIDES_CHECKING / long-double
# representation / NPY_PY3K defines, then either writes config.h (with the
# inline keyword shim and the npy_config.h inclusion guard) or, when the
# file is up to date, re-reads MATHLIB back out of the existing config.h.
ENABLE_SEPARATE_COMPILATION: moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1)) if NPY_RELAXED_STRIDES_CHECKING: moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1)) # Get long double representation if sys.platform != 'darwin': rep = check_long_double_representation(config_cmd) if rep in ['INTEL_EXTENDED_12_BYTES_LE', 'INTEL_EXTENDED_16_BYTES_LE', 'MOTOROLA_EXTENDED_12_BYTES_BE', 'IEEE_QUAD_LE', 'IEEE_QUAD_BE', 'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE', 'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']: moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1)) else: raise ValueError("Unrecognized long double format: %s" % rep) # Py3K check if sys.version_info[0] == 3: moredefs.append(('NPY_PY3K', 1)) # Generate the config.h file from moredefs target_f = open(target, 'w') for d in moredefs: if isinstance(d, str): target_f.write('#define %s\n' % (d)) else: target_f.write('#define %s %s\n' % (d[0], d[1])) # define inline to our keyword, or nothing target_f.write('#ifndef __cplusplus\n') if inline == 'inline': target_f.write('/* #undef inline */\n') else: target_f.write('#define inline %s\n' % inline) target_f.write('#endif\n') # add the guard to make sure config.h is never included directly, # but always through npy_config.h target_f.write(""" #ifndef _NPY_NPY_CONFIG_H_ #error config.h should never be included directly, include npy_config.h instead #endif """) target_f.close() print('File:', target) target_f = open(target) print(target_f.read()) target_f.close() print('EOF') else: mathlibs = [] target_f = open(target) for line in target_f: s = '#define MATHLIB' if line.startswith(s): value = line[len(s):].strip() if value: mathlibs.extend(value.split(',')) target_f.close() # Ugly: this can be called within a library and not an extension, # in which case there is no libraries attributes (and none is # needed). 
# Packed line 3: end of generate_config_h (propagate mathlibs onto the
# extension and register the generated include dir), followed by
# generate_numpyconfig_h(), which writes _numpyconfig.h with the
# NPY_NO_SIGNAL / NPY_NO_SMP, IEEE/complex, separate-compilation, C99
# format, visibility and C API/ABI version defines.
if hasattr(ext, 'libraries'): ext.libraries.extend(mathlibs) incl_dir = os.path.dirname(target) if incl_dir not in config.numpy_include_dirs: config.numpy_include_dirs.append(incl_dir) return target def generate_numpyconfig_h(ext, build_dir): """Depends on config.h: generate_config_h has to be called before !""" # put private include directory in build_dir on search path # allows using code generation in headers headers config.add_include_dirs(join(build_dir, "src", "private")) target = join(build_dir, header_dir, '_numpyconfig.h') d = os.path.dirname(target) if not os.path.exists(d): os.makedirs(d) if newer(__file__, target): config_cmd = config.get_config_cmd() log.info('Generating %s', target) # Check sizeof ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir) if is_npy_no_signal(): moredefs.append(('NPY_NO_SIGNAL', 1)) if is_npy_no_smp(): moredefs.append(('NPY_NO_SMP', 1)) else: moredefs.append(('NPY_NO_SMP', 0)) mathlibs = check_mathlib(config_cmd) moredefs.extend(cocache.check_ieee_macros(config_cmd)[1]) moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1]) if ENABLE_SEPARATE_COMPILATION: moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1)) if NPY_RELAXED_STRIDES_CHECKING: moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1)) # Check wether we can use inttypes (C99) formats if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']): moredefs.append(('NPY_USE_C99_FORMATS', 1)) # visibility check hidden_visibility = visibility_define(config_cmd) moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility)) # Add the C API/ABI versions moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION)) moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION)) # Add moredefs to header target_f = open(target, 'w') for d in moredefs: if isinstance(d, str): target_f.write('#define %s\n' % (d)) else: target_f.write('#define %s %s\n' % (d[0], d[1])) # Define __STDC_FORMAT_MACROS target_f.write(""" #ifndef __STDC_FORMAT_MACROS #define 
# Packed line 4: tail of generate_numpyconfig_h (dump the header, register
# it as a data file), the generate_api_func() factory that produces the
# generate_numpy_api / generate_ufunc_api source generators, and the bulk
# of the include-dir, data-file and define-macro registration for the core
# headers, ending with the header dependency list `deps` and the start of
# the "_dummy" module commentary.
__STDC_FORMAT_MACROS 1 #endif """) target_f.close() # Dump the numpyconfig.h header to stdout print('File: %s' % target) target_f = open(target) print(target_f.read()) target_f.close() print('EOF') config.add_data_files((header_dir, target)) return target def generate_api_func(module_name): def generate_api(ext, build_dir): script = join(codegen_dir, module_name + '.py') sys.path.insert(0, codegen_dir) try: m = __import__(module_name) log.info('executing %s', script) h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir)) finally: del sys.path[0] config.add_data_files((header_dir, h_file), (header_dir, doc_file)) return (h_file,) return generate_api generate_numpy_api = generate_api_func('generate_numpy_api') generate_ufunc_api = generate_api_func('generate_ufunc_api') config.add_include_dirs(join(local_dir, "src", "private")) config.add_include_dirs(join(local_dir, "src")) config.add_include_dirs(join(local_dir)) config.add_data_files('include/numpy/*.h') config.add_include_dirs(join('src', 'npymath')) config.add_include_dirs(join('src', 'multiarray')) config.add_include_dirs(join('src', 'umath')) config.add_include_dirs(join('src', 'npysort')) config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")]) config.add_define_macros([("_FILE_OFFSET_BITS", "64")]) config.add_define_macros([('_LARGEFILE_SOURCE', '1')]) config.add_define_macros([('_LARGEFILE64_SOURCE', '1')]) config.numpy_include_dirs.extend(config.paths('include')) deps = [join('src', 'npymath', '_signbit.c'), join('include', 'numpy', '*object.h'), join(codegen_dir, 'genapi.py'), ] ####################################################################### # dummy module # ####################################################################### # npymath needs the config.h and numpyconfig.h files to be generated, but # build_clib cannot handle generate_config_h and generate_numpyconfig_h # (don't ask). 
# Packed line 5: the '_dummy' extension (forces the config headers to be
# generated before build_clib runs), then the npymath installed library:
# get_mathlib_info() verifies the toolchain links and fills subst_dict for
# the npymath.ini.in / mlib.ini.in npy-pkg-config templates.
Because clib are generated before extensions, we have to # explicitly add an extension which has generate_config_h and # generate_numpyconfig_h as sources *before* adding npymath. config.add_extension('_dummy', sources=[join('src', 'dummymodule.c'), generate_config_h, generate_numpyconfig_h, generate_numpy_api] ) ####################################################################### # npymath library # ####################################################################### subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")]) def get_mathlib_info(*args): # Another ugly hack: the mathlib info is known once build_src is run, # but we cannot use add_installed_pkg_config here either, so we only # update the substition dictionary during npymath build config_cmd = config.get_config_cmd() # Check that the toolchain works, to fail early if it doesn't # (avoid late errors with MATHLIB which are confusing if the # compiler does not work). st = config_cmd.try_link('int main(void) { return 0;}') if not st: raise RuntimeError("Broken toolchain: cannot link a simple C program") mlibs = check_mathlib(config_cmd) posix_mlib = ' '.join(['-l%s' % l for l in mlibs]) msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs]) subst_dict["posix_mathlib"] = posix_mlib subst_dict["msvc_mathlib"] = msvc_mlib npymath_sources = [join('src', 'npymath', 'npy_math.c.src'), join('src', 'npymath', 'ieee754.c.src'), join('src', 'npymath', 'npy_math_complex.c.src'), join('src', 'npymath', 'halffloat.c') ] config.add_installed_library('npymath', sources=npymath_sources + [get_mathlib_info], install_dir='lib') config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config", subst_dict) config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict) ####################################################################### # npysort library # ####################################################################### # This library is created for the build but it is not installed 
# Packed line 6: the build-only npysort library, then the start of the
# multiarray section: generate_multiarray_templated_sources() (renders the
# .c.src templates into the build dir) and the beginning of the
# multiarray_deps header list.
npysort_sources = [join('src', 'npysort', 'quicksort.c.src'), join('src', 'npysort', 'mergesort.c.src'), join('src', 'npysort', 'heapsort.c.src'), join('src', 'private', 'npy_partition.h.src'), join('src', 'npysort', 'selection.c.src'), join('src', 'private', 'npy_binsearch.h.src'), join('src', 'npysort', 'binsearch.c.src'), ] config.add_library('npysort', sources=npysort_sources, include_dirs=[]) ####################################################################### # multiarray module # ####################################################################### # Multiarray version: this function is needed to build foo.c from foo.c.src # when foo.c is included in another file and as such not in the src # argument of build_ext command def generate_multiarray_templated_sources(ext, build_dir): from numpy.distutils.misc_util import get_cmd subpath = join('src', 'multiarray') sources = [join(local_dir, subpath, 'scalartypes.c.src'), join(local_dir, subpath, 'arraytypes.c.src'), join(local_dir, subpath, 'nditer_templ.c.src'), join(local_dir, subpath, 'lowlevel_strided_loops.c.src'), join(local_dir, subpath, 'einsum.c.src'), join(local_dir, 'src', 'private', 'templ_common.h.src') ] # numpy.distutils generate .c from .c.src in weird directories, we have # to add them there as they depend on the build_dir config.add_include_dirs(join(build_dir, subpath)) cmd = get_cmd('build_src') cmd.ensure_finalized() cmd.template_sources(sources, ext) multiarray_deps = [ join('src', 'multiarray', 'arrayobject.h'), join('src', 'multiarray', 'arraytypes.h'), join('src', 'multiarray', 'array_assign.h'), join('src', 'multiarray', 'buffer.h'), join('src', 'multiarray', 'calculation.h'), join('src', 'multiarray', 'common.h'), join('src', 'multiarray', 'convert_datatype.h'), join('src', 'multiarray', 'convert.h'), join('src', 'multiarray', 'conversion_utils.h'), join('src', 'multiarray', 'ctors.h'), join('src', 'multiarray', 'descriptor.h'), join('src', 'multiarray', 'getset.h'), join('src', 
# Packed line 7: remainder of the multiarray_deps header list (multiarray
# and private headers plus the public include/numpy headers; npysort and
# npymath sources are appended as extra dependencies) and the start of the
# multiarray_src source list.
'multiarray', 'hashdescr.h'), join('src', 'multiarray', 'iterators.h'), join('src', 'multiarray', 'mapping.h'), join('src', 'multiarray', 'methods.h'), join('src', 'multiarray', 'multiarraymodule.h'), join('src', 'multiarray', 'nditer_impl.h'), join('src', 'multiarray', 'numpymemoryview.h'), join('src', 'multiarray', 'number.h'), join('src', 'multiarray', 'numpyos.h'), join('src', 'multiarray', 'refcount.h'), join('src', 'multiarray', 'scalartypes.h'), join('src', 'multiarray', 'sequence.h'), join('src', 'multiarray', 'shape.h'), join('src', 'multiarray', 'ucsnarrow.h'), join('src', 'multiarray', 'usertypes.h'), join('src', 'multiarray', 'vdot.h'), join('src', 'private', 'npy_config.h'), join('src', 'private', 'templ_common.h.src'), join('src', 'private', 'lowlevel_strided_loops.h'), join('src', 'private', 'mem_overlap.h'), join('src', 'private', 'npy_extint128.h'), join('include', 'numpy', 'arrayobject.h'), join('include', 'numpy', '_neighborhood_iterator_imp.h'), join('include', 'numpy', 'npy_endian.h'), join('include', 'numpy', 'arrayscalars.h'), join('include', 'numpy', 'noprefix.h'), join('include', 'numpy', 'npy_interrupt.h'), join('include', 'numpy', 'npy_3kcompat.h'), join('include', 'numpy', 'npy_math.h'), join('include', 'numpy', 'halffloat.h'), join('include', 'numpy', 'npy_common.h'), join('include', 'numpy', 'npy_os.h'), join('include', 'numpy', 'utils.h'), join('include', 'numpy', 'ndarrayobject.h'), join('include', 'numpy', 'npy_cpu.h'), join('include', 'numpy', 'numpyconfig.h'), join('include', 'numpy', 'ndarraytypes.h'), join('include', 'numpy', 'npy_1_7_deprecated_api.h'), join('include', 'numpy', '_numpyconfig.h.in'), # add library sources as distuils does not consider libraries # dependencies ] + npysort_sources + npymath_sources multiarray_src = [ join('src', 'multiarray', 'alloc.c'), join('src', 'multiarray', 'arrayobject.c'), join('src', 'multiarray', 'arraytypes.c.src'), join('src', 'multiarray', 'array_assign.c'), join('src', 'multiarray', 
# Packed line 8: remainder of multiarray_src, then the blas_opt probe:
# when HAVE_CBLAS is available the cblasfuncs/python_xerbla sources (and
# the Accelerate sgemv fix where applicable) are appended and extra_info
# is set from blas_info.
'array_assign_scalar.c'), join('src', 'multiarray', 'array_assign_array.c'), join('src', 'multiarray', 'buffer.c'), join('src', 'multiarray', 'calculation.c'), join('src', 'multiarray', 'compiled_base.c'), join('src', 'multiarray', 'common.c'), join('src', 'multiarray', 'convert.c'), join('src', 'multiarray', 'convert_datatype.c'), join('src', 'multiarray', 'conversion_utils.c'), join('src', 'multiarray', 'ctors.c'), join('src', 'multiarray', 'datetime.c'), join('src', 'multiarray', 'datetime_strings.c'), join('src', 'multiarray', 'datetime_busday.c'), join('src', 'multiarray', 'datetime_busdaycal.c'), join('src', 'multiarray', 'descriptor.c'), join('src', 'multiarray', 'dtype_transfer.c'), join('src', 'multiarray', 'einsum.c.src'), join('src', 'multiarray', 'flagsobject.c'), join('src', 'multiarray', 'getset.c'), join('src', 'multiarray', 'hashdescr.c'), join('src', 'multiarray', 'item_selection.c'), join('src', 'multiarray', 'iterators.c'), join('src', 'multiarray', 'lowlevel_strided_loops.c.src'), join('src', 'multiarray', 'mapping.c'), join('src', 'multiarray', 'methods.c'), join('src', 'multiarray', 'multiarraymodule.c'), join('src', 'multiarray', 'nditer_templ.c.src'), join('src', 'multiarray', 'nditer_api.c'), join('src', 'multiarray', 'nditer_constr.c'), join('src', 'multiarray', 'nditer_pywrap.c'), join('src', 'multiarray', 'number.c'), join('src', 'multiarray', 'numpymemoryview.c'), join('src', 'multiarray', 'numpyos.c'), join('src', 'multiarray', 'refcount.c'), join('src', 'multiarray', 'sequence.c'), join('src', 'multiarray', 'shape.c'), join('src', 'multiarray', 'scalarapi.c'), join('src', 'multiarray', 'scalartypes.c.src'), join('src', 'multiarray', 'usertypes.c'), join('src', 'multiarray', 'ucsnarrow.c'), join('src', 'multiarray', 'vdot.c'), join('src', 'private', 'templ_common.h.src'), join('src', 'private', 'mem_overlap.c'), ] blas_info = get_info('blas_opt', 0) if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []): extra_info 
# Packed line 9: single-file fallback when separate compilation is off,
# registration of the 'multiarray' extension, then the umath section
# helpers: generate_umath_templated_sources() and generate_umath_c()
# (renders __umath_generated.c via the loaded generate_umath module).
= blas_info multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'), join('src', 'multiarray', 'python_xerbla.c'), ]) if uses_accelerate_framework(blas_info): multiarray_src.extend(get_sgemv_fix()) else: extra_info = {} if not ENABLE_SEPARATE_COMPILATION: multiarray_deps.extend(multiarray_src) multiarray_src = [join('src', 'multiarray', 'multiarraymodule_onefile.c')] multiarray_src.append(generate_multiarray_templated_sources) config.add_extension('multiarray', sources=multiarray_src + [generate_config_h, generate_numpyconfig_h, generate_numpy_api, join(codegen_dir, 'generate_numpy_api.py'), join('*.py')], depends=deps + multiarray_deps, libraries=['npymath', 'npysort'], extra_info=extra_info) ####################################################################### # umath module # ####################################################################### # umath version: this function is needed to build foo.c from foo.c.src # when foo.c is included in another file and as such not in the src # argument of build_ext command def generate_umath_templated_sources(ext, build_dir): from numpy.distutils.misc_util import get_cmd subpath = join('src', 'umath') sources = [ join(local_dir, subpath, 'loops.h.src'), join(local_dir, subpath, 'loops.c.src'), join(local_dir, subpath, 'scalarmath.c.src'), join(local_dir, subpath, 'simd.inc.src')] # numpy.distutils generate .c from .c.src in weird directories, we have # to add them there as they depend on the build_dir config.add_include_dirs(join(build_dir, subpath)) cmd = get_cmd('build_src') cmd.ensure_finalized() cmd.template_sources(sources, ext) def generate_umath_c(ext, build_dir): target = join(build_dir, header_dir, '__umath_generated.c') dir = os.path.dirname(target) if not os.path.exists(dir): os.makedirs(dir) script = generate_umath_py if newer(script, target): f = open(target, 'w') f.write(generate_umath.make_code(generate_umath.defdict, generate_umath.__file__)) f.close() return [] umath_src = [ join('src', 
# Packed line 10: umath source/dependency lists and the 'umath' extension,
# followed by the umath_tests, test_rational and struct_ufunc_test
# extensions.
'umath', 'umathmodule.c'), join('src', 'umath', 'reduction.c'), join('src', 'umath', 'funcs.inc.src'), join('src', 'umath', 'simd.inc.src'), join('src', 'umath', 'loops.h.src'), join('src', 'umath', 'loops.c.src'), join('src', 'umath', 'ufunc_object.c'), join('src', 'umath', 'scalarmath.c.src'), join('src', 'umath', 'ufunc_type_resolution.c')] umath_deps = [ generate_umath_py, join('src', 'multiarray', 'common.h'), join('src', 'private', 'templ_common.h.src'), join('src', 'umath', 'simd.inc.src'), join(codegen_dir, 'generate_ufunc_api.py'), join('src', 'private', 'ufunc_override.h')] + npymath_sources if not ENABLE_SEPARATE_COMPILATION: umath_deps.extend(umath_src) umath_src = [join('src', 'umath', 'umathmodule_onefile.c')] umath_src.append(generate_umath_templated_sources) umath_src.append(join('src', 'umath', 'funcs.inc.src')) umath_src.append(join('src', 'umath', 'simd.inc.src')) config.add_extension('umath', sources=umath_src + [generate_config_h, generate_numpyconfig_h, generate_umath_c, generate_ufunc_api], depends=deps + umath_deps, libraries=['npymath'], ) ####################################################################### # umath_tests module # ####################################################################### config.add_extension('umath_tests', sources=[join('src', 'umath', 'umath_tests.c.src')]) ####################################################################### # custom rational dtype module # ####################################################################### config.add_extension('test_rational', sources=[join('src', 'umath', 'test_rational.c.src')]) ####################################################################### # struct_ufunc_test module # ####################################################################### config.add_extension('struct_ufunc_test', sources=[join('src', 'umath', 'struct_ufunc_test.c.src')]) ####################################################################### # multiarray_tests module # 
# Packed line 11: the multiarray_tests and operand_flag_tests extensions,
# the tests data directories, the svn version stamp, and the final
# return of the assembled Configuration.
####################################################################### config.add_extension('multiarray_tests', sources=[join('src', 'multiarray', 'multiarray_tests.c.src'), join('src', 'private', 'mem_overlap.c')], depends=[join('src', 'private', 'mem_overlap.h'), join('src', 'private', 'npy_extint128.h')]) ####################################################################### # operand_flag_tests module # ####################################################################### config.add_extension('operand_flag_tests', sources=[join('src', 'umath', 'operand_flag_tests.c.src')]) config.add_data_dir('tests') config.add_data_dir('tests/data') config.make_svn_version_py() return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the top-level ``PYME`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('PYME', parent_package, top_path)

    # (method name, argument) pairs, kept in the original registration
    # order so subpackages and their data directories stay adjacent.
    entries = [
        ('add_subpackage', 'Analysis'),
        ('add_subpackage', 'Acquire'),
        ('add_subpackage', 'DSView'),
        ('add_subpackage', 'ParallelTasks'),
        ('add_subpackage', 'IO'),
        ('add_subpackage', 'Deconv'),
        ('add_subpackage', 'simulation'),
        ('add_subpackage', 'misc'),
        ('add_subpackage', 'LMVis'),
        ('add_subpackage', 'ui'),
        ('add_subpackage', 'util'),
        ('add_subpackage', 'util.shmarray'),
        ('add_subpackage', 'util.mProfile'),
        ('add_subpackage', 'util.fProfile'),
        ('add_data_dir', 'util/fProfile/html'),
        ('add_subpackage', 'localization'),
        ('add_subpackage', 'recipes'),
        ('add_data_dir', 'recipes/Recipes'),
        ('add_subpackage', 'cluster'),
        ('add_data_dir', 'cluster/clusterUI'),
        ('add_subpackage', 'resources'),
        ('add_subpackage', 'contrib'),
        ('add_subpackage', 'experimental'),
        ('add_subpackage', 'tileviewer'),
    ]
    for method, arg in entries:
        getattr(config, method)(arg)

    # Script installation is handled elsewhere (entry points in the conda
    # recipe's meta.yaml); this branch is intentionally disabled.
    if False:  # not 'CONDA_BUILD' in os.environ.keys():
        if sys.platform == 'win32':
            config.add_scripts('scripts/*')
        else:
            # don't add .cmd files
            config.add_scripts('scripts/*.py')

    config.get_version()
    return config
def configuration(parent_package='', top_path=None):
    """Register the ``gnu2ms`` package and its scons build script."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('gnu2ms', parent_package, top_path)
    cfg.add_sconscript('SConstruct')
    return cfg
def configuration(parent_package='', top_path=None):
    """Configure ``utils``: CBLAS-backed and plain Cython extensions."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('utils', parent_package, top_path)
    config.add_subpackage('sparsetools')

    cblas_libs, blas_info = get_blas_info()
    # Pull the compile flags / include dirs out of blas_info first so they
    # are not passed twice when **blas_info is expanded below.
    cblas_compile_args = blas_info.pop('extra_compile_args', [])
    cblas_includes = [join('..', 'src', 'cblas'),
                      numpy.get_include(),
                      blas_info.pop('include_dirs', [])]

    libraries = []
    if os.name == 'posix':
        libraries.append('m')
        cblas_libs.append('m')

    numpy_inc = numpy.get_include()

    # (extension name, keyword arguments), in registration order.
    ext_specs = [
        ('sparsefuncs_fast',
         dict(sources=['sparsefuncs_fast.c'],
              libraries=libraries)),
        ('arrayfuncs',
         dict(sources=['arrayfuncs.c'],
              depends=[join('src', 'cholesky_delete.h')],
              libraries=cblas_libs,
              include_dirs=cblas_includes,
              extra_compile_args=cblas_compile_args,
              **blas_info)),
        ('murmurhash',
         dict(sources=['murmurhash.c', join('src', 'MurmurHash3.cpp')],
              include_dirs=['src'])),
        ('lgamma',
         dict(sources=['lgamma.c', join('src', 'gamma.c')],
              include_dirs=['src'],
              libraries=libraries)),
        ('graph_shortest_path',
         dict(sources=['graph_shortest_path.c'],
              include_dirs=[numpy_inc])),
        ('fast_dict',
         dict(sources=['fast_dict.cpp'],
              language="c++",
              include_dirs=[numpy_inc],
              libraries=libraries)),
        ('seq_dataset',
         dict(sources=['seq_dataset.c'],
              include_dirs=[numpy_inc])),
        ('weight_vector',
         dict(sources=['weight_vector.c'],
              include_dirs=cblas_includes,
              libraries=cblas_libs,
              **blas_info)),
        ('random',
         dict(sources=['random.c'],
              include_dirs=[numpy_inc],
              libraries=libraries)),
        ('_logistic_sigmoid',
         dict(sources=['_logistic_sigmoid.c'],
              include_dirs=[numpy_inc],
              libraries=libraries)),
    ]
    for name, kwargs in ext_specs:
        config.add_extension(name, **kwargs)

    return config
def configuration(parent_package='', top_path=None):
    """Configure ``restoration``: phase-unwrapping and denoising extensions."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('restoration', parent_package, top_path)
    config.add_data_dir('tests')

    # Regenerate the C sources from their Cython counterparts first.
    for pyx in ('_unwrap_1d.pyx', '_unwrap_2d.pyx', '_unwrap_3d.pyx',
                '_denoise_cy.pyx', '_nl_means_denoising.pyx'):
        cython([pyx], working_path=base_path)

    numpy_includes = get_numpy_include_dirs()

    # (name, sources, include dirs); the 2-D/3-D unwrappers carry the LJMU
    # reference implementations alongside the Cython wrappers.
    ext_specs = [
        ('_unwrap_1d', ['_unwrap_1d.c'], [numpy_includes]),
        ('_unwrap_2d', ['_unwrap_2d.c', 'unwrap_2d_ljmu.c'],
         [numpy_includes]),
        ('_unwrap_3d', ['_unwrap_3d.c', 'unwrap_3d_ljmu.c'],
         [numpy_includes]),
        ('_denoise_cy', ['_denoise_cy.c'], [numpy_includes, '../_shared']),
        ('_nl_means_denoising', ['_nl_means_denoising.c'],
         [numpy_includes]),
    ]
    for name, sources, include_dirs in ext_specs:
        config.add_extension(name, sources=sources,
                             include_dirs=include_dirs)

    return config
def configuration(parent_package='', top_path=None):
    # Build configuration for scipy.integrate: four Fortran libraries
    # (mach, quadpack, odepack, dop) plus the C/f2py extension modules
    # that wrap them, and two test-only extensions.
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('integrate', parent_package, top_path)

    # Get a local copy of lapack_opt_info
    lapack_opt = dict(get_info('lapack_opt', notfound_action=2))
    # Pop off the libraries list so it can be combined with
    # additional required libraries
    lapack_libs = lapack_opt.pop('libraries', [])

    # Fortran source globs for the bundled libraries and test fixtures.
    mach_src = [join('mach', '*.f')]
    quadpack_src = [join('quadpack', '*.f')]
    odepack_src = [join('odepack', '*.f')]
    dop_src = [join('dop', '*.f')]
    quadpack_test_src = [join('tests', '_test_multivariate.c')]
    odeint_banded_test_src = [join('tests', 'banded5x5.f')]

    # mach is built without Fortran optimisation (noopt flag keyed to this
    # setup file).
    config.add_library('mach', sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('quadpack', sources=quadpack_src)
    config.add_library('odepack', sources=odepack_src)
    config.add_library('dop', sources=dop_src)

    # Extensions
    # quadpack:
    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]
    if 'include_dirs' in lapack_opt:
        # copy before popping so the shared lapack_opt dict from get_info
        # is not mutated for later **lapack_opt expansions
        lapack_opt = dict(lapack_opt)
        include_dirs.extend(lapack_opt.pop('include_dirs'))

    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         libraries=(['quadpack', 'mach'] + lapack_libs),
                         depends=(['quadpack.h', '__quadpack.h']
                                  + quadpack_src + mach_src),
                         include_dirs=include_dirs,
                         **lapack_opt)

    # odepack
    odepack_libs = ['odepack', 'mach'] + lapack_libs
    # odepack builds with the numpy deprecated-API macros added on top of
    # the LAPACK options.
    odepack_opts = lapack_opt.copy()
    odepack_opts.update(numpy_nodepr_api)
    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **odepack_opts)

    # vode
    config.add_extension('vode',
                         sources=['vode.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)

    # lsoda
    config.add_extension('lsoda',
                         sources=['lsoda.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)

    # dop
    config.add_extension('_dop',
                         sources=['dop.pyf'],
                         libraries=['dop'],
                         depends=dop_src)

    config.add_extension('_test_multivariate',
                         sources=quadpack_test_src)

    # Fortran+f2py extension module for testing odeint.
    config.add_extension('_test_odeint_banded',
                         sources=odeint_banded_test_src,
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)

    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Configure ``stats``: the statlib Fortran library plus three
    extension modules (statlib wrapper, vonmises_cython, mvn)."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('stats', parent_package, top_path)
    config.add_data_dir('tests')

    fortran_sources = [join('statlib', '*.f')]
    config.add_library('statlib', sources=fortran_sources)

    # f2py wrapper around the statlib routines
    config.add_extension('statlib',
                         sources=['statlib.pyf'],
                         f2py_options=['--no-wrap-functions'],
                         libraries=['statlib'],
                         depends=fortran_sources)

    # FIXME: use cython source
    config.add_extension('vonmises_cython',
                         sources=['vonmises_cython.c'])

    # f2py signature file plus the Fortran implementation
    config.add_extension('mvn',
                         sources=['mvn.pyf', 'mvndst.f'])

    return config
def configuration(parent_package="", top_path=None):
    """
    Config function mainly used to compile C code.
    """
    config = Configuration("", parent_package, top_path)

    # GSE2
    src_dir = os.path.join("obspy", "io", "gse2", "src", "GSE_UTI")
    sources = [os.path.join(src_dir, "gse_functions.c")]
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # MSVC needs the exported symbols listed explicitly (.def file).
        ext_kwargs['export_symbols'] = export_symbols(src_dir,
                                                      'gse_functions.def')
    config.add_extension(_get_lib_name("gse2", add_extension_suffix=False),
                         sources, **ext_kwargs)

    # LIBMSEED
    src_dir = os.path.join("obspy", "io", "mseed", "src")
    sources = [os.path.join(src_dir, "obspy-readbuffer.c")]
    if not EXTERNAL_LIBMSEED:
        # Bundle the shipped libmseed sources when no system lib is used.
        sources += glob.glob(os.path.join(src_dir, "libmseed", "*.c"))
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # needed by libmseed lmplatform.h
        ext_kwargs['define_macros'] = [('WIN32', '1')]
        # get export symbols
        ext_kwargs['export_symbols'] = \
            export_symbols(src_dir, 'libmseed', 'libmseed.def')
        ext_kwargs['export_symbols'] += \
            export_symbols(src_dir, 'obspy-readbuffer.def')
    if EXTERNAL_LIBMSEED:
        ext_kwargs['libraries'] = ['mseed']
    config.add_extension(_get_lib_name("mseed", add_extension_suffix=False),
                         sources, **ext_kwargs)

    # SEGY
    src_dir = os.path.join("obspy", "io", "segy", "src")
    sources = [os.path.join(src_dir, "ibm2ieee.c")]
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # get export symbols
        ext_kwargs['export_symbols'] = export_symbols(src_dir, 'libsegy.def')
    config.add_extension(_get_lib_name("segy", add_extension_suffix=False),
                         sources, **ext_kwargs)

    # SIGNAL
    src_dir = os.path.join("obspy", "signal", "src")
    sources = glob.glob(os.path.join(src_dir, "*.c"))
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # get export symbols
        ext_kwargs['export_symbols'] = export_symbols(src_dir,
                                                      'libsignal.def')
    config.add_extension(_get_lib_name("signal", add_extension_suffix=False),
                         sources, **ext_kwargs)

    # EVALRESP
    src_dir = os.path.join("obspy", "signal", "src")
    if EXTERNAL_EVALRESP:
        # only the obspy glue code; link against the system evresp
        sources = glob.glob(os.path.join(src_dir, "evalresp", "_obspy*.c"))
    else:
        sources = glob.glob(os.path.join(src_dir, "evalresp", "*.c"))
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # needed by evalresp evresp.h
        ext_kwargs['define_macros'] = [('WIN32', '1')]
        # get export symbols
        ext_kwargs['export_symbols'] = export_symbols(src_dir,
                                                      'libevresp.def')
    if EXTERNAL_EVALRESP:
        ext_kwargs['libraries'] = ['evresp']
    config.add_extension(_get_lib_name("evresp", add_extension_suffix=False),
                         sources, **ext_kwargs)

    # TAU
    src_dir = os.path.join("obspy", "taup", "src")
    sources = [os.path.join(src_dir, "inner_tau_loops.c")]
    # compiler specific options
    ext_kwargs = {}
    if IS_MSVC:
        # get export symbols
        ext_kwargs['export_symbols'] = export_symbols(src_dir, 'libtau.def')
    config.add_extension(_get_lib_name("tau", add_extension_suffix=False),
                         sources, **ext_kwargs)

    add_data_files(config)

    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the scipy.stats subpackage."""
    from numpy.distutils.misc_util import Configuration
    import numpy as np

    config = Configuration('stats', parent_package, top_path)
    config.add_data_dir('tests')

    # Fortran statlib helper library and its f2py wrapper.
    statlib_sources = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_sources)
    config.add_extension(
        'statlib',
        sources=['statlib.pyf'],
        f2py_options=['--no-wrap-functions'],
        libraries=['statlib'],
        depends=statlib_sources,
    )

    # Cython-generated C extension.
    config.add_extension('_stats', sources=['_stats.c'])

    # Multivariate normal routines (f2py signature + Fortran source).
    config.add_extension('mvn', sources=['mvn.pyf', 'mvndst.f'])

    # Sobol sequences plus their direction-number data file.
    config.add_extension('_sobol', sources=['_sobol.c'])
    config.add_data_files('_sobol_direction_numbers.npz')

    # BiasedUrn C++ extension.
    config.add_data_files('biasedurn.pxd')
    from _generate_pyx import isNPY_OLD
    if isNPY_OLD():
        biasedurn_libs = []
        biasedurn_libdirs = []
    else:
        # newer NumPy keeps the random C library under random/lib
        biasedurn_libs = ['npyrandom']
        biasedurn_libdirs = [join(np.get_include(),
                                  '..', '..', 'random', 'lib')]

    if system() == 'Darwin':
        cxx_flags = ['-Wno-narrowing']
    else:
        cxx_flags = []

    ext = config.add_extension(
        'biasedurn',
        sources=[
            'biasedurn.cxx',
            'biasedurn/impls.cpp',
            'biasedurn/fnchyppr.cpp',
            'biasedurn/wnchyppr.cpp',
            'biasedurn/stoc1.cpp',
            'biasedurn/stoc3.cpp'],
        include_dirs=[np.get_include()],
        library_dirs=biasedurn_libdirs,
        libraries=biasedurn_libs,
        define_macros=[('R_BUILD', None)],
        language='c++',
        extra_compile_args=cxx_flags,
        depends=['biasedurn/stocR.h'],
    )
    ext._pre_build_hook = pre_build_hook

    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the numpy.distutils subpackage."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('distutils', parent_package, top_path)

    # Subpackages shipped inside numpy.distutils.
    for subpackage in ('command', 'fcompiler'):
        config.add_subpackage(subpackage)

    config.add_data_dir('tests')

    # Non-Python payload installed alongside the package.
    for data_file in ('site.cfg', 'mingw/gfortran_vs2003_hack.c'):
        config.add_data_files(data_file)

    # Generate __config__.py at build time.
    config.make_config_py()
    return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy build configuration for the scons-based build."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.misc_util import scons_generate_config_py

    pkgname = 'numpy'
    config = Configuration(pkgname, parent_package, top_path,
                           setup_name='setupscons.py')

    # First-level subpackages, registered in the original order.
    for name in ('distutils', 'testing', 'f2py', 'core', 'lib',
                 'oldnumeric', 'numarray', 'fft', 'linalg', 'random',
                 'ma'):
        config.add_subpackage(name)

    config.add_data_dir('doc')
    config.add_data_dir('tests')

    def add_config(*args, **kw):
        # Generate __config__, handle inplace issues.
        scons_cmd = kw['scons_cmd']
        if scons_cmd.inplace:
            target = pjoin(kw['pkg_name'], '__config__.py')
        else:
            target = pjoin(scons_cmd.build_lib, kw['pkg_name'],
                           '__config__.py')
        scons_generate_config_py(target)

    config.add_sconscript(None, post_hook=add_config)
    return config