def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the ``tpmc`` package.

    Registers the cmake find-module, the generated headers, include
    directories, the installed ``tpmc_tables`` static library and the
    ``tpmc-config`` script.  Relies on the module-level ``constants``
    dict and the ``generate_lut`` source generator.
    """
    from numpy.distutils.misc_util import Configuration
    from os.path import join

    config = Configuration('tpmc', parent_package, top_path)
    ## cmake files
    config.add_data_files('FindTpmc.cmake')
    ## header files
    # (removed a stray debug print of config.include_dirs and an unused
    #  `get_info` import)
    config.add_data_files(constants['header_dir'] + '/*.hh')
    config.add_include_dirs(join('tpmc', 'include'))
    config.add_include_dirs(join('tpmc', 'lut'))
    ## libtpmc_tables — includes the generated lookup-table source
    config.add_installed_library(
        'tpmc_tables',
        sources=[
            'tpmc/src/marchingcubestables.cc',
            'tpmc/src/geometrytype.cc',
            'tpmc/src/referenceelements.cc',
            'tpmc/src/aberthfunctor.cc',
            generate_lut,
        ],
        install_dir=constants['lib_dir'],
        build_info={'extra_compiler_args': ['-std=c++11']})
    ## read version info from file
    config.get_version('tpmc/__version__.py')
    ## tpmc-config script
    config.add_scripts('tpmc-config')
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the ``prec`` package.

    Forces the gnu95 Fortran compiler, builds the ``mylib`` and
    ``print_lib`` Fortran helper libraries and the ``prec`` extension.
    Relies on module-level ``sys`` and ``get_platform``.
    """
    from numpy.distutils.misc_util import Configuration

    sys.argv.extend(['config_fc', '--fcompiler=gnu95'])
    config = Configuration('prec', parent_package, top_path)
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=False,
    )
    # Compiled Fortran module files end up under build/temp<platform>.
    plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
    inc_dir = ['build/temp%s' % plat_specifier]
    src = ['sparsekit.f90', 'dlauc1.f', 'dgeqpw.f', 'dgeqpc.f',
           'dtrrnk.f', 'dlasmx.f', 'dtrqxc.f', 'dgeqpx.f', 'dtrqpx.f',
           'dgeqpb.f', 'dsort.f', 'blassm.f', 'matvec.f', 'unary.f',
           'formats.f']
    config.add_include_dirs(inc_dir)
    config.add_library('mylib', sources=src)
    print_src = ['putstrmodule.F90', 'dispmodule.f90', 'fast_direct.f90']
    config.add_library('print_lib', sources=print_src)
    local_src = ['prec.f90']
    config.add_extension('prec',
                         sources=local_src,
                         depends=['mylib', 'print_lib'],
                         libraries=['mylib', 'print_lib', 'lapack', 'blas'])
    # Removed: unused `get_info` import and a dead `package_dir = {...},`
    # assignment (the trailing comma made it an unused one-element tuple).
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the ``cnn`` package.

    Wires up the OpenCL-backed ``_utils`` extension and registers the
    ``.cl`` kernel files via the module-level ``add_opencl_files`` helper.
    Relies on module-level ``os``, ``system_info`` and ``add_opencl_files``.
    """
    # Create package configuration
    config = Configuration('cnn', parent_package, top_path)
    config.add_data_dir('tests')

    # Add Cython module extension; the package directory itself holds the
    # C headers the sources need.
    config.add_include_dirs(config.name.replace('.', os.sep))
    info = system_info()
    opts = info.calc_extra_info()
    if info.cp.has_section('opencl'):
        # Use the [opencl] section of site.cfg for headers and libraries.
        info.section = 'opencl'
        config.add_include_dirs(info.get_include_dirs())
        opts['library_dirs'] = info.get_lib_dirs()
        opts['extra_link_args'] = ['-l%s' % s for s in info.get_libraries()]
    else:
        # Fall back to linking the system OpenCL library.  The original code
        # assigned this list to an unused local (`opencl_link_args`), so the
        # fallback never reached the extension; store it in ``opts`` instead.
        opts['extra_link_args'] = ['-lOpenCL']
    config.add_extension('_utils',
                         sources=['_utils.c', 'utils.c', 'opencl_utils.c'],
                         **opts)

    # Add OpenCL kernel files
    add_opencl_files(
        config, 'opencl',
        ['test1d.cl', 'convolve_image.cl', 'relu_max_pool_image.cl'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``tt`` package tree.

    Relies on module-level ``sys``, ``join``, ``get_platform`` and the
    ``PRINT_DIR``/``PRINT_SRC``/``TTFORT_DIR``/``TTFORT_SRC`` constants.
    """
    # Compiled Fortran .mod files land under build/temp<platform>;
    # expose that directory as an include path.
    build_suffix = ".%s-%s" % (get_platform(), sys.version[0:3])
    config = Configuration('tt', parent_package, top_path)
    config.add_include_dirs(['build/temp%s' % build_suffix])
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=False,
    )
    config.add_library('print_lib',
                       sources=[join(PRINT_DIR, name) for name in PRINT_SRC])
    config.add_library('mytt',
                       sources=[join(TTFORT_DIR, name) for name in TTFORT_SRC])
    for subpackage in ('core', 'amen', 'ksl', 'eigb', 'maxvol',
                       'cross', 'optimize', 'utils', 'riemannian'):
        config.add_subpackage(subpackage)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``cam3`` package.

    Probes for a Fortran compiler; when none is found the ``_cam3``
    extension is skipped and only the data file is installed.  Relies on
    module-level ``join`` and ``cam3_gen_source``.
    """
    global config
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.fcompiler import get_default_fcompiler, CompilerNotFound

    build = True
    f90flags = []
    try:
        # figure out which compiler we're going to use
        compiler = get_default_fcompiler()
        # set some fortran compiler-dependent flags
        if compiler == 'gnu95':
            f90flags.append('-fdefault-real-8')
        elif compiler in ('intel', 'intelem'):
            f90flags.append('-132')
            f90flags.append('-r8')
        # Suppress all compiler warnings (avoid huge CI log files)
        f90flags.append('-w')
    except CompilerNotFound:
        print(
            'No Fortran compiler found, not building the CAM3 radiation module!'
        )
        build = False

    config = Configuration(package_name='cam3',
                           parent_name=parent_package,
                           top_path=top_path)
    config.add_data_files(join('data', 'abs_ems_factors_fastvx.c030508.nc'))
    if build:
        config.add_extension(name='_cam3',
                             sources=[cam3_gen_source],
                             extra_f90_compile_args=f90flags,
                             f2py_options=['--quiet'])
        config.add_include_dirs('src')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``util`` package.

    Builds the SWIG-wrapped ``_image_utility`` and ``_resample`` C++
    extensions and the Fortran ``_ctf`` extension, using project-wide
    compiler options from ``arachnid.distutils.compiler``.
    """
    from numpy.distutils.misc_util import Configuration
    from arachnid.distutils.compiler import compiler_options
    import os

    config = Configuration('util', parent_package, top_path)
    compiler_args, compiler_libraries, compiler_defs = compiler_options()[3:]
    flink_args = compiler_args
    here = os.path.dirname(__file__)

    # Prefer the pre-generated SWIG wrappers when present; otherwise hand
    # the .i interface files to the build so SWIG regenerates them.
    def _pick(wrapper, interface):
        return wrapper if os.path.exists(os.path.join(here, wrapper)) else interface

    img_src = _pick('image_utility_wrap.cpp', 'image_utility.i')
    resample_src = _pick('resample_wrap.cpp', 'resample.i')

    config.add_extension('_image_utility',
                         sources=[img_src, 'radon.c'],
                         define_macros=[('__STDC_FORMAT_MACROS', 1)] + compiler_defs,
                         depends=['image_utility.h'],
                         swig_opts=['-c++'],
                         extra_compile_args=compiler_args,
                         extra_link_args=compiler_args,
                         libraries=compiler_libraries)
    config.add_extension('_resample',
                         sources=[resample_src],
                         define_macros=[('__STDC_FORMAT_MACROS', 1)] + compiler_defs,
                         depends=['resample.hpp'],
                         swig_opts=['-c++'],
                         extra_compile_args=compiler_args,
                         extra_link_args=compiler_args,
                         libraries=compiler_libraries)
    config.add_extension('_ctf',
                         sources=['ctf.F90'],
                         define_macros=compiler_defs,
                         extra_compile_args=compiler_args,
                         extra_link_args=flink_args)
    config.add_include_dirs(here)
    return config
def configuration():
    """numpy.distutils configuration for ``sensory_integration_time``.

    Relies on module-level ``os``, ``Configuration``, ``include_dirs``,
    ``library_dirs`` and ``SOURCE_FILE_PATH``.
    """
    config = Configuration(package_name="sensory_integration_time",
                           parent_name=None)
    config.add_include_dirs(*include_dirs)

    # Fortran incomplete-gamma helper library linked into the extension.
    fortran_sources = [
        os.path.join(SOURCE_FILE_PATH, fname)
        for fname in ("setprecision.f90", "someconstants.f90",
                      "gammaError.f90", "incgamNEG.f90")
    ]
    config.add_library(
        name="incgamNEG",
        sources=fortran_sources,
        extra_f90_compile_args=["-O3", "-fPIC", "-Wc-binding-type"])

    config.add_extension(
        name="leaky_integral_calculator",
        language="c",
        sources=["sensory_integration_time/src/leaky_integral_calculator.c"],
        libraries=["m", "dl", "gsl", "gslcblas", "quadmath"],
        include_dirs=include_dirs,
        library_dirs=library_dirs,
        extra_link_args=["-lincgamNEG"])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``tt`` package tree.

    Relies on module-level ``sys``, ``join``, ``get_platform`` and the
    ``PRINT_DIR``/``PRINT_SRC``/``TTFORT_DIR``/``TTFORT_SRC`` constants.
    """
    # Compiled Fortran .mod files are emitted under build/temp<platform>.
    inc_dirs = ['build/temp.%s-%s' % (get_platform(), sys.version[0:3])]
    config = Configuration('tt', parent_package, top_path)
    config.add_include_dirs(inc_dirs)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=False)
    config.add_library('print_lib',
                       sources=[join(PRINT_DIR, f) for f in PRINT_SRC])
    config.add_library('mytt',
                       sources=[join(TTFORT_DIR, f) for f in TTFORT_SRC])
    subpackages = ['core', 'amen', 'ksl', 'eigb', 'maxvol',
                   'cross', 'optimize', 'utils', 'riemannian']
    for name in subpackages:
        config.add_subpackage(name)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for ``pymetis``.

    Bundles METIS 4.0 as a static library and links the ``metis_ext``
    extension against it plus libm.  Relies on module-level ``sys``
    and ``join``.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('pymetis', parent_package, top_path)
    config.add_data_dir('tests')

    metis_root = 'metis-4.0'
    metis_sources = [join(metis_root, '*.c')]
    metis_headers = [join(metis_root, '*.h')]

    config.add_include_dirs([metis_root])
    config.add_library('metis', sources=metis_sources)

    bridge_sources = ['metis_ext.c', 'metis_bridge.c']
    if sys.platform == 'win32':
        # random.c presumably supplies random() on Windows — TODO confirm.
        bridge_sources = bridge_sources + ['random.c']

    config.add_extension('metis_ext',
                         sources=bridge_sources,
                         include_dirs=[metis_root],
                         depends=(metis_sources + metis_headers),
                         extra_compile_args=['-I' + metis_root],
                         libraries=['metis', 'm'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``tt`` package (legacy layout).

    Relies on module-level ``sys`` and ``get_platform``.
    """
    sys.argv.extend(['config_fc', '--fcompiler=gnu95'])
    config = Configuration('tt', parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=False)

    # Fortran .mod files are written to build/temp<platform>.
    inc_dir = ['build/temp.%s-%s' % (get_platform(), sys.version[0:3])]

    tt_dir = 'tt-fort'
    tt_src = [tt_dir + '/' + name for name in
              ('nan.f90', 'timef.f90', 'say.f90', 'rnd.f90', 'ptype.f90',
               'sort.f90', 'trans.f90', 'ort.f90', 'mat.f90',
               'check.f90', 'lr.f90', 'maxvol.f90', 'svd.f90',
               'matrix_util.f90', 'tt.f90', 'ttaux.f90', 'ttop.f90',
               'ttio.f90', 'tts.f90', 'python_conv.f90', 'tt_linalg.f90')]

    print_dir = 'tt-fort/print'
    print_src = [print_dir + '/' + name
                 for name in ('putstrmodule.F90', 'dispmodule.f90')]

    config.add_include_dirs(inc_dir)
    config.add_library('print_lib', sources=print_src)
    config.add_library('mytt', sources=tt_src)
    for pkg in ('core', 'amr', 'kls', 'ksl', 'eigb'):
        config.add_subpackage(pkg)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``slicetiming`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('slicetiming', parent_package, top_path)
    config.add_subpackage('tests')
    # Expose the package directory itself as an include directory.
    package_dir = config.name.replace('.', os.sep)
    config.add_include_dirs(package_dir)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``segmentation`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('segmentation', parent_package, top_path)
    config.add_subpackage('tests')
    # The package directory holds the headers used by the extension.
    config.add_include_dirs(config.name.replace('.', os.sep))
    ext_sources = ['_segmentation.pyx', 'mrf.c']
    config.add_extension('_segmentation', sources=ext_sources)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``segmentation`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('segmentation', parent_package, top_path)
    config.add_data_dir('tests')
    # Expose the package directory as an include path for the C sources.
    pkg_dir = config.name.replace('.', os.sep)
    config.add_include_dirs(pkg_dir)
    config.add_extension('_segmentation',
                         sources=['_segmentation.pyx', 'mrf.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``image`` package."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('image', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    config.add_include_dirs(config.name.replace('.', os.sep))
    # Removed a leftover Python-2-only debug statement (`print config`),
    # which is a SyntaxError on Python 3.
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``lib`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('lib', parent_package, top_path)
    # Headers live in the sibling core package.
    core_headers = join('..', 'core', 'include')
    config.add_include_dirs(core_headers)
    config.add_extension('_compiled_base',
                         sources=[join('src', '_compiled_base.c')])
    config.add_data_dir('tests')
    return config
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the ``image`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration("image", parent_package, top_path)
    config.add_data_dir("tests")
    config.add_data_dir("benchmarks")
    config.add_include_dirs(config.name.replace(".", os.sep))
    config.add_extension("_image", sources=["_image.pyx", "cubic_spline.c"])
    # Removed a leftover Python-2-only debug statement (`print config`),
    # which is a SyntaxError on Python 3.
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``image`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('image', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    config.add_include_dirs(config.name.replace('.', os.sep))
    config.add_extension('image_module',
                         sources=['image_module.pyx', 'cubic_spline.c'])
    # Removed a leftover Python-2-only debug statement (`print config`),
    # which is a SyntaxError on Python 3.
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` package tree."""
    from numpy.distutils.misc_util import Configuration
    import numpy

    config = Configuration('core', parent_package, top_path)
    config.set_options(quiet=True)
    config.add_subpackage('image')
    config.add_subpackage('orient')
    config.add_subpackage('parallel')
    config.add_subpackage('learn')
    try:
        numpy_include = numpy.get_include()
    except AttributeError:
        # Very old numpy releases exposed the headers through a different
        # accessor.  (Was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.)
        numpy_include = numpy.get_numpy_include()  # @UndefinedVariable
    config.add_include_dirs(numpy_include)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``registration`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('registration', parent_package, top_path)
    config.add_subpackage('tests')
    config.add_include_dirs(config.name.replace('.', os.sep))
    # The Cython module plus its plain-C helpers.
    c_helpers = ['joint_histogram.c', 'wichmann_prng.c',
                 'cubic_spline.c', 'polyaffine.c']
    config.add_extension('_registration',
                         sources=['_registration.pyx'] + c_helpers)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for ``micpy``.

    Relies on module-level ``join``, ``numpy_private_dir`` and the
    ``add_multiarray_ext``/``add_mpymath_lib``/``add_umath_ext`` helpers,
    each of which registers one native component on the config.
    """
    # Removed unused `get_numpy_include_dirs` from the import.
    from numpy.distutils.misc_util import Configuration

    config = Configuration('micpy', parent_package, top_path)
    config.add_include_dirs([numpy_private_dir, 'micpy'])
    add_multiarray_ext(config)
    add_mpymath_lib(config)
    add_umath_ext(config)
    config.add_subpackage('random', subpackage_path=join('micpy', 'random'))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``niseg`` package."""
    # Removed unused `get_info` import (numpy.distutils.system_info).
    from numpy.distutils.misc_util import Configuration

    config = Configuration('niseg', parent_package, top_path)
    config.add_subpackage('tests')
    config.add_subpackage('testing')
    # The package directory holds the headers used by the extension.
    config.add_include_dirs(config.name.replace('.', os.sep))
    config.add_extension('_segmentation',
                         sources=['_segmentation.pyx', 'mrf.c', 'pve.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``lib`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('lib', parent_package, top_path)
    # Headers live in the sibling core package.
    config.add_include_dirs(join('..', 'core', 'include'))
    config.add_extension('_compiled_base',
                         sources=[join('src', '_compiled_base.c')])
    for data_dir in ('benchmarks', 'tests'):
        config.add_data_dir(data_dir)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``statistics`` package.

    Relies on module-level ``os`` and ``np`` (numpy).
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('statistics', parent_package, top_path)
    config.add_subpackage('models')
    config.add_subpackage('formula')
    config.add_subpackage('bench')
    config.add_subpackage('tests')
    config.add_include_dirs(config.name.replace('.', os.sep))
    # Cython modules compiled against the numpy C headers.
    config.add_extension('intvol', 'intvol.pyx',
                         include_dirs=[np.get_include()])
    config.add_extension('histogram', 'histogram.pyx',
                         include_dirs=[np.get_include()])
    config.add_extension('_quantile',
                         sources=['_quantile.pyx', 'quantile.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``registration`` package."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('registration', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_include_dirs(config.name.replace('.', os.sep))
    registration_sources = [
        '_registration.pyx',
        'joint_histogram.c',
        'wichmann_prng.c',
        'cubic_spline.c',
        'polyaffine.c',
    ]
    config.add_extension('_registration', sources=registration_sources)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``lib`` package.

    The ``_compiled_base`` C extension is skipped when running on PyPy.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('lib', parent_package, top_path)
    config.add_include_dirs(join('..', 'core', 'include'))
    import sys
    on_pypy = '__pypy__' in sys.builtin_module_names
    if not on_pypy:
        config.add_extension('_compiled_base',
                             sources=[join('src', '_compiled_base.c')])
    config.add_data_dir('benchmarks')
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``statistics`` package.

    Relies on module-level ``os`` and ``np`` (numpy).
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('statistics', parent_package, top_path)
    for pkg in ('models', 'formula', 'bench', 'tests'):
        config.add_subpackage(pkg)
    config.add_include_dirs(config.name.replace('.', os.sep))
    # Cython extensions that need the numpy headers.
    config.add_extension('intvol', 'intvol.pyx',
                         include_dirs=[np.get_include()])
    config.add_extension('histogram', 'histogram.pyx',
                         include_dirs=[np.get_include()])
    config.add_extension('_quantile',
                         sources=['_quantile.pyx', 'quantile.c'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``rect_maxvol`` package.

    Relies on module-level ``sys``, ``get_platform`` and ``Configuration``.
    """
    config = Configuration('rect_maxvol', parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=False)
    # Fortran module files are placed under build/temp<platform>.
    build_tag = ".%s-%s" % (get_platform(), sys.version[0:3])
    config.add_include_dirs(['build/temp%s' % build_tag])
    config.add_subpackage('cython_boost')
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``cam3`` package."""
    import os
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils import fcompiler

    # Figure out which compiler we're going to use and pick matching flags.
    compiler = fcompiler.get_default_fcompiler()
    f90flags = []
    if compiler == 'gnu95':
        f90flags.append('-fdefault-real-8')
    elif compiler in ('intel', 'intelem'):
        f90flags.append('-132')
        f90flags.append('-r8')
    # Suppress all compiler warnings (avoid huge CI log files)
    f90flags.append('-w')

    sourcelist = ['_cam3.pyf',
                  'src/pmgrid.F90',
                  'src/prescribed_aerosols.F90',
                  'src/quicksort.F90',
                  'src/abortutils.F90',
                  'src/shr_kind_mod.F90',
                  'src/absems.F90',
                  'src/wv_saturation.F90',
                  'src/aer_optics.F90',
                  'src/cmparray_mod.F90',
                  'src/shr_const_mod.F90',
                  'src/physconst.F90',
                  'src/pkg_cldoptics.F90',
                  'src/gffgch.F90',
                  'src/chem_surfvals.F90',
                  'src/volcrad.F90',
                  'src/radae.F90',
                  'src/radlw.F90',
                  'src/radsw.F90',
                  'src/crm.F90',
                  'Driver.f90']

    config = Configuration(package_name='cam3',
                           parent_name=parent_package,
                           top_path=top_path)
    config.add_extension(name='_cam3',
                         sources=sourcelist,
                         extra_f90_compile_args=f90flags,
                         f2py_options=['--quiet'])
    config.add_include_dirs('src')
    config.add_data_files(
        os.path.join('data', 'abs_ems_factors_fastvx.c030508.nc'))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``libraries`` package.

    Builds several static helper libraries (platform detection, cephes,
    cblas wrappers, C++ helpers), optionally tuned for SSE2 and compiled
    with ATLAS macros/headers when ATLAS is available.  Relies on
    module-level ``join``, ``copy``, ``get_info``, ``get_default_compiler``,
    ``get_gcc_version`` and ``cpuidpy``.
    """
    config = Configuration('libraries', parent_package, top_path)

    config.add_include_dirs('plat_det')
    config.add_library('plat_det',  # platform detection
                       sources=[join('plat_det', '*.c')],
                       headers=[join('plat_det', '*.h')])

    # SSE2 tuning flags for gcc-style compilers only.
    extra_args = []
    dcomp = get_default_compiler()
    if cpuidpy.has_SSE2 and (dcomp == 'mingw32' or dcomp == 'gcc'):
        extra_args = ['-O3', '-msse2', '-mfpmath=sse', '-malign-double']
        # NOTE(review): lexicographic version compare is fragile for
        # e.g. '4.10' vs '4.2' — preserved from the original logic.
        if get_gcc_version() >= '4.2':
            extra_args.append('-mtune=generic')
        else:
            extra_args.append('-march=pentium4')
            extra_args.append('-mtune=pentium4')

    config.add_include_dirs('cephesd')
    config.add_library('cephesd',
                       sources=[join('cephesd', '*.c')],
                       headers=[join('cephesd', '*.h')],
                       include_dirs=['plat_det'])

    atlas_info = get_info('atlas')
    build_info = {}
    # dict.has_key() was removed in Python 3; use the `in` operator.
    if 'define_macros' in atlas_info:
        build_info['macros'] = copy(atlas_info['define_macros'])
    else:
        build_info['macros'] = []
    build_info['macros'].append(('HAS_ATLAS', None))
    if 'include_dirs' in atlas_info:
        build_info['include_dirs'] = copy(atlas_info['include_dirs'])
    else:
        build_info['include_dirs'] = []
    build_info['include_dirs'].append('plat_det')
    build_info['extra_compiler_args'] = extra_args
    build_info['language'] = 'C'

    config.add_include_dirs('cblas_ext')
    config.add_library('cblas_ext',
                       sources=[join('cblas_ext', '*.c')],
                       headers=[join('cblas_ext', '*.h')],
                       **build_info)

    # sdcpp additionally needs the cblas_ext headers.  NOTE(review):
    # copy() is shallow, so build_info2 shares the include_dirs list with
    # build_info — preserved from the original behavior.
    build_info2 = copy(build_info)
    build_info2['include_dirs'].append('cblas_ext')
    config.add_include_dirs('sdcpp')
    config.add_library('sdcpp',
                       sources=[join('sdcpp', '*.cpp')],
                       headers=[join('sdcpp', '*.h')],
                       **build_info2)
    return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy.distutils configuration for the pysparse tree."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration(None, parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=True)
    config.add_include_dirs(numpy.get_include())
    config.add_include_dirs(
        os.path.join(config.top_path, 'pysparse', 'include'))
    config.add_subpackage('pysparse')
    # Set config.version from the package's version module.
    config.get_version(os.path.join('pysparse', 'version.py'))
    return config
def configuration(parent_package='', top_path=None):
    """Top-level numpy.distutils configuration for the pysparse tree."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration(None, parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=True)
    # numpy headers plus the project's bundled C headers.
    for inc in (numpy.get_include(),
                os.path.join(config.top_path, 'pysparse', 'include')):
        config.add_include_dirs(inc)
    config.add_subpackage('pysparse')
    # Set config.version from pysparse/version.py.
    config.get_version(os.path.join('pysparse', 'version.py'))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``tt`` package.

    Relies on module-level ``sys`` and ``get_platform``.
    """
    sys.argv.extend(['config_fc', '--fcompiler=gnu95'])
    config = Configuration('tt', parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=False)

    # Compiled Fortran .mod files land under build/temp<platform>.
    inc_dir = ['build/temp.%s-%s' % (get_platform(), sys.version[0:3])]

    fort_names = ['nan.f90', 'default.f90', 'timef.f90', 'say.f90',
                  'rnd.f90', 'ptype.f90', 'sort.f90', 'trans.f90',
                  'ort.f90', 'mat.f90', 'check.f90', 'lr.f90',
                  'maxvol.f90', 'svd.f90', 'matrix_util.f90', 'tt.f90',
                  'ttaux.f90', 'ttop.f90', 'ttio.f90', 'tts.f90',
                  'python_conv.f90', 'tt_linalg.f90', 'ttlocsolve.f90',
                  'ttnodeop.f90', 'ttamen.f90']
    tt_src = ['tt-fort/' + name for name in fort_names]
    print_src = ['tt-fort/print/' + name
                 for name in ('putstrmodule.F90', 'dispmodule.f90')]

    config.add_include_dirs(inc_dir)
    config.add_library('print_lib', sources=print_src)
    config.add_library('mytt', sources=tt_src)
    for pkg in ('core', 'amen', 'ksl', 'eigb', 'maxvol', 'cross',
                'optimize', 'utils', 'riemannian'):
        config.add_subpackage(pkg)
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` package."""
    from numpy.distutils.misc_util import Configuration
    from arachnid.distutils.compiler import compiler_options
    import os

    config = Configuration('core', parent_package, top_path)
    compiler_args, compiler_libraries, compiler_defs = compiler_options()[3:]

    # Bundled HEALPix C routines, built as a private helper library.
    healpix_names = ['ang2pix_nest', 'ang2pix_ring', 'pix2ang_nest',
                     'pix2ang_ring', 'nest2ring', 'ring2nest',
                     'nside2npix', 'npix2nside', 'mk_pix2xy', 'mk_xy2pix']
    config.add_library('_healpixlib',
                       sources=['healpix/%s.c' % name for name in healpix_names],
                       depends=['healpix/chealpix.h'])
    config.add_extension('_transformations', sources=['transforms.c'])
    config.add_extension('_healpix',
                         sources=['healpix.c'],
                         libraries=['_healpixlib'])
    config.add_include_dirs(os.path.dirname(__file__))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``labs`` package.

    Relies on module-level ``os`` and ``LIBS``.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info, system_info

    config = Configuration('labs', parent_package, top_path)

    # fff library: C sources plus the python wrapper glue, built as 'cstat'.
    config.add_include_dirs(os.path.join(LIBS, 'fff'))
    config.add_include_dirs(os.path.join(LIBS, 'fff_python_wrapper'))
    config.add_include_dirs(get_numpy_include_dirs())
    cstat_sources = [os.path.join(LIBS, 'fff', '*.c'),
                     os.path.join(LIBS, 'fff_python_wrapper', '*.c')]
    config.add_library('cstat', sources=cstat_sources)

    # Subpackages
    for pkg in ('bindings', 'glm', 'group', 'spatial_models', 'utils',
                'viz_tools', 'datasets', 'tests'):
        config.add_subpackage(pkg)

    config.make_config_py()  # installs __config__.py
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` package."""
    from numpy.distutils.misc_util import Configuration
    from arachnid.distutils.compiler import compiler_options
    import os

    config = Configuration('core', parent_package, top_path)
    cxx_args, cxx_libraries, cxx_defs = compiler_options()[3:]

    healpix_sources = [
        'healpix/ang2pix_nest.c', 'healpix/ang2pix_ring.c',
        'healpix/pix2ang_nest.c', 'healpix/pix2ang_ring.c',
        'healpix/nest2ring.c', 'healpix/ring2nest.c',
        'healpix/nside2npix.c', 'healpix/npix2nside.c',
        'healpix/mk_pix2xy.c', 'healpix/mk_xy2pix.c',
    ]
    # Bundled HEALPix C routines, built as a helper library.
    config.add_library('_healpixlib',
                       sources=healpix_sources,
                       depends=['healpix/chealpix.h'])
    config.add_extension('_transformations', sources=['transforms.c'])
    config.add_extension('_healpix',
                         sources=['healpix.c'],
                         libraries=['_healpixlib'])
    config.add_include_dirs(os.path.dirname(__file__))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for ``pymetis``.

    Relies on module-level ``sys`` and ``join``.
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('pymetis', parent_package, top_path)
    config.add_data_dir('tests')

    METIS_DIR = 'metis-4.0'
    # Bundled METIS 4.0 sources, compiled into a static 'metis' library.
    metis_src = [join(METIS_DIR, '*.c')]
    metis_headers = [join(METIS_DIR, '*.h')]
    config.add_include_dirs([METIS_DIR])
    config.add_library('metis', sources=metis_src)

    src_files = ['metis_ext.c', 'metis_bridge.c']
    if sys.platform == 'win32':
        # random.c presumably supplies random() on win32 — TODO confirm.
        src_files += ['random.c']
    config.add_extension('metis_ext',
                         sources=src_files,
                         include_dirs=[METIS_DIR],
                         depends=metis_src + metis_headers,
                         extra_compile_args=['-I' + METIS_DIR],
                         libraries=['metis', 'm'])
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` package.

    Builds the SWIG-wrapped ``_fastdot`` extension against the first
    available BLAS implementation (mkl, atlas, plain blas, then the
    generic ``blas_opt``).
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    from arachnid.distutils.compiler import compiler_options
    import os

    blas_opt = None
    for opt in ('mkl', 'atlas', 'blas'):
        try:
            blas_opt = get_info(opt, notfound_action=2)
        except Exception:
            # This BLAS flavour is unavailable; try the next candidate.
            # (Was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.)
            pass
        else:
            break
    if blas_opt is None:
        blas_opt = get_info('blas_opt', notfound_action=2)

    config = Configuration('core', parent_package, top_path)
    #fcompiler_args = compiler_options()[0]
    compiler_args, compiler_libraries, compiler_defs = compiler_options()[3:]

    # Use the pre-generated SWIG wrapper when present, else the .i file.
    fastdot_src = ('fastdot_wrap.cpp'
                   if os.path.exists(os.path.join(os.path.dirname(__file__),
                                                  'fastdot_wrap.cpp'))
                   else 'fastdot.i')
    config.add_extension('_fastdot',
                         sources=[fastdot_src],
                         define_macros=[('__STDC_FORMAT_MACROS', 1)] + compiler_defs,
                         depends=['fastdot.hpp'],
                         swig_opts=['-c++'],
                         extra_info=blas_opt,
                         extra_compile_args=compiler_args,
                         extra_link_args=compiler_args,
                         libraries=compiler_libraries)
    config.add_include_dirs(os.path.dirname(__file__))
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``ce`` package.

    Relies on module-level ``sys``, ``join``, ``get_platform``,
    ``dir_ce``, ``src`` and ``print_src``.
    """
    #sys.argv.extend(['config_fc', '--fcompiler=gnu95'])
    # Fortran .mod files are written under build/temp<platform>.
    build_tag = ".%s-%s" % (get_platform(), sys.version[0:3])
    config = Configuration('ce', parent_package, top_path)
    config.add_include_dirs(['build/temp%s' % build_tag])
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=False)
    print("make mylib")
    config.add_library('mylib', sources=[join(dir_ce, f) for f in src])
    config.add_library('print_lib',
                       sources=[join(dir_ce, f) for f in print_src])
    print("add_subpackage('fortran_core')")
    config.add_subpackage('fortran_core')
    print("!!!!!")
    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``core`` package.

    Probes BLAS flavours in order (mkl, atlas, blas) and falls back to
    the generic ``blas_opt``; builds the SWIG-wrapped ``_fastdot``
    extension against whichever was found.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    from arachnid.distutils.compiler import compiler_options
    import os

    blas_opt = None
    for opt in ('mkl', 'atlas', 'blas'):
        try:
            blas_opt = get_info(opt, notfound_action=2)
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
        except Exception:
            pass
        else:
            break
    if blas_opt is None:
        blas_opt = get_info('blas_opt', notfound_action=2)

    config = Configuration('core', parent_package, top_path)
    #fcompiler_args = compiler_options()[0]
    compiler_args, compiler_libraries, compiler_defs = compiler_options()[3:]

    # Prefer the pre-generated SWIG wrapper; fall back to the interface file.
    fastdot_src = 'fastdot_wrap.cpp' if os.path.exists(
        os.path.join(os.path.dirname(__file__),
                     'fastdot_wrap.cpp')) else 'fastdot.i'
    config.add_extension('_fastdot',
                         sources=[fastdot_src],
                         define_macros=[('__STDC_FORMAT_MACROS', 1)] +
                         compiler_defs,
                         depends=['fastdot.hpp'],
                         swig_opts=['-c++'],
                         extra_info=blas_opt,
                         extra_compile_args=compiler_args,
                         extra_link_args=compiler_args,
                         libraries=compiler_libraries)
    config.add_include_dirs(os.path.dirname(__file__))
    return config
def configuration(parent_package='',top_path=None):
    # Build configuration for the 'spi' package: SPIDER-derived Fortran
    # sources compiled into a 'spiutil' helper library plus several f2py
    # extensions, linked against MKL or single-precision FFTW when available.
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    from arachnid.distutils.compiler import compiler_options #detect_openmp
    import os
    compiler_args, compiler_libraries, compiler_defs, ccompiler_args, ccompiler_lib, ccompiler_defs = compiler_options()[:6]
    ccompiler_lib;  # unused; bare reference kept from the original
    # FFT backend probe: prefer MKL, then FFTW (adding the single-precision
    # fftw3f library), and as a last resort just link -lfftw3f and hope it
    # is on the default search path.
    try:
        fftw_opt = get_info('mkl',notfound_action=2)
    except:
        try:
            fftw_opt = get_info('fftw',notfound_action=2)
            #fftw_opt['libraries']=['fftw3f']
            fftw_opt['libraries'].extend(['fftw3f'])
            fftw_opt['library_dirs'].extend(['/usr/lib'])
        except:
            fftw_opt=dict(libraries=['fftw3f'])
    # Guarantee the keys used below exist even for the fallback dict.
    if 'library_dirs' not in fftw_opt: fftw_opt['library_dirs']=[]
    if 'include_dirs' not in fftw_opt: fftw_opt['include_dirs']=[]
    config = Configuration('spi', parent_package, top_path)
    #-ftrap=common
    # Disabled f2py debug switch (flip the constant to re-enable --debug-capi).
    if 1 == 0: f2py_options = ['--debug-capi']
    else: f2py_options=[]
    flink_args = compiler_args
    #-ffixed-form define_macros=[('SP_LIBFFTW3', 1)]+compiler_defs,
    # Shared build options for the Fortran helper library; SP_LIBFFTW3
    # selects the FFTW3 code paths in the SPIDER sources.
    library_options=dict(macros=[('SP_LIBFFTW3', 1)]+compiler_defs, extra_f77_compile_args=compiler_args, extra_f90_compile_args=compiler_args)#extra_f77_compiler_args=['-fdefault-real-8'],, ('SP_MP', 1)
    #extra_f90_compiler_args=['-fdefault-real-8'])
    config.add_library('spiutil', sources=['spiutil.F90', 'spider/tfd.F90', 'spider/fq_q.F90', 'spider/fq3_p.F90', 'spider/parabl.F90', 'spider/pksr3.F90', 'spider/fftw3.F90', 'spider/ccrs.F90', 'spider/apcc.F90', 'spider/quadri.F90', 'spider/rtsq.F90', 'spider/cald.F90', 'spider/bldr.F90', 'spider/fmrs.F90', 'spider/fmrs_2.F90', 'spider/besi1.F90', 'spider/wpro_n.F90', 'spider/prepcub.F90', 'spider/fint.F90', 'spider/fint3.F90', 'spider/betai.F90', 'spider/gammln.F90', 'spider/betacf.F90', 'spider/histe.F90', 'spider/interp_fbs3.F90', 'spider/interp_fbs.F90', 'spider/fbs2.F90', 'spider/fbs3.F90'], depends=['spider/CMBLOCK.INC', 'spider/FFTW3.INC'], **library_options) #, 'fmrs_info.mod', 'type_kinds.mod'
    # Combined link list for every extension; 'libraries' is deleted from
    # fftw_opt afterwards so it is not passed twice via the option dict.
    fftlibs = fftw_opt['libraries']+compiler_libraries
    del fftw_opt['libraries']
    config.add_extension('_spider_reconstruct', sources=['backproject_nn4.f90', 'backproject_bp3f.f90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_reproject', sources=['reproject.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_interpolate', sources=['interpolate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_ctf', sources=['ctf.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    #config.add_extension('_spider_interpolate', sources=['interpolate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=compiler_args, library_dirs=fftw_opt['library_dirs'])
    #-fdefault-real-8
    #rot_src = 'spider_rotate_dist_wrap.cpp' if os.path.exists(os.path.join(os.path.dirname(__file__), 'spider_rotate_dist_wrap.cpp')) else 'rotate.i'
    #config.add_extension('_spider_rotate_dist', sources=[rot_src], define_macros=[('__STDC_FORMAT_MACROS', 1)]+ccompiler_defs, depends=['rotate.hpp'], swig_opts=['-c++'], libraries=['spiutil']+fftlibs, extra_compile_args=ccompiler_args, extra_link_args=compiler_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_rotate', sources=['rotate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    #config.add_extension('_spider_align', sources=['align.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_filter', sources=['filter.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    # Headers: package dir, the bundled SPIDER sources and the FFT backend.
    config.add_include_dirs(os.path.dirname(__file__))
    config.add_include_dirs(os.path.join(os.path.dirname(__file__), 'spider'))
    config.add_include_dirs(fftw_opt['include_dirs'])
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the registration package."""
    from numpy.distutils.misc_util import Configuration
    # libcstat.a is linked against lapack, which can be a Fortran
    # library, and the linker needs this information — hence the
    # system_info lookup below.
    from numpy.distutils.system_info import get_info

    # Prefer the optimized LAPACK. On OSX that may not give us what we
    # need, so fall back to 'lapack' instead.
    # NOTE: scipy.linalg uses lapack_opt, not 'lapack'...
    lapack_info = get_info('lapack_opt', 0)
    if 'libraries' not in lapack_info:
        lapack_info = get_info('lapack', 0)

    config = Configuration('registration', parent_package, top_path)

    for extra_dir in ('tests', 'benchmarks'):
        config.add_data_dir(extra_dir)

    # Make the package's own directory an include path (dots -> path seps).
    config.add_include_dirs(config.name.replace('.', os.sep))

    config.add_extension('registration_module',
                         sources=['registration_module.pyx', 'iconic.c'],
                         libraries=['cstat'],
                         extra_info=lapack_info)
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the tpmc package.

    Registers the CMake find-module, the installed C++ headers, the
    installed ``tpmc_tables`` library (including the ``generate_lut``
    build step) and the ``tpmc-config`` helper script.

    Relies on the module-level ``constants`` mapping and ``generate_lut``
    callable defined elsewhere in this file.
    """
    # Fix: dropped the unused `from numpy.distutils.system_info import
    # get_info` and hoisted the mid-function os.path import up here.
    from numpy.distutils.misc_util import Configuration
    from os.path import join

    config = Configuration('tpmc', parent_package, top_path)

    ## cmake files
    config.add_data_files('FindTpmc.cmake')

    ## header files
    config.add_data_files(constants['header_dir'] + '/*.hh')
    config.add_include_dirs(join('tpmc', 'include'))
    config.add_include_dirs(join('tpmc', 'lut'))

    ## libtpmc_tables: static table sources plus the LUT generation hook
    config.add_installed_library(
        'tpmc_tables',
        sources=['tpmc/src/marchingcubestables.cc',
                 'tpmc/src/geometrytype.cc',
                 'tpmc/src/referenceelements.cc',
                 'tpmc/src/aberthfunctor.cc',
                 generate_lut],
        install_dir=constants['lib_dir'],
        build_info={'extra_compiler_args': ['-std=c++11']})

    ## read version info from file
    config.get_version('tpmc/__version__.py')

    ## tpmc-config script
    config.add_scripts('tpmc-config')
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for ``numpy.core``.

    Sets up the generated headers (config.h, _numpyconfig.h), the C API
    code-generation hooks, the npymath/npysort helper libraries and the
    extension modules (most importantly ``_multiarray_umath``).

    Relies on many helpers defined at file level (``check_mathlib``,
    ``CallOnceOnly``, ``npy_load_module``, version constants, ...).
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # In released versions a C API version mismatch is a hard error, not
    # just a warning.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load the ufunc code generator as a module under a mangled name.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches the expensive configure-style checks so the two header
    # generators below do not redo them.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        """build_src hook: write <build_dir>/include/numpy/config.h.

        Regenerated only when this setup file is newer than the target;
        otherwise MATHLIB is re-read from the existing header so the
        extension can still be linked against the math libraries.
        """
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, ext, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

            # Work around a compiler bug in the right-shift code paths.
            if check_for_right_shift_internal_compiler_error(config_cmd):
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift')

            # Generate the config.h file from moredefs
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # define inline to our keyword, or nothing
                target_f.write('#ifndef __cplusplus\n')
                if inline == 'inline':
                    target_f.write('/* #undef inline */\n')
                else:
                    target_f.write('#define inline %s\n' % inline)
                target_f.write('#endif\n')

                # add the guard to make sure config.h is never included directly,
                # but always through npy_config.h
                target_f.write(textwrap.dedent("""
                    #ifndef _NPY_NPY_CONFIG_H_
                    #error config.h should never be included directly, include npy_config.h instead
                    #endif
                    """))

            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        else:
            # Header is current; recover MATHLIB from its #define line.
            mathlibs = []
            with open(target) as target_f:
                for line in target_f:
                    s = '#define MATHLIB'
                    if line.startswith(s):
                        value = line[len(s):].strip()
                        if value:
                            mathlibs.extend(value.split(','))

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # Define __STDC_FORMAT_MACROS
                target_f.write(textwrap.dedent("""
                    #ifndef __STDC_FORMAT_MACROS
                    #define __STDC_FORMAT_MACROS 1
                    #endif
                    """))

            # Dump the numpyconfig.h header to stdout
            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a build_src hook that runs the named code_generators script."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file, )
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_dir('include/numpy')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("NPY_INTERNAL_BUILD", "1")]) # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    # Large-file support: AIX spells it differently from everyone else.
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        """build hook: verify the toolchain and record the math libraries.

        Another ugly hack: the mathlib info is known once build_src is run,
        but we cannot use add_installed_pkg_config here either, so we only
        update the substitution dictionary during npymath build.
        """
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            # rerun the failing command in verbose mode
            config_cmd.compiler.verbose = True
            config_cmd.try_link('int main(void) { return 0;}')
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
                       join('src', 'npymath', 'npy_math.c'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    # Intel and Clang also don't seem happy with /GL
    is_msvc = (platform.platform().startswith('Windows') and
               platform.python_compiler().startswith('MS'))
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib',
            build_info={
                'include_dirs' : [],  # empty list required for creating npy_math_internal.h
                'extra_compiler_args' : (['/GL-'] if is_msvc else []),
            })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'common', 'npy_sort.h.src'),
                       join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'timsort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'npysort', 'radixsort.c.src'),
                       join('src', 'common', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'common', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('_multiarray_tests',
                    sources=[join('src', 'multiarray', '_multiarray_tests.c.src'),
                             join('src', 'common', 'mem_overlap.c')],
                    depends=[join('src', 'common', 'mem_overlap.h'),
                             join('src', 'common', 'npy_extint128.h')],
                    libraries=['npymath'])

    #######################################################################
    #             _multiarray_umath module - common part                  #
    #######################################################################

    common_deps = [
            join('src', 'common', 'array_assign.h'),
            join('src', 'common', 'binop_override.h'),
            join('src', 'common', 'cblasfuncs.h'),
            join('src', 'common', 'lowlevel_strided_loops.h'),
            join('src', 'common', 'mem_overlap.h'),
            join('src', 'common', 'npy_cblas.h'),
            join('src', 'common', 'npy_config.h'),
            join('src', 'common', 'npy_ctypes.h'),
            join('src', 'common', 'npy_extint128.h'),
            join('src', 'common', 'npy_import.h'),
            join('src', 'common', 'npy_longdouble.h'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'common', 'ucsnarrow.h'),
            join('src', 'common', 'ufunc_override.h'),
            join('src', 'common', 'umathmodule.h'),
            join('src', 'common', 'numpyos.h'),
            ]

    common_src = [
            join('src', 'common', 'array_assign.c'),
            join('src', 'common', 'mem_overlap.c'),
            join('src', 'common', 'npy_longdouble.c'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'common', 'ucsnarrow.c'),
            join('src', 'common', 'ufunc_override.c'),
            join('src', 'common', 'numpyos.c'),
            join('src', 'common', 'npy_cpu_features.c.src'),
            ]

    # Pick the BLAS flavour; NPY_USE_BLAS_ILP64 selects 64-bit integer BLAS.
    if os.environ.get('NPY_USE_BLAS_ILP64', "0") != "0":
        blas_info = get_info('blas_ilp64_opt', 2)
    else:
        blas_info = get_info('blas_opt', 0)

    have_blas = blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', [])

    if have_blas:
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([join('src', 'common', 'cblasfuncs.c'),
                           join('src', 'common', 'python_xerbla.c'),
                           ])
        if uses_accelerate_framework(blas_info):
            common_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'arrayfunction_override.h'),
            join('src', 'multiarray', 'npy_buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'dragon4.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'strfuncs.h'),
            join('src', 'multiarray', 'typeinfo.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            # add library sources as distuils does not consider libraries
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'arrayfunction_override.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dragon4.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'strfuncs.c'),
            join('src', 'multiarray', 'temp_elide.c'),
            join('src', 'multiarray', 'typeinfo.c'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'vdot.c'),
            ]

    #######################################################################
    #             _multiarray_umath module - umath part                   #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        """build_src hook: write __umath_generated.c via generate_umath."""
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            with open(target, 'w') as f:
                f.write(generate_umath.make_code(generate_umath.defdict,
                                                 generate_umath.__file__))
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'matmul.h.src'),
            join('src', 'umath', 'matmul.c.src'),
            join('src', 'umath', 'clip.h.src'),
            join('src', 'umath', 'clip.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'extobj.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c'),
            join('src', 'umath', 'override.c'),
            ]

    umath_deps = [
            generate_umath_py,
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'override.h'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            ]

    config.add_extension('_multiarray_umath',
                         sources=multiarray_src + umath_src +
                                 npymath_sources + common_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  ],
                         depends=deps + multiarray_deps + umath_deps +
                                 common_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('_umath_tests',
                    sources=[join('src', 'umath', '_umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('_rational_tests',
                    sources=[join('src', 'umath', '_rational_tests.c.src')])

    #######################################################################
    #                     struct_ufunc_test module                        #
    #######################################################################

    config.add_extension('_struct_ufunc_tests',
                    sources=[join('src', 'umath', '_struct_ufunc_tests.c.src')])

    #######################################################################
    #                     operand_flag_tests module                       #
    #######################################################################

    config.add_extension('_operand_flag_tests',
                    sources=[join('src', 'umath', '_operand_flag_tests.c.src')])

    config.add_subpackage('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
from distutils import ccompiler compiler = ccompiler.new_compiler() compiler.set_include_dirs([ os.environ['BOOST_ROOT'], os.environ['EIGEN_ROOT'], "pysim/cppsource", ]) cpplibdir = self.distutils_dir_name() compiler.set_library_dirs([cpplibdir]) compiler.set_libraries(["cppsystemlib"]) compiler.compile(["msvc/cpp_runner/main.cpp"]) compiler.link_executable(["msvc/cpp_runner/main.obj"], "cpp_runner") config = Configuration() config.add_include_dirs(['pysim/cppsource', numpy.get_include()]) extracompileargs = [] if sys.platform == "win32": config.add_include_dirs( [os.environ['BOOST_ROOT'], os.environ['EIGEN_ROOT']]) elif sys.platform in ("linux", "darwin"): extracompileargs.append("-std=c++11") extracompileargs.append("-std=c++14") extracompileargs.append("-I/usr/include/eigen3") config.add_installed_library( "cppsystemlib", [ 'pysim/cppsource/CppSystem.cpp',
def configuration(parent_package="", top_path=None): from numpy.distutils.misc_util import Configuration, dot_join from numpy.distutils.system_info import get_info, default_lib_dirs config = Configuration("core", parent_package, top_path) local_dir = config.local_path codegen_dir = join(local_dir, "code_generators") if is_released(config): warnings.simplefilter("error", MismatchCAPIWarning) # Check whether we have a mismatch between the set C API VERSION and the # actual C API VERSION check_api_version(C_API_VERSION, codegen_dir) generate_umath_py = join(codegen_dir, "generate_umath.py") n = dot_join(config.name, "generate_umath") generate_umath = imp.load_module( "_".join(n.split(".")), open(generate_umath_py, "U"), generate_umath_py, (".py", "U", 1) ) header_dir = "include/numpy" # this is relative to config.path_in_package cocache = CallOnceOnly() def generate_config_h(ext, build_dir): target = join(build_dir, header_dir, "config.h") d = os.path.dirname(target) if not os.path.exists(d): os.makedirs(d) if newer(__file__, target): config_cmd = config.get_config_cmd() log.info("Generating %s", target) # Check sizeof moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir) # Check math library and C99 math funcs availability mathlibs = check_mathlib(config_cmd) moredefs.append(("MATHLIB", ",".join(mathlibs))) check_math_capabilities(config_cmd, moredefs, mathlibs) moredefs.extend(cocache.check_ieee_macros(config_cmd)[0]) moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0]) # Signal check if is_npy_no_signal(): moredefs.append("__NPY_PRIVATE_NO_SIGNAL") # Windows checks if sys.platform == "win32" or os.name == "nt": win32_checks(moredefs) # Inline check inline = config_cmd.check_inline() # Check whether we need our own wide character support if not config_cmd.check_decl("Py_UNICODE_WIDE", headers=["Python.h"]): PYTHON_HAS_UNICODE_WIDE = True else: PYTHON_HAS_UNICODE_WIDE = False if ENABLE_SEPARATE_COMPILATION: 
moredefs.append(("ENABLE_SEPARATE_COMPILATION", 1)) if NPY_RELAXED_STRIDES_CHECKING: moredefs.append(("NPY_RELAXED_STRIDES_CHECKING", 1)) # Get long double representation if sys.platform != "darwin": rep = check_long_double_representation(config_cmd) if rep in [ "INTEL_EXTENDED_12_BYTES_LE", "INTEL_EXTENDED_16_BYTES_LE", "MOTOROLA_EXTENDED_12_BYTES_BE", "IEEE_QUAD_LE", "IEEE_QUAD_BE", "IEEE_DOUBLE_LE", "IEEE_DOUBLE_BE", "DOUBLE_DOUBLE_BE", "DOUBLE_DOUBLE_LE", ]: moredefs.append(("HAVE_LDOUBLE_%s" % rep, 1)) else: raise ValueError("Unrecognized long double format: %s" % rep) # Py3K check if sys.version_info[0] == 3: moredefs.append(("NPY_PY3K", 1)) # Generate the config.h file from moredefs target_f = open(target, "w") for d in moredefs: if isinstance(d, str): target_f.write("#define %s\n" % (d)) else: target_f.write("#define %s %s\n" % (d[0], d[1])) # define inline to our keyword, or nothing target_f.write("#ifndef __cplusplus\n") if inline == "inline": target_f.write("/* #undef inline */\n") else: target_f.write("#define inline %s\n" % inline) target_f.write("#endif\n") # add the guard to make sure config.h is never included directly, # but always through npy_config.h target_f.write( """ #ifndef _NPY_NPY_CONFIG_H_ #error config.h should never be included directly, include npy_config.h instead #endif """ ) target_f.close() print("File:", target) target_f = open(target) print(target_f.read()) target_f.close() print("EOF") else: mathlibs = [] target_f = open(target) for line in target_f: s = "#define MATHLIB" if line.startswith(s): value = line[len(s) :].strip() if value: mathlibs.extend(value.split(",")) target_f.close() # Ugly: this can be called within a library and not an extension, # in which case there is no libraries attributes (and none is # needed). 
if hasattr(ext, "libraries"): ext.libraries.extend(mathlibs) incl_dir = os.path.dirname(target) if incl_dir not in config.numpy_include_dirs: config.numpy_include_dirs.append(incl_dir) return target def generate_numpyconfig_h(ext, build_dir): """Depends on config.h: generate_config_h has to be called before !""" # put private include directory in build_dir on search path # allows using code generation in headers headers config.add_include_dirs(join(build_dir, "src", "private")) target = join(build_dir, header_dir, "_numpyconfig.h") d = os.path.dirname(target) if not os.path.exists(d): os.makedirs(d) if newer(__file__, target): config_cmd = config.get_config_cmd() log.info("Generating %s", target) # Check sizeof ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir) if is_npy_no_signal(): moredefs.append(("NPY_NO_SIGNAL", 1)) if is_npy_no_smp(): moredefs.append(("NPY_NO_SMP", 1)) else: moredefs.append(("NPY_NO_SMP", 0)) mathlibs = check_mathlib(config_cmd) moredefs.extend(cocache.check_ieee_macros(config_cmd)[1]) moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1]) if ENABLE_SEPARATE_COMPILATION: moredefs.append(("NPY_ENABLE_SEPARATE_COMPILATION", 1)) if NPY_RELAXED_STRIDES_CHECKING: moredefs.append(("NPY_RELAXED_STRIDES_CHECKING", 1)) # Check wether we can use inttypes (C99) formats if config_cmd.check_decl("PRIdPTR", headers=["inttypes.h"]): moredefs.append(("NPY_USE_C99_FORMATS", 1)) # visibility check hidden_visibility = visibility_define(config_cmd) moredefs.append(("NPY_VISIBILITY_HIDDEN", hidden_visibility)) # Add the C API/ABI versions moredefs.append(("NPY_ABI_VERSION", "0x%.8X" % C_ABI_VERSION)) moredefs.append(("NPY_API_VERSION", "0x%.8X" % C_API_VERSION)) # Add moredefs to header target_f = open(target, "w") for d in moredefs: if isinstance(d, str): target_f.write("#define %s\n" % (d)) else: target_f.write("#define %s %s\n" % (d[0], d[1])) # Define __STDC_FORMAT_MACROS target_f.write( """ #ifndef __STDC_FORMAT_MACROS #define 
__STDC_FORMAT_MACROS 1 #endif """ ) target_f.close() # Dump the numpyconfig.h header to stdout print("File: %s" % target) target_f = open(target) print(target_f.read()) target_f.close() print("EOF") config.add_data_files((header_dir, target)) return target def generate_api_func(module_name): def generate_api(ext, build_dir): script = join(codegen_dir, module_name + ".py") sys.path.insert(0, codegen_dir) try: m = __import__(module_name) log.info("executing %s", script) h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir)) finally: del sys.path[0] config.add_data_files((header_dir, h_file), (header_dir, doc_file)) return (h_file,) return generate_api generate_numpy_api = generate_api_func("generate_numpy_api") generate_ufunc_api = generate_api_func("generate_ufunc_api") config.add_include_dirs(join(local_dir, "src", "private")) config.add_include_dirs(join(local_dir, "src")) config.add_include_dirs(join(local_dir)) config.add_data_files("include/numpy/*.h") config.add_include_dirs(join("src", "npymath")) config.add_include_dirs(join("src", "multiarray")) config.add_include_dirs(join("src", "umath")) config.add_include_dirs(join("src", "npysort")) config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")]) config.add_define_macros([("_FILE_OFFSET_BITS", "64")]) config.add_define_macros([("_LARGEFILE_SOURCE", "1")]) config.add_define_macros([("_LARGEFILE64_SOURCE", "1")]) config.numpy_include_dirs.extend(config.paths("include")) deps = [ join("src", "npymath", "_signbit.c"), join("include", "numpy", "*object.h"), "include/numpy/fenv/fenv.c", "include/numpy/fenv/fenv.h", join(codegen_dir, "genapi.py"), ] # Don't install fenv unless we need them. 
if sys.platform == "cygwin": config.add_data_dir("include/numpy/fenv") ####################################################################### # dummy module # ####################################################################### # npymath needs the config.h and numpyconfig.h files to be generated, but # build_clib cannot handle generate_config_h and generate_numpyconfig_h # (don't ask). Because clib are generated before extensions, we have to # explicitly add an extension which has generate_config_h and # generate_numpyconfig_h as sources *before* adding npymath. config.add_extension( "_dummy", sources=[join("src", "dummymodule.c"), generate_config_h, generate_numpyconfig_h, generate_numpy_api] ) ####################################################################### # npymath library # ####################################################################### subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")]) def get_mathlib_info(*args): # Another ugly hack: the mathlib info is known once build_src is run, # but we cannot use add_installed_pkg_config here either, so we only # update the substition dictionary during npymath build config_cmd = config.get_config_cmd() # Check that the toolchain works, to fail early if it doesn't # (avoid late errors with MATHLIB which are confusing if the # compiler does not work). 
st = config_cmd.try_link("int main(void) { return 0;}") if not st: raise RuntimeError("Broken toolchain: cannot link a simple C program") mlibs = check_mathlib(config_cmd) posix_mlib = " ".join(["-l%s" % l for l in mlibs]) msvc_mlib = " ".join(["%s.lib" % l for l in mlibs]) subst_dict["posix_mathlib"] = posix_mlib subst_dict["msvc_mathlib"] = msvc_mlib npymath_sources = [ join("src", "npymath", "npy_math.c.src"), join("src", "npymath", "ieee754.c.src"), join("src", "npymath", "npy_math_complex.c.src"), join("src", "npymath", "halffloat.c"), ] config.add_installed_library("npymath", sources=npymath_sources + [get_mathlib_info], install_dir="lib") config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config", subst_dict) config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict) ####################################################################### # npysort library # ####################################################################### # This library is created for the build but it is not installed npysort_sources = [ join("src", "npysort", "quicksort.c.src"), join("src", "npysort", "mergesort.c.src"), join("src", "npysort", "heapsort.c.src"), join("src", "private", "npy_partition.h.src"), join("src", "npysort", "selection.c.src"), join("src", "private", "npy_binsearch.h.src"), join("src", "npysort", "binsearch.c.src"), ] config.add_library("npysort", sources=npysort_sources, include_dirs=[]) ####################################################################### # multiarray module # ####################################################################### # Multiarray version: this function is needed to build foo.c from foo.c.src # when foo.c is included in another file and as such not in the src # argument of build_ext command def generate_multiarray_templated_sources(ext, build_dir): from numpy.distutils.misc_util import get_cmd subpath = join("src", "multiarray") sources = [ join(local_dir, subpath, "scalartypes.c.src"), join(local_dir, 
subpath, "arraytypes.c.src"), join(local_dir, subpath, "nditer_templ.c.src"), join(local_dir, subpath, "lowlevel_strided_loops.c.src"), join(local_dir, subpath, "einsum.c.src"), ] # numpy.distutils generate .c from .c.src in weird directories, we have # to add them there as they depend on the build_dir config.add_include_dirs(join(build_dir, subpath)) cmd = get_cmd("build_src") cmd.ensure_finalized() cmd.template_sources(sources, ext) multiarray_deps = ( [ join("src", "multiarray", "arrayobject.h"), join("src", "multiarray", "arraytypes.h"), join("src", "multiarray", "array_assign.h"), join("src", "multiarray", "buffer.h"), join("src", "multiarray", "calculation.h"), join("src", "multiarray", "common.h"), join("src", "multiarray", "convert_datatype.h"), join("src", "multiarray", "convert.h"), join("src", "multiarray", "conversion_utils.h"), join("src", "multiarray", "ctors.h"), join("src", "multiarray", "descriptor.h"), join("src", "multiarray", "getset.h"), join("src", "multiarray", "hashdescr.h"), join("src", "multiarray", "iterators.h"), join("src", "multiarray", "mapping.h"), join("src", "multiarray", "methods.h"), join("src", "multiarray", "multiarraymodule.h"), join("src", "multiarray", "nditer_impl.h"), join("src", "multiarray", "numpymemoryview.h"), join("src", "multiarray", "number.h"), join("src", "multiarray", "numpyos.h"), join("src", "multiarray", "refcount.h"), join("src", "multiarray", "scalartypes.h"), join("src", "multiarray", "sequence.h"), join("src", "multiarray", "shape.h"), join("src", "multiarray", "ucsnarrow.h"), join("src", "multiarray", "usertypes.h"), join("src", "private", "lowlevel_strided_loops.h"), join("include", "numpy", "arrayobject.h"), join("include", "numpy", "_neighborhood_iterator_imp.h"), join("include", "numpy", "npy_endian.h"), join("include", "numpy", "arrayscalars.h"), join("include", "numpy", "noprefix.h"), join("include", "numpy", "npy_interrupt.h"), join("include", "numpy", "npy_3kcompat.h"), join("include", "numpy", 
"npy_math.h"), join("include", "numpy", "halffloat.h"), join("include", "numpy", "npy_common.h"), join("include", "numpy", "npy_os.h"), join("include", "numpy", "utils.h"), join("include", "numpy", "ndarrayobject.h"), join("include", "numpy", "npy_cpu.h"), join("include", "numpy", "numpyconfig.h"), join("include", "numpy", "ndarraytypes.h"), join("include", "numpy", "npy_1_7_deprecated_api.h"), join("include", "numpy", "_numpyconfig.h.in"), # add library sources as distuils does not consider libraries # dependencies ] + npysort_sources + npymath_sources ) multiarray_src = [ join("src", "multiarray", "alloc.c"), join("src", "multiarray", "arrayobject.c"), join("src", "multiarray", "arraytypes.c.src"), join("src", "multiarray", "array_assign.c"), join("src", "multiarray", "array_assign_scalar.c"), join("src", "multiarray", "array_assign_array.c"), join("src", "multiarray", "buffer.c"), join("src", "multiarray", "calculation.c"), join("src", "multiarray", "common.c"), join("src", "multiarray", "convert.c"), join("src", "multiarray", "convert_datatype.c"), join("src", "multiarray", "conversion_utils.c"), join("src", "multiarray", "ctors.c"), join("src", "multiarray", "datetime.c"), join("src", "multiarray", "datetime_strings.c"), join("src", "multiarray", "datetime_busday.c"), join("src", "multiarray", "datetime_busdaycal.c"), join("src", "multiarray", "descriptor.c"), join("src", "multiarray", "dtype_transfer.c"), join("src", "multiarray", "einsum.c.src"), join("src", "multiarray", "flagsobject.c"), join("src", "multiarray", "getset.c"), join("src", "multiarray", "hashdescr.c"), join("src", "multiarray", "item_selection.c"), join("src", "multiarray", "iterators.c"), join("src", "multiarray", "lowlevel_strided_loops.c.src"), join("src", "multiarray", "mapping.c"), join("src", "multiarray", "methods.c"), join("src", "multiarray", "multiarraymodule.c"), join("src", "multiarray", "nditer_templ.c.src"), join("src", "multiarray", "nditer_api.c"), join("src", "multiarray", 
"nditer_constr.c"), join("src", "multiarray", "nditer_pywrap.c"), join("src", "multiarray", "number.c"), join("src", "multiarray", "numpymemoryview.c"), join("src", "multiarray", "numpyos.c"), join("src", "multiarray", "refcount.c"), join("src", "multiarray", "sequence.c"), join("src", "multiarray", "shape.c"), join("src", "multiarray", "scalarapi.c"), join("src", "multiarray", "scalartypes.c.src"), join("src", "multiarray", "usertypes.c"), join("src", "multiarray", "ucsnarrow.c"), ] if not ENABLE_SEPARATE_COMPILATION: multiarray_deps.extend(multiarray_src) multiarray_src = [join("src", "multiarray", "multiarraymodule_onefile.c")] multiarray_src.append(generate_multiarray_templated_sources) config.add_extension( "multiarray", sources=multiarray_src + [ generate_config_h, generate_numpyconfig_h, generate_numpy_api, join(codegen_dir, "generate_numpy_api.py"), join("*.py"), ], depends=deps + multiarray_deps, libraries=["npymath", "npysort"], ) ####################################################################### # umath module # ####################################################################### # umath version: this function is needed to build foo.c from foo.c.src # when foo.c is included in another file and as such not in the src # argument of build_ext command def generate_umath_templated_sources(ext, build_dir): from numpy.distutils.misc_util import get_cmd subpath = join("src", "umath") # NOTE: For manual template conversion of loops.h.src, read the note # in that file. 
sources = [join(local_dir, subpath, "loops.c.src"), join(local_dir, subpath, "simd.inc.src")] # numpy.distutils generate .c from .c.src in weird directories, we have # to add them there as they depend on the build_dir config.add_include_dirs(join(build_dir, subpath)) cmd = get_cmd("build_src") cmd.ensure_finalized() cmd.template_sources(sources, ext) def generate_umath_c(ext, build_dir): target = join(build_dir, header_dir, "__umath_generated.c") dir = os.path.dirname(target) if not os.path.exists(dir): os.makedirs(dir) script = generate_umath_py if newer(script, target): f = open(target, "w") f.write(generate_umath.make_code(generate_umath.defdict, generate_umath.__file__)) f.close() return [] umath_src = [ join("src", "umath", "umathmodule.c"), join("src", "umath", "reduction.c"), join("src", "umath", "funcs.inc.src"), join("src", "umath", "simd.inc.src"), join("src", "umath", "loops.c.src"), join("src", "umath", "ufunc_object.c"), join("src", "umath", "ufunc_type_resolution.c"), ] umath_deps = [ generate_umath_py, join("src", "multiarray", "common.h"), join("src", "umath", "simd.inc.src"), join(codegen_dir, "generate_ufunc_api.py"), join("src", "private", "ufunc_override.h"), ] + npymath_sources if not ENABLE_SEPARATE_COMPILATION: umath_deps.extend(umath_src) umath_src = [join("src", "umath", "umathmodule_onefile.c")] umath_src.append(generate_umath_templated_sources) umath_src.append(join("src", "umath", "funcs.inc.src")) umath_src.append(join("src", "umath", "simd.inc.src")) config.add_extension( "umath", sources=umath_src + [generate_config_h, generate_numpyconfig_h, generate_umath_c, generate_ufunc_api], depends=deps + umath_deps, libraries=["npymath"], ) ####################################################################### # scalarmath module # ####################################################################### config.add_extension( "scalarmath", sources=[ join("src", "scalarmathmodule.c.src"), join("src", "private", "scalarmathmodule.h.src"), 
generate_config_h, generate_numpyconfig_h, generate_numpy_api, generate_ufunc_api, ], depends=deps + npymath_sources, libraries=["npymath"], ) ####################################################################### # _dotblas module # ####################################################################### # Configure blasdot blas_info = get_info("blas_opt", 0) # blas_info = {} def get_dotblas_sources(ext, build_dir): if blas_info: if ("NO_ATLAS_INFO", 1) in blas_info.get("define_macros", []): return None # dotblas needs ATLAS, Fortran compiled blas will not be sufficient. return ext.depends[:1] return None # no extension module will be built config.add_extension( "_dotblas", sources=[get_dotblas_sources], depends=[join("blasdot", "_dotblas.c"), join("blasdot", "cblas.h")], include_dirs=["blasdot"], extra_info=blas_info, ) ####################################################################### # umath_tests module # ####################################################################### config.add_extension("umath_tests", sources=[join("src", "umath", "umath_tests.c.src")]) ####################################################################### # custom rational dtype module # ####################################################################### config.add_extension("test_rational", sources=[join("src", "umath", "test_rational.c.src")]) ####################################################################### # struct_ufunc_test module # ####################################################################### config.add_extension("struct_ufunc_test", sources=[join("src", "umath", "struct_ufunc_test.c.src")]) ####################################################################### # multiarray_tests module # ####################################################################### config.add_extension("multiarray_tests", sources=[join("src", "multiarray", "multiarray_tests.c.src")]) ####################################################################### # 
operand_flag_tests module # ####################################################################### config.add_extension("operand_flag_tests", sources=[join("src", "umath", "operand_flag_tests.c.src")]) config.add_data_dir("tests") config.add_data_dir("tests/data") config.make_svn_version_py() return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``labs`` package.

    Sets up the ``cstat`` C library (fff + wrapper sources), linking it
    against a system LAPACK when one can be found and falling back to the
    bundled lapack-lite sources otherwise, then registers the subpackages.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info

    config = Configuration('labs', parent_package, top_path)

    # Header search paths for the cstat library.
    for inc in (os.path.join(LIBS, 'fff'),
                os.path.join(LIBS, 'wrapper'),
                get_numpy_include_dirs()):
        config.add_include_dirs(inc)

    csources = [os.path.join(LIBS, 'fff', '*.c'),
                os.path.join(LIBS, 'wrapper', '*.c')]

    # Locate LAPACK.  'lapack_opt' is queried first (more detail on Linux,
    # both 32 and 64 bit); on OSX it may not expose 'libraries' /
    # 'library_dirs', in which case plain 'lapack' is queried instead.
    # NOTE: scipy.linalg uses lapack_opt, not 'lapack'.  If these lines are
    # modified, the information must be propagated to the other .so builds
    # in the neurospin build system.
    lapack = get_info('lapack_opt', 0)
    if 'libraries' not in lapack:
        lapack = get_info('lapack', 0)

    if lapack:
        # Best case: a system LAPACK install was found.
        lib_dirs = lapack['library_dirs']
        libs = lapack['libraries']
        if 'include_dirs' in lapack:
            config.add_include_dirs(lapack['include_dirs'])
    else:
        # Rescue path: compile the bundled lapack-lite distribution
        # (sources previously translated to C with f2c).
        log.warn('No lapack installation found, using lapack lite distribution')
        csources.append(os.path.join(LIBS, 'lapack_lite', '*.c'))
        lib_dirs = []
        libs = []

    # Information message
    print('LAPACK build options:')
    print('library_dirs: %s ' % lib_dirs)
    print('libraries: %s ' % libs)
    print('lapack_info: %s ' % lapack)

    config.add_library('cstat',
                       sources=csources,
                       library_dirs=lib_dirs,
                       libraries=libs,
                       extra_info=lapack)

    # Subpackages
    for sub in ('bindings', 'glm', 'group', 'spatial_models',
                'utils', 'viz_tools', 'datasets', 'tests'):
        config.add_subpackage(sub)

    config.make_config_py()  # installs __config__.py

    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for ``numpy.core``.

    Generates the config headers and C API sources via nested callback
    functions (numpy.distutils calls them at build_src time), then declares
    the npymath/npysort libraries and the multiarray/umath extension
    modules plus their test extensions.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # In released versions a C API mismatch is an error, not just a warning.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py as a module under a mangled
    # name (imp-based import, so no package machinery is required).
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py,
                                     ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches expensive config checks so they run only once across the
    # several generator callbacks below.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        """Generate (or re-read) config.h in build_dir; returns its path."""
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        # Only regenerate when this setup file is newer than the target.
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                         headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")
            target_f.close()

            # Echo the generated header to the build log.
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Target is up to date: recover the MATHLIB list by parsing the
            # existing header instead of re-running the checks.
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "private"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a build_src callback that runs code_generators/<module_name>.py."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                # Always restore sys.path, even if the generator raised.
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    # Large-file support on 32-bit platforms.
    config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
    config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
    config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.
    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api])

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math.c.src'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')]
    config.add_installed_library('npymath',
                                 sources=npymath_sources + [get_mathlib_info],
                                 install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
                              subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'private', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'private', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src'),
                   join(local_dir, subpath, 'nditer_templ.c.src'),
                   join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
                   join(local_dir, subpath, 'einsum.c.src'),
                   join(local_dir, 'src', 'private', 'templ_common.h.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'array_assign.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'cblasfuncs.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        # NOTE(review): a .c file in this header-dependency list looks odd —
        # every sibling entry is a .h; confirm deferredarray.c is intended
        # here rather than in multiarray_src.
        join('src', 'multiarray', 'deferredarray.c'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'numpyos.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'multiarray', 'vdot.h'),
        join('src', 'private', 'npy_config.h'),
        join('src', 'private', 'templ_common.h.src'),
        join('src', 'private', 'lowlevel_strided_loops.h'),
        join('src', 'private', 'mem_overlap.h'),
        join('src', 'private', 'npy_extint128.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'deferredarray.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
        join('include', 'numpy', '_numpyconfig.h.in'),
        # add library sources as distuils does not consider libraries
        # dependencies
        ] + npysort_sources + npymath_sources

    multiarray_src = [
        join('src', 'multiarray', 'alloc.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_assign.c'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'compiled_base.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'numpyos.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'ucsnarrow.c'),
        join('src', 'multiarray', 'vdot.c'),
        join('src', 'private', 'templ_common.h.src'),
        join('src', 'private', 'mem_overlap.c'),
        ]

    # Only use an optimized BLAS if it actually provides a CBLAS interface.
    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get(
            'define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
                               join('src', 'multiarray', 'python_xerbla.c'),
                               ])
        if uses_accelerate_framework(blas_info):
            multiarray_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    config.add_extension('multiarray',
                         sources=multiarray_src +
                         [generate_config_h,
                          generate_numpyconfig_h,
                          generate_numpy_api,
                          join(codegen_dir, 'generate_numpy_api.py'),
                          join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [
            join(local_dir, subpath, 'loops.h.src'),
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'scalarmath.c.src'),
            join(local_dir, subpath, 'simd.inc.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        """Write __umath_generated.c from the generate_umath module's defdict."""
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        # Nothing to add to sources: the generated file is picked up via
        # the build_dir include path.
        return []

    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'loops.h.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'scalarmath.c.src'),
        join('src', 'umath', 'ufunc_type_resolution.c')]

    umath_deps = [
        generate_umath_py,
        join('src', 'multiarray', 'common.h'),
        join('src', 'private', 'templ_common.h.src'),
        join('src', 'umath', 'simd.inc.src'),
        join(codegen_dir, 'generate_ufunc_api.py'),
        join('src', 'private', 'ufunc_override.h')] + npymath_sources

    # NOTE(review): this block is highly unusual and looks fragile at best —
    # it links umath directly against a prebuilt multiarray.so found by
    # taking the FIRST 'site-packages' entry on sys.path and a HARDCODED
    # 'py2.7-linux-x86_64.egg' directory name, so it can only work for one
    # specific interpreter/platform/install layout, and it smuggles the path
    # through an environment variable. Confirm this is intentional and not
    # injected/leftover debug code before shipping.
    os.environ['MULTIARRAY_LIB_PATH'] = os.path.join([
        x for x in sys.path if x.endswith('site-packages')
        ][0], 'numpy-' + config.version + '-py2.7-linux-x86_64.egg',
        'numpy', 'core', 'multiarray.so')

    config.add_extension('umath',
                         sources=umath_src +
                         [generate_config_h,
                          generate_numpyconfig_h,
                          generate_umath_c,
                          generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         extra_objects=[os.environ['MULTIARRAY_LIB_PATH']])

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                         sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                custom rational dtype module                         #
    #######################################################################

    config.add_extension('test_rational',
                         sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                     struct_ufunc_test module                        #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                         sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                      multiarray_tests module                        #
    #######################################################################

    config.add_extension('multiarray_tests',
                         sources=[join('src', 'multiarray',
                                       'multiarray_tests.c.src'),
                                  join('src', 'private', 'mem_overlap.c')],
                         depends=[join('src', 'private', 'mem_overlap.h'),
                                  join('src', 'private', 'npy_extint128.h')])

    #######################################################################
    #                    operand_flag_tests module                        #
    #######################################################################

    config.add_extension('operand_flag_tests',
                         sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils Configuration for the ``core`` package.

    Registers the generated config headers (config.h, _numpyconfig.h), the
    installed ``npymath`` C library, and the ``_sort``, ``multiarray``,
    ``umath``, ``scalarmath``, ``_dotblas`` and test extensions.

    Relies on module-level helpers defined earlier in this file
    (``is_released``, ``check_api_version``, ``CallOnceOnly``,
    ``check_mathlib``, ``check_math_capabilities``, ``is_npy_no_signal``,
    ``is_npy_no_smp``, ``win32_checks``, ``visibility_define``,
    ``check_long_double_representation``, the ``C_API_VERSION`` /
    ``C_ABI_VERSION`` / ``ENABLE_SEPARATE_COMPILATION`` constants, and
    ``join``/``os``/``sys``/``imp``/``warnings``/``log``/``newer``).

    Parameters
    ----------
    parent_package : str
        Dotted name of the parent package ('' for top level).
    top_path : str or None
        Path of the top-level setup.py directory.

    Returns
    -------
    numpy.distutils.misc_util.Configuration
    """
    from numpy.distutils.misc_util import Configuration,dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core',parent_package,top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir,'code_generators')

    # For released versions, turn a C API version mismatch warning into a
    # hard error so it cannot slip through a release build.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Import code_generators/generate_umath.py under a module name derived
    # from the package path ('.' replaced by '_'), via the legacy ``imp``
    # API in universal-newline mode.
    generate_umath_py = join(codegen_dir,'generate_umath.py')
    n = dot_join(config.name,'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py,'U'),generate_umath_py,
                                     ('.py','U',1))

    header_dir = 'include/numpy' # this is relative to config.path_in_package

    # Memoizes the expensive configure-style checks (types, IEEE macros,
    # complex support) so they run only once even though both header
    # generators below need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        # Build-time source hook: writes <build_dir>/include/numpy/config.h
        # (private config header) and returns its path.  Invoked by
        # numpy.distutils for each extension listing it in ``sources``.
        target = join(build_dir,header_dir,'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        # Only regenerate when this setup file is newer than the target.
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB',','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform=='win32' or os.name=='nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            # NOTE(review): this assignment is local to generate_config_h;
            # if a module-level PYTHON_HAS_UNICODE_WIDE exists it is NOT
            # updated here -- confirm against the full file.
            if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            # (a bare string becomes a valueless #define; a pair becomes
            # ``#define NAME VALUE``).
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            # Echo the generated header to the build log for debugging.
            print('File:',target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Target is up to date: recover the MATHLIB list by parsing the
            # existing header so ext.libraries can still be extended below.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # Writes the *public* _numpyconfig.h header and registers it as a
        # data file so it gets installed alongside the package.
        target = join(build_dir,header_dir,'_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)

            # Check sizeof
            # (index [1] of the cached result: the public defines, as
            # opposed to the private ones used by generate_config_h)
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers = ['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Factory returning a build-time source hook that runs the named
        # code_generators script (generate_numpy_api / generate_ufunc_api)
        # and registers its generated header and .txt doc as data files.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                # Always remove codegen_dir from sys.path again.
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src'),
                   join(local_dir, subpath, 'nditer.c.src'),
                   join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
                   join(local_dir, subpath, 'einsum.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        # NOTE: For manual template conversion of loops.h.src, read the note
        #       in that file.
        sources = [join(local_dir, subpath, 'loops.c.src'),
                   join(local_dir, subpath, 'umathmodule.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext,build_dir):
        # Build-time source hook: writes __umath_generated.c from the
        # generate_umath module loaded above; returns [] because the file is
        # #included rather than compiled separately.
        target = join(build_dir,header_dir,'__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script,target):
            f = open(target,'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))

    config.numpy_include_dirs.extend(config.paths('include'))

    # Common dependencies shared by the extensions registered below.
    deps = [join('src','npymath','_signbit.c'),
            join('include','numpy','*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir,'genapi.py'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('_sort',
                         sources=[join('src','_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  ],
                         libraries=['npymath'])

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    # Substitutions for the npymath .ini pkg-config templates; mutated by
    # get_mathlib_info during the npymath build.
    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # updated the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    config.add_installed_library('npymath',
            sources=[join('src', 'npymath', 'npy_math.c.src'),
                     join('src', 'npymath', 'ieee754.c.src'),
                     join('src', 'npymath', 'npy_math_complex.c.src'),
                     join('src', 'npymath', 'halffloat.c'),
                     get_mathlib_info],
            install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'numpymemoryview.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'private', 'lowlevel_strided_loops.h')]

    multiarray_src = [join('src', 'multiarray', 'multiarraymodule.c'),
                      join('src', 'multiarray', 'hashdescr.c'),
                      join('src', 'multiarray', 'arrayobject.c'),
                      join('src', 'multiarray', 'numpymemoryview.c'),
                      join('src', 'multiarray', 'buffer.c'),
                      join('src', 'multiarray', 'datetime.c'),
                      join('src', 'multiarray', 'numpyos.c'),
                      join('src', 'multiarray', 'conversion_utils.c'),
                      join('src', 'multiarray', 'flagsobject.c'),
                      join('src', 'multiarray', 'descriptor.c'),
                      join('src', 'multiarray', 'iterators.c'),
                      join('src', 'multiarray', 'mapping.c'),
                      join('src', 'multiarray', 'number.c'),
                      join('src', 'multiarray', 'getset.c'),
                      join('src', 'multiarray', 'sequence.c'),
                      join('src', 'multiarray', 'methods.c'),
                      join('src', 'multiarray', 'ctors.c'),
                      join('src', 'multiarray', 'convert_datatype.c'),
                      join('src', 'multiarray', 'convert.c'),
                      join('src', 'multiarray', 'shape.c'),
                      join('src', 'multiarray', 'item_selection.c'),
                      join('src', 'multiarray', 'calculation.c'),
                      join('src', 'multiarray', 'common.c'),
                      join('src', 'multiarray', 'usertypes.c'),
                      join('src', 'multiarray', 'scalarapi.c'),
                      join('src', 'multiarray', 'refcount.c'),
                      join('src', 'multiarray', 'arraytypes.c.src'),
                      join('src', 'multiarray', 'scalartypes.c.src'),
                      join('src', 'multiarray', 'nditer.c.src'),
                      join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
                      join('src', 'multiarray', 'dtype_transfer.c'),
                      join('src', 'multiarray', 'nditer_pywrap.c'),
                      join('src', 'multiarray', 'einsum.c.src'),
                      join('src', 'multiarray', 'ucsnarrow.c')]

    umath_src = [join('src', 'umath', 'umathmodule.c.src'),
                 join('src', 'umath', 'funcs.inc.src'),
                 join('src', 'umath', 'loops.c.src'),
                 join('src', 'umath', 'ufunc_object.c')]

    umath_deps = [generate_umath_py,
                  join(codegen_dir,'generate_ufunc_api.py')]

    # Single-compilation-unit mode: the per-module .c files become depends
    # and one *_onefile.c (which #includes them) is compiled instead; the
    # templated sources must then be generated via the hooks above.
    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [join('src', 'multiarray', 'multiarraymodule_onefile.c')]
        multiarray_src.append(generate_multiarray_templated_sources)

        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))

    config.add_extension('multiarray',
                         sources = multiarray_src + [generate_config_h,
                                                     generate_numpyconfig_h,
                                                     generate_numpy_api,
                                                     join(codegen_dir,'generate_numpy_api.py'),
                                                     join('*.py')],
                         depends = deps + multiarray_deps,
                         libraries=['npymath'])

    config.add_extension('umath',
                         sources = [generate_config_h,
                                    generate_numpyconfig_h,
                                    generate_umath_c,
                                    generate_ufunc_api,
                                    ] + umath_src,
                         depends = deps + umath_deps,
                         libraries=['npymath'],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src','scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         libraries=['npymath'],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt',0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        # Source hook: return the _dotblas.c dependency only when an
        # optimized BLAS was found; returning None skips the module.
        if blas_info:
            #if ('NO_ATLAS_INFO',1) in blas_info.get('define_macros',[]):
            #    return None # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None # no extension module will be built

    config.add_extension('_dotblas',
                         sources = [get_dotblas_sources],
                         depends=[join('blasdot','_dotblas.c'),
                                  join('blasdot','cblas.h'),
                                  ],
                         include_dirs = ['blasdot'],
                         extra_info = blas_info
                         )

    config.add_extension('umath_tests',
                    sources = [join('src','umath', 'umath_tests.c.src')])

    config.add_extension('multiarray_tests',
                    sources = [join('src', 'multiarray', 'multiarray_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for the ``core`` package.

    Later, auto-formatted variant of the configuration above: the
    MOTOROLA long-double format is no longer accepted, ``ucsnarrow.c`` is
    compiled only when wide-unicode support is needed, and the
    ``NO_ATLAS_INFO`` check in ``get_dotblas_sources`` is active.

    Registers the generated config headers (config.h, _numpyconfig.h), the
    installed ``npymath`` C library, and the ``_sort``, ``multiarray``,
    ``umath``, ``scalarmath``, ``_dotblas`` and test extensions.

    Relies on module-level helpers defined earlier in this file
    (``is_released``, ``check_api_version``, ``CallOnceOnly``,
    ``check_mathlib``, ``check_math_capabilities``, ``is_npy_no_signal``,
    ``is_npy_no_smp``, ``win32_checks``, ``visibility_define``,
    ``check_long_double_representation``, the ``C_API_VERSION`` /
    ``C_ABI_VERSION`` / ``ENABLE_SEPARATE_COMPILATION`` constants, and
    ``join``/``os``/``sys``/``imp``/``warnings``/``log``/``newer``).

    Parameters
    ----------
    parent_package : str
        Dotted name of the parent package ('' for top level).
    top_path : str or None
        Path of the top-level setup.py directory.

    Returns
    -------
    numpy.distutils.misc_util.Configuration
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # For released versions, turn a C API version mismatch warning into a
    # hard error so it cannot slip through a release build.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Import code_generators/generate_umath.py under a module name derived
    # from the package path ('.' replaced by '_'), via the legacy ``imp``
    # API in universal-newline mode.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Memoizes the expensive configure-style checks (types, IEEE macros,
    # complex support) so they run only once even though both header
    # generators below need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        # Build-time source hook: writes <build_dir>/include/numpy/config.h
        # (private config header) and returns its path.  Invoked by
        # numpy.distutils for each extension listing it in ``sources``.
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        # Only regenerate when this setup file is newer than the target.
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            # NOTE(review): this assignment is local to generate_config_h,
            # yet PYTHON_HAS_UNICODE_WIDE is read later at configuration()
            # scope (ucsnarrow.c guard).  Unless a module-level
            # PYTHON_HAS_UNICODE_WIDE exists elsewhere in this file, that
            # read raises NameError -- confirm against the full file.
            if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in [
                        'INTEL_EXTENDED_12_BYTES_LE',
                        'INTEL_EXTENDED_16_BYTES_LE', 'IEEE_QUAD_LE',
                        'IEEE_QUAD_BE', 'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                        'DOUBLE_DOUBLE_BE'
                ]:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            # (a bare string becomes a valueless #define; a pair becomes
            # ``#define NAME VALUE``).
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            # Echo the generated header to the build log for debugging.
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Target is up to date: recover the MATHLIB list by parsing the
            # existing header so ext.libraries can still be extended below.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # Writes the *public* _numpyconfig.h header and registers it as a
        # data file so it gets installed alongside the package.
        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            # (index [1] of the cached result: the public defines, as
            # opposed to the private ones used by generate_config_h)
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Factory returning a build-time source hook that runs the named
        # code_generators script (generate_numpy_api / generate_ufunc_api)
        # and registers its generated header and .txt doc as data files.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                # Always remove codegen_dir from sys.path again.
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [
            join(local_dir, subpath, 'scalartypes.c.src'),
            join(local_dir, subpath, 'arraytypes.c.src'),
            join(local_dir, subpath, 'nditer.c.src'),
            join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
            join(local_dir, subpath, 'einsum.c.src')
        ]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        # NOTE: For manual template conversion of loops.h.src, read the note
        #       in that file.
        sources = [
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'umathmodule.c.src')
        ]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        # Build-time source hook: writes __umath_generated.c from the
        # generate_umath module loaded above; returns [] because the file is
        # #included rather than compiled separately.
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(
                generate_umath.make_code(generate_umath.defdict,
                                         generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))

    config.numpy_include_dirs.extend(config.paths('include'))

    # Common dependencies shared by the extensions registered below.
    deps = [
        join('src', 'npymath', '_signbit.c'),
        join('include', 'numpy', '*object.h'),
        'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
    ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('_sort',
                         sources=[
                             join('src', '_sortmodule.c.src'),
                             generate_config_h,
                             generate_numpyconfig_h,
                             generate_numpy_api,
                         ],
                         libraries=['npymath'])

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    # Substitutions for the npymath .ini pkg-config templates; mutated by
    # get_mathlib_info during the npymath build.
    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # updated the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    config.add_installed_library('npymath',
                                 sources=[
                                     join('src', 'npymath', 'npy_math.c.src'),
                                     join('src', 'npymath', 'ieee754.c.src'),
                                     join('src', 'npymath',
                                          'npy_math_complex.c.src'),
                                     join('src', 'npymath', 'halffloat.c'),
                                     get_mathlib_info
                                 ],
                                 install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'numpyos.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'private', 'lowlevel_strided_loops.h')
    ]

    multiarray_src = [
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'numpyos.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'nditer.c.src'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'einsum.c.src')
    ]

    # Only compile the UCS2<->UCS4 shim when Python lacks wide unicode.
    # NOTE(review): see the comment in generate_config_h -- this name is
    # only assigned inside that nested function.
    if PYTHON_HAS_UNICODE_WIDE:
        multiarray_src.append(join('src', 'multiarray', 'ucsnarrow.c'))

    umath_src = [
        join('src', 'umath', 'umathmodule.c.src'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'ufunc_object.c')
    ]

    umath_deps = [
        generate_umath_py,
        join(codegen_dir, 'generate_ufunc_api.py')
    ]

    # Single-compilation-unit mode: the per-module .c files become depends
    # and one *_onefile.c (which #includes them) is compiled instead; the
    # templated sources must then be generated via the hooks above.
    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [
            join('src', 'multiarray', 'multiarraymodule_onefile.c')
        ]
        multiarray_src.append(generate_multiarray_templated_sources)

        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))

    config.add_extension(
        'multiarray',
        sources=multiarray_src + [
            generate_config_h, generate_numpyconfig_h, generate_numpy_api,
            join(codegen_dir, 'generate_numpy_api.py'),
            join('*.py')
        ],
        depends=deps + multiarray_deps,
        libraries=['npymath'])

    config.add_extension(
        'umath',
        sources=[
            generate_config_h,
            generate_numpyconfig_h,
            generate_umath_c,
            generate_ufunc_api,
        ] + umath_src,
        depends=deps + umath_deps,
        libraries=['npymath'],
    )

    config.add_extension(
        'scalarmath',
        sources=[
            join('src', 'scalarmathmodule.c.src'), generate_config_h,
            generate_numpyconfig_h, generate_numpy_api, generate_ufunc_api
        ],
        libraries=['npymath'],
    )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)

    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        # Source hook: return the _dotblas.c dependency only when an
        # ATLAS-backed optimized BLAS was found; returning None skips the
        # module entirely.
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[
                             join('blasdot', '_dotblas.c'),
                             join('blasdot', 'cblas.h'),
                         ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_extension('umath_tests',
                         sources=[join('src', 'umath', 'umath_tests.c.src')])

    config.add_extension(
        'multiarray_tests',
        sources=[join('src', 'multiarray', 'multiarray_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for numpy.core (pre-numpyconfig era).

    Registers the config.h generator and the array/ufunc C-API generators,
    then declares the multiarray, umath, _sort, scalarmath and _dotblas
    extension modules.  The signature is the standard numpy.distutils
    subpackage hook and must not change.

    Fixes applied: restored the ``tc = generate_testcode(target)``
    assignment (it was commented out, so the ``try_run(tc, ...)`` call
    below raised NameError), and converted Python-2-only ``print``/
    ``raise E, msg`` statements to Python-3-compatible syntax.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # Load code_generators/generate_umath.py under a package-unique name.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        """Generate (or reuse) build_dir/config.h and return its path.

        Side effect: extends ``ext.libraries`` with the math libraries
        that were detected (or recovered from an up-to-date header).
        """
        target = join(build_dir, 'config.h')
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            print('Generating', target)

            # BUG FIX: this assignment was commented out, leaving `tc`
            # undefined at the try_run() call below (NameError).
            tc = generate_testcode(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError(
                    "Non-existing %s. Perhaps you need to install"
                    " python-dev|python-devel." % (python_h))
            result = config_cmd.try_run(tc, include_dirs=[python_include],
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError("Failed to test configuration. "
                                  "See previous error messages for more information.")

            # Python 2.3 causes a segfault when
            #  trying to re-acquire the thread-state
            #  which is done in error-handling
            #  ufunc code.  NPY_ALLOW_C_API and friends
            #  cause the segfault. So, we disable threading
            #  for now.
            if sys.version[:5] < '2.4.2':
                nosmp = 1
            else:
                # Perhaps a fancier check is in order here.
                #  so that threads are only enabled if there
                #  are actually multiple CPUS? -- but
                #  threaded code can be nice even on a single
                #  CPU so that long-calculating code doesn't
                #  block.
                try:
                    # Note: the mere *presence* of NPY_NOSMP in the
                    # environment disables threading, whatever its value.
                    nosmp = os.environ['NPY_NOSMP']
                    nosmp = 1
                except KeyError:
                    nosmp = 0
            if nosmp:
                moredefs = [('NPY_ALLOW_THREADS', '0')]
            else:
                moredefs = []

            # Check for math library: first choice that links wins.
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[], ['m'], ['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0, mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc, libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            def check_func(func_name):
                # Probe for an optional libm function.
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs,
                                             decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if sys.platform == 'win32':
                moredefs.append('NPY_NO_SIGNAL')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod', decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            # Append the collected #defines to the generated header.
            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))
            if not nosmp:  # default is to use WITH_THREAD
                target_f.write('#ifdef WITH_THREAD\n'
                               '#define NPY_ALLOW_THREADS 1\n'
                               '#else\n'
                               '#define NPY_ALLOW_THREADS 0\n'
                               '#endif\n')
            target_f.close()
        else:
            # Header is up to date; recover the math libraries from it so
            # ext.libraries stays consistent across re-runs.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a source generator that runs code_generators/<module_name>.py."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                print('executing', script)
                h_file, c_file, doc_file = m.generate_api(build_dir)
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_array_api = generate_api_func('generate_array_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext, build_dir):
        """Write __umath_generated.c from the generate_umath definitions."""
        target = join(build_dir, '__umath_generated.c')
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')
    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'arrayobject.c'),
            join('src', 'arraymethods.c'),
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
            join('src', '_signbit.c'),
            join('src', '_isnan.c'),
            join('src', 'ucsnarrow.c'),
            join('include', 'numpy', '*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir, 'genapi.py'),
            join(codegen_dir, '*.txt'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('multiarray',
                         sources=[join('src', 'multiarraymodule.c'),
                                  generate_config_h,
                                  generate_array_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  join(codegen_dir, 'generate_array_api.py'),
                                  join('*.py')],
                         depends=deps,
                         )

    config.add_extension('umath',
                         sources=[generate_config_h,
                                  join('src', 'umathmodule.c.src'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  ],
                         depends=[join('src', 'ufuncobject.c'),
                                  generate_umath_py,
                                  join(codegen_dir, 'generate_ufunc_api.py'),
                                  ] + deps,
                         )

    config.add_extension('_sort',
                         sources=[join('src', '_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_array_api,
                                  ],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src', 'scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_array_api,
                                  generate_ufunc_api],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        # Source generator: only build _dotblas when a BLAS was found.
        if blas_info:
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[join('blasdot', '_dotblas.c'),
                                  join('blasdot', 'cblas.h'),
                                  ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_data_dir('tests')
    config.make_svn_version_py()

    return config
def configuration(parent_package='',top_path=None):
    """numpy.distutils configuration for the numpy.core subpackage.

    Sets up the config.h/_numpyconfig.h generators, the npymath and
    npysort helper libraries, and the multiarray/umath extension modules
    plus their test extensions.  Called by the parent setup.py; the
    signature is the standard numpy.distutils subpackage hook.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # In a released numpy a C API version mismatch is treated as an error
    # rather than a warning.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py under a package-unique name.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches the expensive configure-style checks so they run only once,
    # even though both header generators below need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        # Generate the private build-time configuration header config.h
        # and return its path.  Also extends ext.libraries (when `ext`
        # has that attribute) with the detected math libraries.
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            # Dump the generated header into the build log.
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Header is up to date; re-read the math libraries from it so
            # ext.libraries stays consistent across re-runs.
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "private"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        # Build a source generator that runs code_generators/<module_name>.py
        # and registers the headers it produces as data files.
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("NPY_INTERNAL_BUILD", "1")]) # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        # Enable large-file support on non-AIX platforms.
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api]
                         )

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
                       join('src', 'npymath', 'npy_math.c'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    is_msvc = platform.system() == 'Windows'
    config.add_installed_library('npymath',
                                 sources=npymath_sources + [get_mathlib_info],
                                 install_dir='lib',
                                 build_info={
                                     'include_dirs' : [],  # empty list required for creating npy_math_internal.h
                                     'extra_compiler_args' : (['/GL-'] if is_msvc else []),
                                 })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
                              subst_dict)

    #######################################################################
    #                          npysort library                            #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'private', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'private', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'array_assign.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'cblasfuncs.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'dragon4.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'strfuncs.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('src', 'private', 'npy_config.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('src', 'private', 'mem_overlap.h'),
            join('src', 'private', 'npy_longdouble.h'),
            join('src', 'private', 'ufunc_override.h'),
            join('src', 'private', 'binop_override.h'),
            join('src', 'private', 'npy_extint128.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            # add library sources as distuils does not consider libraries
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign.c'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dragon4.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'numpyos.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'strfuncs.c'),
            join('src', 'multiarray', 'temp_elide.c'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'ucsnarrow.c'),
            join('src', 'multiarray', 'vdot.c'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'mem_overlap.c'),
            join('src', 'private', 'npy_longdouble.c'),
            join('src', 'private', 'ufunc_override.c'),
            ]

    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
                               join('src', 'multiarray', 'python_xerbla.c'),
                               ])
        if uses_accelerate_framework(blas_info):
            multiarray_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    config.add_extension('multiarray',
                         sources=multiarray_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                           umath module                              #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        # Write __umath_generated.c from generate_umath.py's defdict.
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'extobj.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c'),
            join('src', 'umath', 'override.c'),
            join('src', 'private', 'mem_overlap.c'),
            join('src', 'private', 'npy_longdouble.c'),
            join('src', 'private', 'ufunc_override.c')]

    umath_deps = [
            generate_umath_py,
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'override.h'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('src', 'private', 'mem_overlap.h'),
            join('src', 'private', 'npy_longdouble.h'),
            join('src', 'private', 'ufunc_override.h'),
            join('src', 'private', 'binop_override.h')] + npymath_sources

    config.add_extension('umath',
                         sources=umath_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_umath_c,
                                  generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                         sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('test_rational',
                         sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                     struct_ufunc_test module                        #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                         sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                         sources=[join('src', 'multiarray', 'multiarray_tests.c.src'),
                                  join('src', 'private', 'mem_overlap.c')],
                         depends=[join('src', 'private', 'mem_overlap.h'),
                                  join('src', 'private', 'npy_extint128.h')],
                         libraries=['npymath'])

    #######################################################################
    #                   operand_flag_tests module                         #
    #######################################################################

    config.add_extension('operand_flag_tests',
                         sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for numpy.core (numpyconfig.h era).

    Generates config.h and numpyconfig.h, runs the array/ufunc C API
    generators, and declares the multiarray, umath, _sort, scalarmath
    and _dotblas extension modules.  The signature is the standard
    numpy.distutils subpackage hook and must not change.

    Fixes applied: converted Python-2-only ``print``/``raise E, msg``
    statements to Python-3-compatible syntax (the old forms fail to
    parse under Python 3), and removed a stray no-op expression
    statement (``config.numpy_include_dirs`` on its own line).
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # Load code_generators/generate_umath.py under a package-unique name.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        """Generate (or reuse) build_dir/config.h; extends ext.libraries."""
        target = join(build_dir, 'config.h')
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            tc = generate_testcode(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError(
                    "Non-existing %s. Perhaps you need to install"
                    " python-dev|python-devel." % (python_h))
            result = config_cmd.try_run(tc, include_dirs=[python_include],
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError("Failed to test configuration. "
                                  "See previous error messages for more information.")

            moredefs = []

            # Check for math library: first choice that links wins.
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[], ['m'], ['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0, mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc, libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            def check_func(func_name):
                # Probe for an optional libm function.
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs,
                                             decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform == 'win32' or os.name == 'nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print('BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' %
                      (a, os.name, sys.platform))
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod', decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            # Append the collected #defines to the generated header.
            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))
            target_f.close()

            # Dump the generated header into the build log.
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Header is up to date; recover the math libraries from it so
            # ext.libraries stays consistent across re-runs.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)
        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, 'numpyconfig.h')
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError(
                    "Non-existing %s. Perhaps you need to install"
                    " python-dev|python-devel." % (python_h))

            result = config_cmd.try_run(testcode,
                                        include_dirs=[python_include] +
                                                     config.numpy_include_dirs,
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError("Failed to generate numpy configuration. "
                                  "See previous error messages for more information.")

            # Dump the generated header into the build log.
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a source generator that runs code_generators/<module_name>.py."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(build_dir)
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_array_api = generate_api_func('generate_array_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext, build_dir):
        """Write __umath_generated.c from the generate_umath definitions."""
        target = join(build_dir, '__umath_generated.c')
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')
    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'arrayobject.c'),
            join('src', 'arraymethods.c'),
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
            join('src', '_signbit.c'),
            join('src', '_isnan.c'),
            join('src', 'ucsnarrow.c'),
            join('include', 'numpy', '*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir, 'genapi.py'),
            join(codegen_dir, '*.txt'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('multiarray',
                         sources=[join('src', 'multiarraymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  join(codegen_dir, 'generate_array_api.py'),
                                  join('*.py')],
                         depends=deps,
                         )

    config.add_extension('umath',
                         sources=[generate_config_h,
                                  generate_numpyconfig_h,
                                  join('src', 'umathmodule.c.src'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  join('src', 'scalartypes.inc.src'),
                                  join('src', 'arraytypes.inc.src'),
                                  ],
                         depends=[join('src', 'ufuncobject.c'),
                                  generate_umath_py,
                                  join(codegen_dir, 'generate_ufunc_api.py'),
                                  ] + deps,
                         )

    config.add_extension('_sort',
                         sources=[join('src', '_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  ],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src', 'scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_array_api,
                                  generate_ufunc_api],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        # Source generator: only build _dotblas with a suitable BLAS.
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[join('blasdot', '_dotblas.c'),
                                  join('blasdot', 'cblas.h'),
                                  ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')
    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the scons-built 'core' package.

    Registers the SConscript that actually builds the core, and hooks that
    expose the scons-generated headers (config.h, numpyconfig.h, the
    multiarray/ufunc API headers) to distutils once scons has run.

    Parameters
    ----------
    parent_package : str
        Name of the parent package ('' for a top-level build).
    top_path : str or None
        Path of the top-level setup directory.

    Returns
    -------
    numpy.distutils.misc_util.Configuration
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.command.scons import get_scons_pkg_build_dir

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    config.add_subpackage('code_generators')

    # Files to register with numpy.distutils so they end up in sdists and
    # trigger rebuilds: blasdot sources, API order definitions, and every
    # C/.src template under src/.
    dot_blas_src = [join('blasdot', '_dotblas.c'),
                    join('blasdot', 'cblas.h')]
    api_definition = [join('code_generators', 'numpy_api_order.txt'),
                      join('code_generators', 'ufunc_api_order.txt')]
    core_src = [join('src', basename(i))
                for i in glob.glob(join(local_dir, 'src', '*.c'))]
    core_src += [join('src', basename(i))
                 for i in glob.glob(join(local_dir, 'src', '*.src'))]

    source_files = dot_blas_src + api_definition + core_src + \
                   [join(header_dir, 'numpyconfig.h.in')]

    # The helpers below register scons-generated files with distutils; they
    # are only valid once scons has run, hence the post_hook wiring further
    # down.  NOTE: the original code started each helper with a dead
    # ``scons_build_dir = get_scons_build_dir()`` assignment although only
    # get_scons_pkg_build_dir was imported, which raised NameError when the
    # post-hook ran; the unused call has been removed.
    def add_config_header():
        # config.h lives in the scons build directory; make that directory
        # an include dir so dependent extensions can find the header.
        target = join(get_scons_pkg_build_dir(config.name), 'config.h')
        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

    def add_numpyconfig_header():
        # numpyconfig.h is both an include-path header and an installed
        # data file (it ships with the package).
        target = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/numpyconfig.h')
        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)
        config.add_data_files((header_dir, target))

    def add_array_api():
        # Generated multiarray C API header plus its plain-text API listing.
        h_file = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/__multiarray_api.h')
        t_file = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/multiarray_api.txt')
        config.add_data_files((header_dir, h_file), (header_dir, t_file))

    def add_ufunc_api():
        # Generated ufunc C API header plus its plain-text API listing.
        h_file = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/__ufunc_api.h')
        t_file = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/ufunc_api.txt')
        config.add_data_files((header_dir, h_file), (header_dir, t_file))

    def add_generated_files(*args, **kw):
        # Runs after the SConscript: register everything scons produced.
        add_config_header()
        add_numpyconfig_header()
        add_array_api()
        add_ufunc_api()

    config.add_sconscript('SConstruct',
                          post_hook=add_generated_files,
                          source_files=source_files)
    config.add_scons_installed_library('npymath', 'lib')

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_data_dir('tests')
    config.make_svn_version_py()

    return config
def configuration(parent_package='',top_path=None):
    """Assemble the numpy.distutils configuration for 'neurospin'.

    Builds the 'cstat' C library (fff + wrapper + bundled randomkit,
    linked against system LAPACK when one is found, otherwise against the
    bundled f2c-translated lapack_lite) and registers all subpackages.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info

    config = Configuration('neurospin', parent_package, top_path)

    # Mirrors numpy/random/setup.py: work around the mingw ftime issue
    # via a preprocessor define when required.
    defs = []
    if needs_mingw_ftime_workaround():
        defs.append(("NPY_NEEDS_MINGW_TIME_WORKAROUND", None))

    # Headers for the cstat library components, plus numpy's own headers.
    for subdir in ('fff', 'randomkit', 'wrapper'):
        config.add_include_dirs(os.path.join(LIBS, subdir))
    config.add_include_dirs(get_numpy_include_dirs())

    # FIXME: the external library 'mtrand' (C) under randomkit/ is copied
    # from numpy (numpy-trunk/numpy/random/mtrand/) and bundled into fff
    # purely for installation simplicity.  If numpy ever exposes that API,
    # linking against it would beat carrying the sources around.
    sources = [os.path.join(LIBS, subdir, '*.c')
               for subdir in ('fff', 'wrapper', 'randomkit')]

    # Locate LAPACK.
    # XXX: get_info() behaves inconsistently across platforms: 'lapack' and
    # 'lapack_opt' return different dicts even between 32- and 64-bit
    # Ubuntu 8.10, and on OSX get_info('lapack_opt') omits the 'libraries'
    # and 'library_dirs' keys while get_info('lapack') has them.  The
    # two-step probe below does the right thing on OSX and Linux for now;
    # clarification should be sought on the numpy list.
    # XXX: if these lines change, propagate the change to the other .so
    # builds in the neurospin build system.
    lapack_info = get_info('lapack_opt', 0)
    if 'libraries' not in lapack_info:
        # 'lapack_opt' did not give us link information (seen on OSX);
        # retry with plain 'lapack'.  NOTE: scipy.linalg uses lapack_opt,
        # not 'lapack'...
        lapack_info = get_info('lapack', 0)

    if not lapack_info:
        # Rescue path: no system LAPACK at all, fall back to the bundled
        # lapack lite sources (f2c-translated).
        log.warn('No lapack installation found, using lapack lite distribution')
        sources.append(os.path.join(LIBS,'lapack_lite','*.c'))
        library_dirs = []
        libraries = []
    else:
        # Best case: a real LAPACK was found; link against it.
        library_dirs = lapack_info['library_dirs']
        libraries = lapack_info['libraries']
        if 'include_dirs' in lapack_info:
            config.add_include_dirs(lapack_info['include_dirs'])

    # Echo what we decided so build logs are self-explanatory.
    print('LAPACK build options:')
    print('library_dirs: %s ' % library_dirs)
    print('libraries: %s ' % libraries)
    print('lapack_info: %s ' % lapack_info)

    config.add_library('cstat',
                       sources=sources,
                       macros=defs,
                       library_dirs=library_dirs,
                       libraries=libraries,
                       extra_info=lapack_info)

    # Register every subpackage.
    for pkg in ('bindings', 'clustering', 'eda', 'glm', 'graph', 'group',
                'scripts', 'spatial_models', 'utils', 'viz_tools',
                'datasets', 'image', 'segmentation', 'registration'):
        config.add_subpackage(pkg)

    config.make_config_py()  # installs __config__.py

    return config
def configuration(parent_package="", top_path=None):
    """Build configuration for the numpy 'core' package.

    Generates the configuration headers (config.h, numpyconfig.h), the C
    API headers, and the templated .c sources, then registers the
    multiarray/umath/_sort/scalarmath/_dotblas extensions and the
    installed 'npymath' library with numpy.distutils.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration("core", parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, "code_generators")

    # For released versions, a C API mismatch is fatal instead of a warning.
    if is_released(config):
        warnings.simplefilter("error", MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py as a module under a mangled
    # name (dots replaced by underscores) so it can be used below.
    generate_umath_py = join(codegen_dir, "generate_umath.py")
    n = dot_join(config.name, "generate_umath")
    generate_umath = imp.load_module("_".join(n.split(".")),
                                     open(generate_umath_py, "U"),
                                     generate_umath_py,
                                     (".py", "U", 1))

    header_dir = "include/numpy"  # this is relative to config.path_in_package

    # Caches expensive config checks so they run only once even though both
    # header generators (and several extensions) need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        """Generate <build_dir>/include/numpy/config.h (build-time feature
        macros) and record its directory as an include dir.  Used as a
        callable source by the extensions below."""
        target = join(build_dir, header_dir, "config.h")
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info("Generating %s", target)

            # Check that the toolchain works, to fail early if it doesn't
            # (avoid late errors with MATHLIB which are confusing if the
            # compiler does not work).
            config_cmd.try_link("int main(void) { return 0;}")

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(("MATHLIB", ",".join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append("__NPY_PRIVATE_NO_SIGNAL")

            # Windows checks
            if sys.platform == "win32" or os.name == "nt":
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            # NOTE(review): this assignment is local to generate_config_h;
            # the later `if PYTHON_HAS_UNICODE_WIDE:` in the enclosing
            # configuration body cannot see it.  Presumably a module-level
            # PYTHON_HAS_UNICODE_WIDE exists elsewhere in this file —
            # verify, otherwise that check raises NameError.
            if not config_cmd.check_decl("Py_UNICODE_WIDE", headers=["Python.h"]):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(("ENABLE_SEPARATE_COMPILATION", 1))

            # Generate the config.h file from moredefs
            target_f = open(target, "w")
            for d in moredefs:
                # Entries are either bare macro names or (name, value) pairs.
                if isinstance(d, str):
                    target_f.write("#define %s\n" % (d))
                else:
                    target_f.write("#define %s %s\n" % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write("#ifndef __cplusplus\n")
            if inline == "inline":
                target_f.write("/* #undef inline */\n")
            else:
                target_f.write("#define inline %s\n" % inline)
            target_f.write("#endif\n")
            target_f.close()

            # Dump the generated header to the build log (Python 2 prints).
            print "File:", target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print "EOF"
        else:
            # Header is up to date: recover the MATHLIB list by parsing the
            # existing config.h instead of re-running the checks.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = "#define MATHLIB"
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(","))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, "libraries"):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, "numpyconfig.h")
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info("Generating %s", target)

            # Check sizeof
            # (note the swapped unpacking vs generate_config_h: this header
            # gets the second element of the cached check_types result)
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(("NPY_NO_SIGNAL", 1))

            if is_npy_no_smp():
                moredefs.append(("NPY_NO_SMP", 1))
            else:
                moredefs.append(("NPY_NO_SMP", 0))

            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(("NPY_ENABLE_SEPARATE_COMPILATION", 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl("PRIdPTR", headers=["inttypes.h"]):
                moredefs.append(("NPY_USE_C99_FORMATS", 1))

            # Inline check
            inline = config_cmd.check_inline()

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(("NPY_VISIBILITY_HIDDEN", hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(("NPY_ABI_VERSION", "0x%.8X" % C_ABI_VERSION))
            moredefs.append(("NPY_API_VERSION", "0x%.8X" % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, "w")
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write("#define %s\n" % (d))
                else:
                    target_f.write("#define %s %s\n" % (d[0], d[1]))

            # define NPY_INLINE to recognized keyword
            target_f.write("#define NPY_INLINE %s\n" % inline)

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print "File: %s" % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print "EOF"
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a callable-source that runs code_generators/<module>.py
        to produce an API header + doc file and registers them."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + ".py")
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info("executing %s", script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                # Always restore sys.path, even if the generator raises.
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func("generate_numpy_api")
    generate_ufunc_api = generate_api_func("generate_ufunc_api")

    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join("src", "multiarray")
        sources = [join(local_dir, subpath, "scalartypes.c.src"),
                   join(local_dir, subpath, "arraytypes.c.src")]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd("build_src")
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join("src", "umath")
        sources = [join(local_dir, subpath, "loops.c.src"),
                   join(local_dir, subpath, "umathmodule.c.src")]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd("build_src")
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        """Write the generated ufunc table (__umath_generated.c) using the
        generate_umath module loaded above; returns [] because the file is
        #included rather than compiled separately."""
        target = join(build_dir, header_dir, "__umath_generated.c")
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, "w")
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files("include/numpy/*.h")
    config.add_include_dirs(join("src", "npymath"))
    config.add_include_dirs(join("src", "multiarray"))
    config.add_include_dirs(join("src", "umath"))

    config.numpy_include_dirs.extend(config.paths("include"))

    # Shared dependency list: rebuild the extensions when any of these change.
    deps = [
        join("src", "npymath", "_signbit.c"),
        join("include", "numpy", "*object.h"),
        "include/numpy/fenv/fenv.c",
        "include/numpy/fenv/fenv.h",
        join(codegen_dir, "genapi.py"),
        join(codegen_dir, "*.txt"),
    ]

    # Don't install fenv unless we need them.
    if sys.platform == "cygwin":
        config.add_data_dir("include/numpy/fenv")

    config.add_extension(
        "_sort",
        sources=[join("src", "_sortmodule.c.src"),
                 generate_config_h,
                 generate_numpyconfig_h,
                 generate_numpy_api],
    )

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    # Substitution dict for the .ini.in pkg-config templates below; the
    # mathlib entries are filled in lazily by get_mathlib_info.
    subst_dict = dict([("sep", os.path.sep)])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # updated the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()
        mlibs = check_mathlib(config_cmd)

        posix_mlib = " ".join(["-l%s" % l for l in mlibs])
        msvc_mlib = " ".join(["%s.lib" % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    config.add_installed_library("npymath",
                                 sources=[join("src", "npymath", "npy_math.c.src"),
                                          get_mathlib_info],
                                 install_dir="lib")
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config", subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    # Headers the multiarray extension depends on (rebuild triggers only).
    multiarray_deps = [
        join("src", "multiarray", "arrayobject.h"),
        join("src", "multiarray", "arraytypes.h"),
        join("src", "multiarray", "buffer.h"),
        join("src", "multiarray", "calculation.h"),
        join("src", "multiarray", "common.h"),
        join("src", "multiarray", "convert_datatype.h"),
        join("src", "multiarray", "convert.h"),
        join("src", "multiarray", "conversion_utils.h"),
        join("src", "multiarray", "ctors.h"),
        join("src", "multiarray", "descriptor.h"),
        join("src", "multiarray", "getset.h"),
        join("src", "multiarray", "hashdescr.h"),
        join("src", "multiarray", "iterators.h"),
        join("src", "multiarray", "mapping.h"),
        join("src", "multiarray", "methods.h"),
        join("src", "multiarray", "multiarraymodule.h"),
        join("src", "multiarray", "number.h"),
        join("src", "multiarray", "numpyos.h"),
        join("src", "multiarray", "refcount.h"),
        join("src", "multiarray", "scalartypes.h"),
        join("src", "multiarray", "sequence.h"),
        join("src", "multiarray", "shape.h"),
        join("src", "multiarray", "ucsnarrow.h"),
        join("src", "multiarray", "usertypes.h"),
    ]

    multiarray_src = [
        join("src", "multiarray", "multiarraymodule.c"),
        join("src", "multiarray", "hashdescr.c"),
        join("src", "multiarray", "arrayobject.c"),
        join("src", "multiarray", "buffer.c"),
        join("src", "multiarray", "datetime.c"),
        join("src", "multiarray", "numpyos.c"),
        join("src", "multiarray", "conversion_utils.c"),
        join("src", "multiarray", "flagsobject.c"),
        join("src", "multiarray", "descriptor.c"),
        join("src", "multiarray", "iterators.c"),
        join("src", "multiarray", "mapping.c"),
        join("src", "multiarray", "number.c"),
        join("src", "multiarray", "getset.c"),
        join("src", "multiarray", "sequence.c"),
        join("src", "multiarray", "methods.c"),
        join("src", "multiarray", "ctors.c"),
        join("src", "multiarray", "convert_datatype.c"),
        join("src", "multiarray", "convert.c"),
        join("src", "multiarray", "shape.c"),
        join("src", "multiarray", "item_selection.c"),
        join("src", "multiarray", "calculation.c"),
        join("src", "multiarray", "common.c"),
        join("src", "multiarray", "usertypes.c"),
        join("src", "multiarray", "scalarapi.c"),
        join("src", "multiarray", "refcount.c"),
        join("src", "multiarray", "arraytypes.c.src"),
        join("src", "multiarray", "scalartypes.c.src"),
    ]

    # NOTE(review): PYTHON_HAS_UNICODE_WIDE is assigned inside
    # generate_config_h (a closure-local name) and is not visible here
    # unless a module-level binding exists elsewhere in this file — verify.
    if PYTHON_HAS_UNICODE_WIDE:
        multiarray_src.append(join("src", "multiarray", "ucsnarrow.c"))

    umath_src = [
        join("src", "umath", "umathmodule.c.src"),
        join("src", "umath", "funcs.inc.src"),
        join("src", "umath", "loops.c.src"),
        join("src", "umath", "ufunc_object.c"),
    ]

    umath_deps = [generate_umath_py,
                  join(codegen_dir, "generate_ufunc_api.py")]

    # Single-compilation-unit mode: compile one *_onefile.c that #includes
    # everything; the real sources become dependencies and the templated
    # .c.src files are expanded by the generate_*_templated_sources hooks.
    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [join("src", "multiarray", "multiarraymodule_onefile.c")]
        multiarray_src.append(generate_multiarray_templated_sources)

        umath_deps.extend(umath_src)
        umath_src = [join("src", "umath", "umathmodule_onefile.c")]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join("src", "umath", "funcs.inc.src"))

    config.add_extension(
        "multiarray",
        sources=multiarray_src + [
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
            join(codegen_dir, "generate_numpy_api.py"),
            join("*.py"),
        ],
        depends=deps + multiarray_deps,
        libraries=["npymath"],
    )

    config.add_extension(
        "umath",
        sources=[generate_config_h,
                 generate_numpyconfig_h,
                 generate_umath_c,
                 generate_ufunc_api] + umath_src,
        depends=deps + umath_deps,
        libraries=["npymath"],
    )

    config.add_extension(
        "scalarmath",
        sources=[
            join("src", "scalarmathmodule.c.src"),
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
            generate_ufunc_api,
        ],
    )

    # Configure blasdot
    blas_info = get_info("blas_opt", 0)
    # blas_info = {}

    def get_dotblas_sources(ext, build_dir):
        # Callable source: decides at build time whether _dotblas gets built.
        if blas_info:
            if ("NO_ATLAS_INFO", 1) in blas_info.get("define_macros", []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension(
        "_dotblas",
        sources=[get_dotblas_sources],
        depends=[join("blasdot", "_dotblas.c"),
                 join("blasdot", "cblas.h")],
        include_dirs=["blasdot"],
        extra_info=blas_info,
    )

    config.add_extension("umath_tests",
                         sources=[join("src", "umath", "umath_tests.c.src")])

    config.add_extension("multiarray_tests",
                         sources=[join("src", "multiarray", "multiarray_tests.c.src")])

    config.add_data_dir("tests")
    config.add_data_dir("tests/data")

    config.make_svn_version_py()

    return config