Example #1
File: setup.py  Project: dismalpy/dismalpy
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_info

    config = Configuration('ssm', parent_package, top_path)

    info = get_info("npymath")
    config.add_extension('_statespace',
                         include_dirs=['dismalpy/src'],
                         sources=['_statespace.c'], extra_info=info)
    config.add_extension('_kalman_filter',
                         include_dirs=['dismalpy/src'],
                         sources=['_kalman_filter.c'], extra_info=info)
    config.add_extension('_kalman_smoother',
                         include_dirs=['dismalpy/src'],
                         sources=['_kalman_smoother.c'], extra_info=info)
    config.add_extension('_simulation_smoother',
                         include_dirs=['dismalpy/src'],
                         sources=['_simulation_smoother.c'], extra_info=info)
    config.add_extension('_tools',
                         include_dirs=['dismalpy/src'],
                         sources=['_tools.c'])
    config.add_subpackage('compat')
    config.add_data_dir('tests')

    config.add_subpackage('_filters')
    config.add_subpackage('_smoothers')
    config.make_config_py()
    return config
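
Note: in each of these examples, configuration() is meant to be consumed by numpy.distutils.core.setup() at the bottom of the corresponding setup.py. A minimal, generic driver block (a sketch of the usual idiom, not copied from any one project listed here) looks like this:

if __name__ == '__main__':
    from numpy.distutils.core import setup
    # Build the Configuration defined above and pass its dict form to setup().
    setup(**configuration(top_path='').todict())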
Example #2
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.misc_util import scons_generate_config_py

    pkgname = 'numpy'
    config = Configuration(pkgname, parent_package, top_path,
                           setup_name = 'setupscons.py')
    config.add_subpackage('distutils')
    config.add_subpackage('testing')
    config.add_subpackage('f2py')
    config.add_subpackage('core')
    config.add_subpackage('lib')
    config.add_subpackage('oldnumeric')
    config.add_subpackage('numarray')
    config.add_subpackage('fft')
    config.add_subpackage('linalg')
    config.add_subpackage('random')
    config.add_subpackage('ma')
    config.add_subpackage('matrixlib')
    config.add_data_dir('doc')
    config.add_data_dir('tests')

    def add_config(*args, **kw):
        # Generate __config__, handle inplace issues.
        if kw['scons_cmd'].inplace:
            target = pjoin(kw['pkg_name'], '__config__.py')
        else:
            target = pjoin(kw['scons_cmd'].build_lib, kw['pkg_name'],
                           '__config__.py')
        scons_generate_config_py(target)
    config.add_sconscript(None, post_hook = add_config)

    return config
Example #3
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('utilities', parent_package, top_path)

    config.add_data_dir('tests')
    config.add_data_dir(join('tests','data'))

    config.add_extension('sparse_ext',
                         sources='sparse_ext.c')

    config.add_extension('sparse_matrix_ext',
                         sources=['sparse_matrix_ext.c', 'sparse_dok.c'])


    config.add_extension('util_ext',
                         sources='util_ext.c')

    if sys.platform == 'darwin':
        extra_args = None
    else:
        extra_args = ['-fopenmp']

    config.add_extension('cg_ext',
                         sources='cg_ext.c',
                         extra_compile_args=extra_args,
                         extra_link_args=extra_args)

    config.add_extension('quad_tree_ext',
                         sources=['quad_tree_ext.c', 'quad_tree.c'])
    

    return config
Example #4
def configuration(parent_package="", top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration("f2py_ext", parent_package, top_path)
    config.add_extension("fib2", ["src/fib2.pyf", "src/fib1.f"])
    config.add_data_dir("tests")
    return config
Example #5
File: setup.py  Project: Teva/scikits.image
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('morphology', parent_package, top_path)
    config.add_data_dir('tests')

    cython(['ccomp.pyx'], working_path=base_path)
    cython(['cmorph.pyx'], working_path=base_path)
    cython(['_watershed.pyx'], working_path=base_path)
    cython(['_skeletonize.pyx'], working_path=base_path)
    cython(['_pnpoly.pyx'], working_path=base_path)
    cython(['_convex_hull.pyx'], working_path=base_path)

    config.add_extension('ccomp', sources=['ccomp.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('cmorph', sources=['cmorph.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_watershed', sources=['_watershed.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_skeletonize', sources=['_skeletonize.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_pnpoly', sources=['_pnpoly.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_convex_hull', sources=['_convex_hull.c'],
                         include_dirs=[get_numpy_include_dirs()])

    return config
Example #6
File: setup.py  Project: BranYang/scipy
def configuration(parent_package='',top_path=None):
    from scipy._build_utils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils import get_g77_abi_wrappers

    config = Configuration('isolve',parent_package,top_path)

    lapack_opt = get_info('lapack_opt')

    # iterative methods
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
#               'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
#               'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
#               'SORREVCOM.f.src'
               ]

    Util = ['getbreak.f.src']
    sources = Util + methods + ['_iterative.pyf.src']
    sources = [join('iterative', x) for x in sources]
    sources += get_g77_abi_wrappers(lapack_opt)

    config.add_extension('_iterative',
                         sources=sources,
                         extra_info=lapack_opt)

    config.add_data_dir('tests')

    return config
Example #7
def configuration(parent_package='',top_path=None):
    
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    
    config = Configuration('fit_interpolate', parent_package, top_path)

    config.add_data_dir('tests')


    #util_dir = os.path.abspath(join(os.path.dirname(__file__),'..','utilities'))
    
    util_dir = join('..','utilities')
    
    util_srcs = [join(util_dir,'quad_tree.c'),
                 join(util_dir,'sparse_dok.c'),
                 join(util_dir,'sparse_csr.c')]
    
    if sys.platform == 'darwin':
        extra_args = None
    else:
        extra_args = ['-fopenmp']

    config.add_extension('fitsmooth',
                         sources=['fitsmooth.c']+util_srcs,
                         include_dirs=[util_dir],
                         extra_compile_args=extra_args,
                         extra_link_args=extra_args)


    return config
Example #8
File: setup.py  Project: demianw/nipype
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('interfaces', parent_package, top_path)

    config.add_subpackage('afni')
    config.add_subpackage('ants')
    config.add_subpackage('camino')
    config.add_subpackage('camino2trackvis')
    config.add_subpackage('cmtk')
    config.add_subpackage('diffusion_toolkit')
    config.add_subpackage('dipy')
    config.add_subpackage('elastix')
    config.add_subpackage('freesurfer')
    config.add_subpackage('fsl')
    config.add_subpackage('mne')
    config.add_subpackage('mrtrix')
    config.add_subpackage('mrtrix3')
    config.add_subpackage('nipy')
    config.add_subpackage('spm')
    config.add_subpackage('slicer')
    config.add_subpackage('mipav')

    config.add_data_dir('script_templates')
    config.add_data_dir('tests')

    return config
Example #9
File: setup.py  Project: AldenJurling/scipy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('stats', parent_package, top_path)

    config.add_data_dir('tests')

    statlib_src = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_src)

    # add statlib module
    config.add_extension('statlib',
        sources=['statlib.pyf'],
        f2py_options=['--no-wrap-functions'],
        libraries=['statlib'],
        depends=statlib_src
    )

    # add _stats module
    config.add_extension('_stats',
        sources=['_stats.c'],
    )

    # add mvn module
    config.add_extension('mvn',
        sources=['mvn.pyf','mvndst.f'],
    )

    return config
Example #10
File: setup.py  Project: Horta/numpy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('testing', parent_package, top_path)

    config.add_subpackage('_private')
    config.add_data_dir('tests')
    return config
Example #11
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('filter', parent_package, top_path)
    config.add_data_dir('tests')

    cython(['_ctmf.pyx'], working_path=base_path)
    cython(['_denoise_cy.pyx'], working_path=base_path)
    cython(['rank/core_cy.pyx'], working_path=base_path)
    cython(['rank/generic_cy.pyx'], working_path=base_path)
    cython(['rank/percentile_cy.pyx'], working_path=base_path)
    cython(['rank/bilateral_cy.pyx'], working_path=base_path)

    config.add_extension('_ctmf', sources=['_ctmf.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_denoise_cy', sources=['_denoise_cy.c'],
        include_dirs=[get_numpy_include_dirs(), '../_shared'])
    config.add_extension('rank.core_cy', sources=['rank/core_cy.c'],
        include_dirs=[get_numpy_include_dirs()])
    config.add_extension('rank.generic_cy', sources=['rank/generic_cy.c'],
        include_dirs=[get_numpy_include_dirs()])
    config.add_extension(
        'rank.percentile_cy', sources=['rank/percentile_cy.c'],
        include_dirs=[get_numpy_include_dirs()])
    config.add_extension(
        'rank.bilateral_cy', sources=['rank/bilateral_cy.c'],
        include_dirs=[get_numpy_include_dirs()])

    return config
Example #12
def configuration(parent_package='', top_path=None):
    if os.path.exists('MANIFEST'):
        os.remove('MANIFEST')

    from numpy.distutils.misc_util import Configuration
    config = Configuration(None, parent_package, top_path)

    # main modules
    config.add_subpackage('pypreprocess')

    # spm loader
    config.add_subpackage('pypreprocess/spm_loader')

    # external dependencies
    config.add_subpackage('pypreprocess/external')
    config.add_subpackage('pypreprocess/external/tempita')
    config.add_subpackage('pypreprocess/external/nistats')

    # plugin for generating reports
    config.add_subpackage('pypreprocess/reporting')
    config.add_data_dir("pypreprocess/reporting/template_reports")
    config.add_data_dir("pypreprocess/reporting/css")
    config.add_data_dir("pypreprocess/reporting/js")
    config.add_data_dir("pypreprocess/reporting/icons")
    config.add_data_dir("pypreprocess/reporting/images")

    return config
Example #13
File: setup.py  Project: CRP/statsmodels
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('statsmodels', parent_package, top_path)

    # these are subpackages because they have Cython code
    config.add_subpackage('nonparametric')
    config.add_subpackage('tsa')

    #TODO: delegate the non-test stuff to subpackages
    config.add_data_files('sandbox/panel/test_data.txt')

    curdir = os.path.abspath(os.path.dirname(__file__))

    extradatafiles = [os.path.relpath(os.path.join(r,d),start=curdir)
                      for r,ds,f in os.walk(os.path.join(curdir, 'datasets'))
                      for d in f if not os.path.splitext(d)[1] in
                          ['.py', '.pyc']]
    for f in extradatafiles:
        config.add_data_files(f)

    # add all the tests and results directories for non-*.py files
    for root, dirnames, filenames in os.walk(curdir):
        for dir_name in dirnames:
            if dir_name in ['tests', 'results'] and root != 'sandbox':
                config.add_data_dir(os.path.relpath(
                                    os.path.join(root, dir_name),
                                    start = curdir)
                                    )

    return config
Example #14
File: setup.py  Project: 87/scipy
def configuration(parent_package='', top_path=None):
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('csgraph', parent_package, top_path)

    config.add_data_dir('tests')

    config.add_extension('_shortest_path',
         sources=['_shortest_path.c'],
         include_dirs=[numpy.get_include()])

    config.add_extension('_traversal',
         sources=['_traversal.c'],
         include_dirs=[numpy.get_include()])

    config.add_extension('_min_spanning_tree',
         sources=['_min_spanning_tree.c'],
         include_dirs=[numpy.get_include()])

    config.add_extension('_tools',
         sources=['_tools.c'],
         include_dirs=[numpy.get_include()])

    return config
Example #15
def configuration(parent_package='',top_path=None):
    import numpy
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)

    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    #config.add_extension('__umfpack',
    #                      sources=[umfpack_i],
    #                      depends=['umfpack.i'],
    #                      **build_info)

    return config
Example #16
File: setup.py  Project: A-0-/scikit-image
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('feature', parent_package, top_path)
    config.add_data_dir('tests')

    cython(['corner_cy.pyx'], working_path=base_path)
    cython(['censure_cy.pyx'], working_path=base_path)
    cython(['orb_cy.pyx'], working_path=base_path)
    cython(['brief_cy.pyx'], working_path=base_path)
    cython(['_texture.pyx'], working_path=base_path)
    cython(['_hessian_det_appx.pyx'], working_path=base_path)

    config.add_extension('corner_cy', sources=['corner_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('censure_cy', sources=['censure_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('orb_cy', sources=['orb_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('brief_cy', sources=['brief_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_texture', sources=['_texture.c'],
                         include_dirs=[get_numpy_include_dirs(), '../_shared'])
    config.add_extension('_hessian_det_appx', sources=['_hessian_det_appx.c'],
                         include_dirs=[get_numpy_include_dirs()])

    return config
Example #17
File: setup.py  Project: arthornsby/numpy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('pyrex_ext',parent_package,top_path)
    config.add_extension('primes',
                         ['primes.pyx'])
    config.add_data_dir('tests')
    return config
Example #18
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('measure', parent_package, top_path)
    config.add_data_dir('tests')

    cython(['_ccomp.pyx'], working_path=base_path)
    cython(['_find_contours_cy.pyx'], working_path=base_path)
    cython(['_moments_cy.pyx'], working_path=base_path)
    cython(['_marching_cubes_classic_cy.pyx'], working_path=base_path)
    cython(['_marching_cubes_lewiner_cy.pyx'], working_path=base_path)
    cython(['_pnpoly.pyx'], working_path=base_path)

    config.add_extension('_ccomp', sources=['_ccomp.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_find_contours_cy', sources=['_find_contours_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_moments_cy', sources=['_moments_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_marching_cubes_classic_cy',
                         sources=['_marching_cubes_classic_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_marching_cubes_lewiner_cy',
                         sources=['_marching_cubes_lewiner_cy.c'],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_extension('_pnpoly', sources=['_pnpoly.c'],
                         include_dirs=[get_numpy_include_dirs(), '../_shared'])

    return config
Example #19
File: setupscons.py  Project: derr57/scipy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('umfpack', parent_package, top_path)
    config.add_sconscript('SConstruct')
    config.add_data_dir('tests')

#    umf_info = get_info( 'umfpack', notfound_action = 1 )
#
#    umfpack_i_file = config.paths('umfpack.i')[0]
#    def umfpack_i(ext, build_dir):
#        if umf_info:
#            return umfpack_i_file
#
#    blas_info = get_info('blas_opt')
#    build_info = {}
#    dict_append(build_info, **umf_info)
#    dict_append(build_info, **blas_info)
#
#    config.add_extension( '__umfpack',
#                          sources = [umfpack_i],
#                          depends = ['umfpack.i'],
#                          **build_info)
#
    return config
Example #20
File: setup.py  Project: Garyfallidis/nipy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('statistics', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_extension('intvol', 'intvol.pyx',
                         include_dirs = [np.get_include()])
    return config
Example #21
File: setup.py  Project: Garyfallidis/nipy
def configuration(parent_package='',top_path=None):
    
    from numpy.distutils.misc_util import Configuration
    
    config = Configuration('graph', parent_package, top_path)
    config.add_data_dir('tests')
   
    # We need this because libcstat.a is linked to lapack, which can
    # be a fortran library, and the linker needs this information.
    from numpy.distutils.system_info import get_info
    lapack_info = get_info('lapack_opt',0)
    if 'libraries' not in lapack_info:
        # But on OSX that may not give us what we need, so try with 'lapack'
        # instead.  NOTE: scipy.linalg uses lapack_opt, not 'lapack'...
        lapack_info = get_info('lapack',0)

    config.add_extension(
                '_graph',
                sources=['graph.c'],
                libraries=['cstat'],
                extra_info=lapack_info,
                )
    config.add_extension(
                '_field',
                sources=['field.c'],
                libraries=['cstat'],
                extra_info=lapack_info,
                )
    
    return config
Example #22
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('skimage', parent_package, top_path)

    config.add_subpackage('_shared')
    config.add_subpackage('color')
    config.add_subpackage('data')
    config.add_subpackage('draw')
    config.add_subpackage('feature')
    config.add_subpackage('filter')
    config.add_subpackage('graph')
    config.add_subpackage('io')
    config.add_subpackage('measure')
    config.add_subpackage('morphology')
    config.add_subpackage('transform')
    config.add_subpackage('util')
    config.add_subpackage('segmentation')

    def add_test_directories(arg, dirname, fnames):
        if dirname.split(os.path.sep)[-1] == 'tests':
            config.add_data_dir(dirname)

    # Add test directories
    from os.path import isdir, dirname, join
    rel_isdir = lambda d: isdir(join(curpath, d))

    curpath = join(dirname(__file__), './')
    subdirs = [join(d, 'tests') for d in os.listdir(curpath) if rel_isdir(d)]
    subdirs = [d for d in subdirs if rel_isdir(d)]
    for test_dir in subdirs:
        config.add_data_dir(test_dir)
    return config
Example #23
def configuration(parent_package="", top_path=None):
    from numpy.distutils.misc_util import Configuration, get_mathlibs

    config = Configuration("random", parent_package, top_path)

    def generate_libraries(ext, build_dir):
        config_cmd = config.get_config_cmd()
        if top_path is None:
            libs = get_mathlibs()
        else:
            path = join(split(build_dir)[0], "core")
            libs = get_mathlibs(path)
        tc = testcode_wincrypt()
        if config_cmd.try_run(tc):
            libs.append("Advapi32")
        ext.libraries.extend(libs)
        return None

    libs = []
    # Configure mtrand
    config.add_extension(
        "mtrand",
        sources=[join("mtrand", x) for x in ["mtrand.c", "randomkit.c", "initarray.c", "distributions.c"]]
        + [generate_libraries],
        libraries=libs,
        depends=[join("mtrand", "*.h"), join("mtrand", "*.pyx"), join("mtrand", "*.pxi")],
    )

    config.add_data_files((".", join("mtrand", "randomkit.h")))
    config.add_data_dir("tests")

    return config
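
As this example (and Example #31 further down) illustrates, numpy.distutils also accepts plain callables in an extension's sources list: each is invoked at build time with (extension, build_dir) and may return the path(s) of generated sources, or None to contribute nothing. A minimal sketch of such a build-time source generator, using a purely hypothetical file name, could look like:

import os

def generate_version_c(ext, build_dir):
    # Hypothetical generated source file, for illustration only.
    target = os.path.join(build_dir, '_version.c')
    with open(target, 'w') as f:
        f.write('const char *package_version = "0.0.0";\n')
    return target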
Example #24
File: setup.py  Project: PeterZhouSZ/mayavi
def configuration(parent_package=None, top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration(None, parent_package, top_path)
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=True,
    )

    config.add_subpackage('tvtk')
    config.add_data_dir('mayavi/core/lut')
    config.add_data_dir('mayavi/tests/data')
    config.add_data_dir('mayavi/tests/csv_files')

    # Image files.
    for pkgdir in ('mayavi', 'tvtk'):
        for root, dirs, files in os.walk(pkgdir):
            if split(root)[-1] == 'images':
                config.add_data_dir(root)

    # *.ini files.
    config.add_data_dir('tvtk/plugins/scene')
    config.add_data_dir('mayavi/preferences')

    return config
Example #25
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('lib',parent_package,top_path)
    config.add_subpackage('parser')
    config.add_data_files('*.txt','parser/*.txt')
    config.add_data_dir('src')
    return config
Example #26
File: setup.py  Project: wrbrooks/VB3
def configuration(parent_package="", top_path=None):
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration("odr", parent_package, top_path)

    libodr_files = ["d_odr.f", "d_mprec.f", "dlunoc.f"]

    blas_info = get_info("blas_opt")
    if blas_info:
        libodr_files.append("d_lpk.f")
    else:
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append("d_lpkbls.f")

    libodr = [join("odrpack", x) for x in libodr_files]
    config.add_library("odrpack", sources=libodr)
    sources = ["__odrpack.c"]
    libraries = ["odrpack"] + blas_info.pop("libraries", [])
    include_dirs = ["."] + blas_info.pop("include_dirs", [])
    config.add_extension(
        "__odrpack", sources=sources, libraries=libraries, include_dirs=include_dirs, depends=["odrpack.h"], **blas_info
    )

    config.add_data_dir("tests")
    return config
Example #27
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('io', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_files('_plugins/*.ini')

    # This function tries to create C files from the given .pyx files.  If
    # it fails, we build the checked-in .c files.
    cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
           working_path=base_path)

    config.add_extension('_plugins._colormixer',
                         sources=['_plugins/_colormixer.c'],
                         include_dirs=[get_numpy_include_dirs()])

    config.add_extension('_plugins._histograms',
                         sources=['_plugins/_histograms.c'],
                         include_dirs=[get_numpy_include_dirs()])

    config.add_extension('_plugins._tifffile',
                         sources=['_plugins/tifffile.c'],
                         include_dirs=[get_numpy_include_dirs()])

    return config
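
The cython() helper called in the scikit-image examples (here and in the other scikit-image snippets above) is defined elsewhere in that project's build code; only its intent is described by the comment above. A simplified, assumed sketch of what such a helper might do is:

import os
import subprocess

def cython(pyx_files, working_path=''):
    # Best-effort .pyx -> .c translation; if Cython is unavailable or fails,
    # fall through silently so the checked-in .c files get compiled instead.
    for pyx in pyx_files:
        path = os.path.join(working_path, pyx)
        try:
            subprocess.check_call(['cython', path])
        except (OSError, subprocess.CalledProcessError):
            pass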
Example #28
File: setup.py  Project: beiko-lab/gengis
def configuration(parent_package='', top_path=None):
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    config = Configuration('odr', parent_package, top_path)

    libodr_files = ['d_odr.f',
                    'd_mprec.f',
                    'dlunoc.f']

    blas_info = get_info('blas_opt')
    if blas_info:
        libodr_files.append('d_lpk.f')
    else:
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append('d_lpkbls.f')

    odrpack_src = [join('odrpack', x) for x in libodr_files]
    config.add_library('odrpack', sources=odrpack_src)

    sources = ['__odrpack.c']
    libraries = ['odrpack'] + blas_info.pop('libraries', [])
    include_dirs = ['.'] + blas_info.pop('include_dirs', [])
    config.add_extension('__odrpack',
        sources=sources,
        libraries=libraries,
        include_dirs=include_dirs,
        depends=(['odrpack.h'] + odrpack_src),
        **blas_info
    )

    config.add_data_dir('tests')
    return config
Example #29
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('stats', parent_package, top_path)

    config.add_data_dir('tests')

    config.add_library('statlib',
                       sources=[join('statlib', '*.f')])

    # add statlib module
    config.add_extension('statlib',
        sources=['statlib.pyf'],
        f2py_options=['--no-wrap-functions'],
        libraries=['statlib'],
    )

    # add vonmises_cython module
    config.add_extension('vonmises_cython',
        sources=['vonmises_cython.c'], # FIXME: use cython source
    )

    # add futil module
    config.add_extension('futil',
        sources=['futil.f'],
    )

    # add mvn module
    config.add_extension('mvn',
        sources=['mvn.pyf','mvndst.f'],
    )

    return config
Example #30
def configuration(parent_package='',top_path=None):
    from numpy.distutils.system_info import get_info, NotFoundError

    from numpy.distutils.misc_util import Configuration

    config = Configuration('isolve',parent_package,top_path)

    lapack_opt = get_info('lapack_opt')

    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # iterative methods
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
#               'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
#               'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
#               'SORREVCOM.f.src'
               ]
    Util = ['STOPTEST2.f.src','getbreak.f.src']
    sources = Util + methods + ['_iterative.pyf.src']
    config.add_extension('_iterative',
                         sources = [join('iterative',x) for x in sources],
                         extra_info = lapack_opt
                         )

    config.add_data_dir('tests')

    return config
Example #31
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import (Configuration, dot_join,
                                           exec_mod_from_location)
    from numpy.distutils.system_info import (get_info, blas_opt_info,
                                             lapack_opt_info)

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = exec_mod_from_location('_'.join(n.split('.')),
                                            generate_umath_py)

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, ext, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            if can_link_svml():
                moredefs.append(('NPY_CAN_LINK_SVML', 1))

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))
            else:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 0))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))
            else:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 0))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

            if check_for_right_shift_internal_compiler_error(config_cmd):
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift')

            # Generate the config.h file from moredefs
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # define inline to our keyword, or nothing
                target_f.write('#ifndef __cplusplus\n')
                if inline == 'inline':
                    target_f.write('/* #undef inline */\n')
                else:
                    target_f.write('#define inline %s\n' % inline)
                target_f.write('#endif\n')

                # add the guard to make sure config.h is never included directly,
                # but always through npy_config.h
                target_f.write(
                    textwrap.dedent("""
                    #ifndef NUMPY_CORE_SRC_COMMON_NPY_CONFIG_H_
                    #error config.h should never be included directly, include npy_config.h instead
                    #endif
                    """))

            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        else:
            mathlibs = []
            with open(target) as target_f:
                for line in target_f:
                    s = '#define MATHLIB'
                    if line.startswith(s):
                        value = line[len(s):].strip()
                        if value:
                            mathlibs.extend(value.split(','))

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attribute (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # Define __STDC_FORMAT_MACROS
                target_f.write(
                    textwrap.dedent("""
                    #ifndef __STDC_FORMAT_MACROS
                    #define __STDC_FORMAT_MACROS 1
                    #endif
                    """))

            # Dump the numpyconfig.h header to stdout
            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_dir('include/numpy')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))
    config.add_include_dirs(join('src', '_simd'))

    config.add_define_macros([
        ("NPY_INTERNAL_BUILD", "1")
    ])  # this macro indicates that the NumPy build is in progress
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [
        join('src', 'npymath', '_signbit.c'),
        join('include', 'numpy', '*object.h'),
        join(codegen_dir, 'genapi.py'),
    ]

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()
        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        for lang, test_code, note in (
            ('c', 'int main(void) { return 0;}', ''),
            ('c++', ('int main(void)'
                     '{ auto x = 0.0; return static_cast<int>(x); }'),
             ('note: A compiler with support for C++11 language '
              'features is required.')),
        ):
            is_cpp = lang == 'c++'
            if is_cpp:
                # this is a workaround to get rid of invalid C++ flags
                # without making big changes to config.
                # C was tested first, so the compiler should be available here
                bk_c = config_cmd.compiler
                config_cmd.compiler = bk_c.cxx_compiler()
            st = config_cmd.try_link(test_code, lang=lang)
            if not st:
                # rerun the failing command in verbose mode
                config_cmd.compiler.verbose = True
                config_cmd.try_link(test_code, lang=lang)
                raise RuntimeError(
                    f"Broken toolchain: cannot link a simple {lang.upper()} "
                    f"program. {note}")
            if is_cpp:
                config_cmd.compiler = bk_c
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [
        join('src', 'npymath', 'npy_math_internal.h.src'),
        join('src', 'npymath', 'npy_math.c'),
        join('src', 'npymath', 'ieee754.c.src'),
        join('src', 'npymath', 'npy_math_complex.c.src'),
        join('src', 'npymath', 'halffloat.c')
    ]

    def gl_if_msvc(build_cmd):
        """ Add flag if we are using MSVC compiler

        We can't see this in our scope, because we have not initialized the
        distutils build command, so use this deferred calculation to run when
        we are building the library.
        """
        if build_cmd.compiler.compiler_type == 'msvc':
            # explicitly disable whole-program optimization
            return ['/GL-']
        return []

    config.add_installed_library(
        'npymath',
        sources=npymath_sources + [get_mathlib_info],
        install_dir='lib',
        build_info={
            'include_dirs':
            [],  # empty list required for creating npy_math_internal.h
            'extra_compiler_args': [gl_if_msvc],
        })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('_multiarray_tests',
                         sources=[
                             join('src', 'multiarray',
                                  '_multiarray_tests.c.src'),
                             join('src', 'common', 'mem_overlap.c'),
                             join('src', 'common', 'npy_argparse.c'),
                             join('src', 'common', 'npy_hashtable.c')
                         ],
                         depends=[
                             join('src', 'common', 'mem_overlap.h'),
                             join('src', 'common', 'npy_argparse.h'),
                             join('src', 'common', 'npy_hashtable.h'),
                             join('src', 'common', 'npy_extint128.h')
                         ],
                         libraries=['npymath'])

    #######################################################################
    #             _multiarray_umath module - common part                  #
    #######################################################################

    common_deps = [
        join('src', 'common', 'dlpack', 'dlpack.h'),
        join('src', 'common', 'array_assign.h'),
        join('src', 'common', 'binop_override.h'),
        join('src', 'common', 'cblasfuncs.h'),
        join('src', 'common', 'lowlevel_strided_loops.h'),
        join('src', 'common', 'mem_overlap.h'),
        join('src', 'common', 'npy_argparse.h'),
        join('src', 'common', 'npy_cblas.h'),
        join('src', 'common', 'npy_config.h'),
        join('src', 'common', 'npy_ctypes.h'),
        join('src', 'common', 'npy_dlpack.h'),
        join('src', 'common', 'npy_extint128.h'),
        join('src', 'common', 'npy_import.h'),
        join('src', 'common', 'npy_hashtable.h'),
        join('src', 'common', 'npy_longdouble.h'),
        join('src', 'common', 'npy_svml.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.h'),
        join('src', 'common', 'ufunc_override.h'),
        join('src', 'common', 'umathmodule.h'),
        join('src', 'common', 'numpyos.h'),
        join('src', 'common', 'npy_cpu_dispatch.h'),
        join('src', 'common', 'simd', 'simd.h'),
    ]

    common_src = [
        join('src', 'common', 'array_assign.c'),
        join('src', 'common', 'mem_overlap.c'),
        join('src', 'common', 'npy_argparse.c'),
        join('src', 'common', 'npy_hashtable.c'),
        join('src', 'common', 'npy_longdouble.c'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.c'),
        join('src', 'common', 'ufunc_override.c'),
        join('src', 'common', 'numpyos.c'),
        join('src', 'common', 'npy_cpu_features.c.src'),
    ]

    if os.environ.get('NPY_USE_BLAS_ILP64', "0") != "0":
        blas_info = get_info('blas_ilp64_opt', 2)
    else:
        blas_info = get_info('blas_opt', 0)

    have_blas = blas_info and ('HAVE_CBLAS', None) in blas_info.get(
        'define_macros', [])

    if have_blas:
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([
            join('src', 'common', 'cblasfuncs.c'),
            join('src', 'common', 'python_xerbla.c'),
        ])
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
        join('src', 'multiarray', 'abstractdtypes.h'),
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'arrayfunction_override.h'),
        join('src', 'multiarray', 'array_coercion.h'),
        join('src', 'multiarray', 'array_method.h'),
        join('src', 'multiarray', 'npy_buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'common_dtype.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'dtypemeta.h'),
        join('src', 'multiarray', 'dtype_transfer.h'),
        join('src', 'multiarray', 'dragon4.h'),
        join('src', 'multiarray', 'einsum_debug.h'),
        join('src', 'multiarray', 'einsum_sumprod.h'),
        join('src', 'multiarray', 'experimental_public_dtype_api.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'legacy_dtype_implementation.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'strfuncs.h'),
        join('src', 'multiarray', 'typeinfo.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'multiarray', 'vdot.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
        # add library sources as distutils does not consider library
        # dependencies
    ] + npymath_sources

    multiarray_src = [
        join('src', 'multiarray', 'abstractdtypes.c'),
        join('src', 'multiarray', 'alloc.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_coercion.c'),
        join('src', 'multiarray', 'array_method.c'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'arrayfunction_override.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'compiled_base.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'common_dtype.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dlpack.c'),
        join('src', 'multiarray', 'dtypemeta.c'),
        join('src', 'multiarray', 'dragon4.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'einsum_sumprod.c.src'),
        join('src', 'multiarray', 'experimental_public_dtype_api.c'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'legacy_dtype_implementation.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'strfuncs.c'),
        join('src', 'multiarray', 'temp_elide.c'),
        join('src', 'multiarray', 'typeinfo.c'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'vdot.c'),
        join('src', 'common', 'npy_sort.h.src'),
        join('src', 'npysort', 'quicksort.c.src'),
        join('src', 'npysort', 'mergesort.c.src'),
        join('src', 'npysort', 'timsort.c.src'),
        join('src', 'npysort', 'heapsort.c.src'),
        join('src', 'npysort', 'radixsort.cpp'),
        join('src', 'common', 'npy_partition.h.src'),
        join('src', 'npysort', 'selection.c.src'),
        join('src', 'common', 'npy_binsearch.h'),
        join('src', 'npysort', 'binsearch.cpp'),
    ]

    #######################################################################
    #             _multiarray_umath module - umath part                   #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            with open(target, 'w') as f:
                f.write(
                    generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
        return []

    def generate_umath_doc_header(ext, build_dir):
        from numpy.distutils.misc_util import exec_mod_from_location

        target = join(build_dir, header_dir, '_umath_doc_generated.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)

        generate_umath_doc_py = join(codegen_dir, 'generate_umath_doc.py')
        if newer(generate_umath_doc_py, target):
            n = dot_join(config.name, 'generate_umath_doc')
            generate_umath_doc = exec_mod_from_location(
                '_'.join(n.split('.')), generate_umath_doc_py)
            generate_umath_doc.write_code(target)

    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'loops.h.src'),
        join('src', 'umath', 'loops_utils.h.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'loops_unary_fp.dispatch.c.src'),
        join('src', 'umath', 'loops_arithm_fp.dispatch.c.src'),
        join('src', 'umath', 'loops_arithmetic.dispatch.c.src'),
        join('src', 'umath', 'loops_trigonometric.dispatch.c.src'),
        join('src', 'umath', 'loops_umath_fp.dispatch.c.src'),
        join('src', 'umath', 'loops_exponent_log.dispatch.c.src'),
        join('src', 'umath', 'matmul.h.src'),
        join('src', 'umath', 'matmul.c.src'),
        join('src', 'umath', 'clip.h'),
        join('src', 'umath', 'clip.cpp'),
        join('src', 'umath', 'dispatching.c'),
        join('src', 'umath', 'legacy_array_method.c'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'extobj.c'),
        join('src', 'umath', 'scalarmath.c.src'),
        join('src', 'umath', 'ufunc_type_resolution.c'),
        join('src', 'umath', 'override.c'),
        # For testing. Eventually, should use public API and be separate:
        join('src', 'umath', '_scaled_float_dtype.c'),
    ]

    umath_deps = [
        generate_umath_py,
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'override.h'),
        join(codegen_dir, 'generate_ufunc_api.py'),
        join(codegen_dir, 'ufunc_docstrings.py'),
    ]

    svml_path = join('numpy', 'core', 'src', 'umath', 'svml')
    svml_objs = []
    if can_link_svml() and check_svml_submodule(svml_path):
        svml_objs = glob.glob(svml_path + '/**/*.s', recursive=True)

    config.add_extension(
        '_multiarray_umath',
        # Forcing C language even though we have C++ sources.
        # It forces the C linker and does not link the C++ runtime.
        language='c',
        sources=multiarray_src + umath_src + common_src + [
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
            join(codegen_dir, 'generate_numpy_api.py'),
            join('*.py'),
            generate_umath_c,
            generate_umath_doc_header,
            generate_ufunc_api,
        ],
        depends=deps + multiarray_deps + umath_deps + common_deps,
        libraries=['npymath'],
        extra_objects=svml_objs,
        extra_info=extra_info,
        extra_cxx_compile_args=[
            '-std=c++11', '-D__STDC_VERSION__=0', '-fno-exceptions',
            '-fno-rtti'
        ])

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('_umath_tests',
                         sources=[
                             join('src', 'umath', '_umath_tests.c.src'),
                             join('src', 'umath', '_umath_tests.dispatch.c'),
                             join('src', 'common', 'npy_cpu_features.c.src'),
                         ])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension(
        '_rational_tests',
        sources=[join('src', 'umath', '_rational_tests.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension(
        '_struct_ufunc_tests',
        sources=[join('src', 'umath', '_struct_ufunc_tests.c.src')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension(
        '_operand_flag_tests',
        sources=[join('src', 'umath', '_operand_flag_tests.c')])

    #######################################################################
    #                        SIMD module                                  #
    #######################################################################

    config.add_extension('_simd',
                         sources=[
                             join('src', 'common', 'npy_cpu_features.c.src'),
                             join('src', '_simd', '_simd.c'),
                             join('src', '_simd', '_simd_inc.h.src'),
                             join('src', '_simd', '_simd_data.inc.src'),
                             join('src', '_simd', '_simd.dispatch.c.src'),
                         ],
                         depends=[
                             join('src', 'common', 'npy_cpu_dispatch.h'),
                             join('src', 'common', 'simd', 'simd.h'),
                             join('src', '_simd', '_simd.h'),
                             join('src', '_simd', '_simd_inc.h.src'),
                             join('src', '_simd', '_simd_data.inc.src'),
                             join('src', '_simd', '_simd_arg.inc'),
                             join('src', '_simd', '_simd_convert.inc'),
                             join('src', '_simd', '_simd_easyintrin.inc'),
                             join('src', '_simd', '_simd_vector.inc'),
                         ])

    config.add_subpackage('tests')
    config.add_data_dir('tests/data')
    config.add_data_dir('tests/examples')
    config.add_data_files('*.pyi')

    config.make_svn_version_py()

    return config
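
None of these configuration() functions are meant to be called by hand; a numpy.distutils-driven setup.py hands the callable to setup(), which expands it into the usual distutils metadata. A minimal driver sketch (assuming the configuration() above lives in that package's setup.py; packaging metadata omitted):

if __name__ == '__main__':
    # numpy.distutils calls configuration() itself and turns the returned
    # Configuration object into extensions, libraries and data files.
    from numpy.distutils.core import setup
    setup(configuration=configuration)
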
Example #32
0
File: setup.py Project: myneuronews/flbrh
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('f2py_ext', parent_package, top_path)
    config.add_extension('fib2', ['src/fib2.pyf', 'src/fib1.f'])
    config.add_data_dir('tests')
    return config
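
For quick experiments the same f2py machinery can be driven without a setup.py. A rough interactive equivalent (a sketch only: the module name 'fib_demo' and the free-form Fortran source below are made up here, and a working Fortran compiler is assumed):

import numpy.f2py

fortran_src = """
subroutine fib(a, n)
  ! fill a with the first n Fibonacci numbers
  integer, intent(in) :: n
  double precision, intent(out) :: a(n)
  integer :: i
  do i = 1, n
     if (i <= 2) then
        a(i) = 1.0d0
     else
        a(i) = a(i-1) + a(i-2)
     end if
  end do
end subroutine fib
"""

# Builds an importable extension module (fib_demo) in the current directory.
numpy.f2py.compile(fortran_src, modulename='fib_demo', extension='.f90', verbose=False)
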
Example #33
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "private"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("NPY_INTERNAL_BUILD", "1")]) # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api]
                         )

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
                       join('src', 'npymath', 'npy_math.c'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]
    
    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    is_msvc = platform.system() == 'Windows'
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib',
            build_info={
                'include_dirs' : [],  # empty list required for creating npy_math_internal.h
                'extra_compiler_args' : (['/GL-'] if is_msvc else []),
            })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'private', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'private', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'array_assign.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'cblasfuncs.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'dragon4.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'strfuncs.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('src', 'private', 'npy_config.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('src', 'private', 'mem_overlap.h'),
            join('src', 'private', 'npy_longdouble.h'),
            join('src', 'private', 'ufunc_override.h'),
            join('src', 'private', 'binop_override.h'),
            join('src', 'private', 'npy_extint128.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            # add library sources as distuils does not consider libraries
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign.c'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dragon4.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'numpyos.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'strfuncs.c'),
            join('src', 'multiarray', 'temp_elide.c'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'ucsnarrow.c'),
            join('src', 'multiarray', 'vdot.c'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'mem_overlap.c'),
            join('src', 'private', 'npy_longdouble.c'),
            join('src', 'private', 'ufunc_override.c'),
            ]

    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
                               join('src', 'multiarray', 'python_xerbla.c'),
                               ])
        if uses_accelerate_framework(blas_info):
            multiarray_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    config.add_extension('multiarray',
                         sources=multiarray_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                           umath module                              #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'extobj.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c'),
            join('src', 'umath', 'override.c'),
            join('src', 'private', 'mem_overlap.c'),
            join('src', 'private', 'npy_longdouble.c'),
            join('src', 'private', 'ufunc_override.c')]

    umath_deps = [
            generate_umath_py,
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'override.h'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('src', 'private', 'mem_overlap.h'),
            join('src', 'private', 'npy_longdouble.h'),
            join('src', 'private', 'ufunc_override.h'),
            join('src', 'private', 'binop_override.h')] + npymath_sources

    config.add_extension('umath',
                         sources=umath_src +
                                 [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_umath_c,
                                 generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                    sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('test_rational',
                    sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                    sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                    sources=[join('src', 'multiarray', 'multiarray_tests.c.src'),
                             join('src', 'private', 'mem_overlap.c')],
                    depends=[join('src', 'private', 'mem_overlap.h'),
                             join('src', 'private', 'npy_extint128.h')],
                    libraries=['npymath'])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension('operand_flag_tests',
                    sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
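
The npymath.ini.in template registered above via add_npy_pkg_config is what lets downstream packages ask an installed numpy for the npymath build flags through get_info('npymath') (the _distance_wrap extensions in later examples rely on exactly this). A minimal consumer sketch, assuming an installed numpy:

from numpy.distutils.misc_util import get_info

# A dict with keys such as 'libraries', 'library_dirs' and 'include_dirs',
# suitable for passing to config.add_extension(..., extra_info=...).
npymath_info = get_info('npymath')
print(sorted(npymath_info))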
Example #34
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from scipy._build_utils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # spatial.transform
    config.add_subpackage('transform')

    # qhull
    qhull_src = sorted(
        glob.glob(join(dirname(__file__), 'qhull_src', 'src', '*.c')))

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))

    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)
    config.add_extension('qhull',
                         sources=['qhull.c', 'qhull_misc.c'] + qhull_src,
                         **cfg)

    # cKDTree
    ckdtree_src = [
        'query.cxx', 'build.cxx', 'query_pairs.cxx', 'count_neighbors.cxx',
        'query_ball_point.cxx', 'query_ball_tree.cxx', 'sparse_distances.cxx'
    ]

    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = [
        'ckdtree_decl.h', 'coo_entries.h', 'distance_base.h', 'distance.h',
        'ordered_pair.h', 'partial_sort.h', 'rectangle.h'
    ]

    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    ext = config.add_extension('ckdtree',
                               sources=['ckdtree.cxx'] + ckdtree_src,
                               depends=ckdtree_dep,
                               include_dirs=inc_dirs +
                               [join('ckdtree', 'src')])
    ext._pre_build_hook = pre_build_hook

    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))

    config.add_extension('_voronoi', sources=['_voronoi.c'])

    config.add_extension('_hausdorff', sources=['_hausdorff.c'])

    # Add license files
    config.add_data_files('qhull_src/COPYING.txt')

    # Type stubs
    config.add_data_files('*.pyi')

    return config
Example #35
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from numpy.distutils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # qhull
    qhull_src = list(glob.glob(join(dirname(__file__), 'qhull', 'src', '*.c')))

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())

    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)

    def get_qhull_misc_config(ext, build_dir):
        # Generate a header file containing defines
        config_cmd = config.get_config_cmd()
        defines = []
        if config_cmd.check_func('open_memstream', decl=True, call=True):
            defines.append(('HAVE_OPEN_MEMSTREAM', '1'))
        target = join(dirname(__file__), 'qhull_misc_config.h')
        with open(target, 'w') as f:
            for name, value in defines:
                f.write('#define {0} {1}\n'.format(name, value))

    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src +
                         [get_qhull_misc_config],
                         **cfg)

    # cKDTree
    ckdtree_src = [
        'query.cxx', 'build.cxx', 'globals.cxx', 'cpp_exc.cxx',
        'query_pairs.cxx', 'count_neighbors.cxx', 'query_ball_point.cxx',
        'query_ball_tree.cxx', 'sparse_distances.cxx'
    ]

    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = [
        'ckdtree_decl.h', 'cpp_exc.h', 'ckdtree_methods.h', 'cpp_utils.h',
        'rectangle.h', 'distance.h', 'distance_box.h', 'ordered_pair.h'
    ]

    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=['ckdtree.cxx'] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])
    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))

    config.add_extension('_voronoi', sources=['_voronoi.c'])

    config.add_extension('_hausdorff', sources=['_hausdorff.c'])

    return config
Example #36
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('unit', parent_package, top_path)
    config.add_data_dir('tests')
    config.make_config_py()
    return config
Example #37
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('oldnumeric',parent_package,top_path)
    config.add_data_dir('tests')
    return config
Example #38
0
File: setup.py Project: ninalinzhiyun/VB3
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('blas', parent_package, top_path)

    blas_opt = get_info('blas_opt', notfound_action=2)

    atlas_version = ([v[3:-3] for k,v in blas_opt.get('define_macros',[]) \
                      if k=='ATLAS_INFO']+[None])[0]
    if atlas_version:
        print('ATLAS version: %s' % atlas_version)

    target_dir = ''
    skip_names = {'cblas': [], 'fblas': []}
    if skip_single_routines:
        target_dir = 'dbl'
        skip_names['cblas'].extend('saxpy caxpy'.split())
        skip_names['fblas'].extend(skip_names['cblas'])
        skip_names['fblas'].extend(\
            'srotg crotg srotmg srot csrot srotm sswap cswap sscal cscal'\
            ' csscal scopy ccopy sdot cdotu cdotc snrm2 scnrm2 sasum scasum'\
            ' isamax icamax sgemv cgemv chemv ssymv strmv ctrmv'\
            ' sgemm cgemm'.split())

    if using_lapack_blas:
        target_dir = join(target_dir, 'blas')
        skip_names['fblas'].extend(\
            'drotmg srotmg drotm srotm'.split())

    depends = [
        __file__, 'fblas_l?.pyf.src', 'fblas.pyf.src', 'fblaswrap.f.src',
        'fblaswrap_veclib_c.c.src'
    ]
    # fblas:
    if needs_cblas_wrapper(blas_opt):
        sources = ['fblas.pyf.src', 'fblaswrap_veclib_c.c.src']
    else:
        sources = ['fblas.pyf.src', 'fblaswrap.f.src']
    config.add_extension('fblas',
                         sources=sources,
                         depends=depends,
                         f2py_options=['skip:'] + skip_names['fblas'] + [':'],
                         extra_info=blas_opt)

    # cblas:
    def get_cblas_source(ext, build_dir):
        name = ext.name.split('.')[-1]
        assert name == 'cblas', repr(name)
        if atlas_version is None:
            target = join(build_dir, target_dir, 'cblas.pyf')
            from distutils.dep_util import newer
            if newer(__file__, target):
                f = open(target, 'w')
                f.write(tmpl_empty_cblas_pyf)
                f.close()
        else:
            target = ext.depends[0]
            assert os.path.basename(target) == 'cblas.pyf.src'
        return target

    config.add_extension('cblas',
                         sources=[get_cblas_source],
                         depends=['cblas.pyf.src', 'cblas_l?.pyf.src'],
                         f2py_options=['skip:'] + skip_names['cblas'] + [':'],
                         extra_info=blas_opt)

    config.add_data_dir('tests')

    return config
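
get_cblas_source above is an instance of a numpy.distutils idiom that recurs throughout these examples (generate_config_h, generate_umath_c, ...): an entry in sources= may be a callable that build_src invokes with (ext, build_dir) and that returns the path of a generated file, or None to drop that source. A stripped-down, hypothetical generator following that pattern:

import os
from os.path import join

def generate_version_c(ext, build_dir):
    # Hypothetical example: write a tiny C file into the build tree and hand
    # its path back to build_src so it gets compiled into the extension.
    target = join(build_dir, '_version_generated.c')
    if not os.path.exists(target):
        with open(target, 'w') as f:
            f.write('const char *demo_version = "0.0";\n')
    return target

# config.add_extension('demo', sources=[generate_version_c, 'demo.c'])
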
Example #39
0
File: setup.py Project: lisarosalina/App
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            tc = generate_testcode(target)
            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)
            result = config_cmd.try_run(tc,
                                        include_dirs=[python_include],
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError,"Failed to test configuration. "\
                      "See previous error messages for more information."

            moredefs = []
            #
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[], ['m'], ['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0, mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc, libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            def check_func(func_name):
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs,
                                             decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform == 'win32' or os.name == 'nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (
                    a, os.name, sys.platform)
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod',
                                         decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))
            target_f.close()
            print 'File:', target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, 'numpyconfig.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)

            config.numpy_include_dirs
            result = config_cmd.try_run(testcode,
                                include_dirs = [python_include] + \
                                                       config.numpy_include_dirs,
                                        library_dirs = default_lib_dirs)

            if not result:
                raise SystemError,"Failed to generate numpy configuration. "\
                      "See previous error messages for more information."

            print 'File: %s' % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_array_api = generate_api_func('generate_array_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(
                generate_umath.make_code(generate_umath.defdict,
                                         generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [
        join('src', 'arrayobject.c'),
        join('src', 'arraymethods.c'),
        join('src', 'scalartypes.inc.src'),
        join('src', 'arraytypes.inc.src'),
        join('src', '_signbit.c'),
        join('src', '_isnan.c'),
        join('src', 'ucsnarrow.c'),
        join('include', 'numpy', '*object.h'),
        'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
        join(codegen_dir, '*.txt')
    ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension(
        'multiarray',
        sources=[
            join('src', 'multiarraymodule.c'), generate_config_h,
            generate_numpyconfig_h, generate_array_api,
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
            join(codegen_dir, 'generate_array_api.py'),
            join('*.py')
        ],
        depends=deps,
    )

    config.add_extension(
        'umath',
        sources=[
            generate_config_h,
            generate_numpyconfig_h,
            join('src', 'umathmodule.c.src'),
            generate_umath_c,
            generate_ufunc_api,
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
        ],
        depends=[
            join('src', 'ufuncobject.c'),
            generate_umath_py,
            join(codegen_dir, 'generate_ufunc_api.py'),
        ] + deps,
    )

    config.add_extension(
        '_sort',
        sources=[
            join('src', '_sortmodule.c.src'),
            generate_config_h,
            generate_numpyconfig_h,
            generate_array_api,
        ],
    )

    config.add_extension(
        'scalarmath',
        sources=[
            join('src', 'scalarmathmodule.c.src'), generate_config_h,
            generate_numpyconfig_h, generate_array_api, generate_ufunc_api
        ],
    )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)

    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[
                             join('blasdot', '_dotblas.c'),
                             join('blasdot', 'cblas.h'),
                         ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
Example #40
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                         headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in [
                        'INTEL_EXTENDED_12_BYTES_LE',
                        'INTEL_EXTENDED_16_BYTES_LE', 'IEEE_QUAD_LE',
                        'IEEE_QUAD_BE', 'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE'
                ]:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" %
                                     rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and is therefore not listed in the
    # src argument of the build_ext command.
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [
            join(local_dir, subpath, 'scalartypes.c.src'),
            join(local_dir, subpath, 'arraytypes.c.src')
        ]

        # numpy.distutils generates .c from .c.src in weird directories; we
        # have to add them here as they depend on the build_dir.
        config.add_include_dirs(join(build_dir, subpath))

        cmd = get_cmd('build_src')
        cmd.ensure_finalized()

        cmd.template_sources(sources, ext)

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and is therefore not listed in the
    # src argument of the build_ext command.
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'umathmodule.c.src')
        ]

        # numpy.distutils generates .c from .c.src in weird directories; we
        # have to add them here as they depend on the build_dir.
        config.add_include_dirs(join(build_dir, subpath))

        cmd = get_cmd('build_src')
        cmd.ensure_finalized()

        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(
                generate_umath.make_code(generate_umath.defdict,
                                         generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [
        join('src', 'npymath', '_signbit.c'),
        join('include', 'numpy', '*object.h'),
        'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
    ]

    # Don't install fenv unless we need it.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension(
        '_sort',
        sources=[
            join('src', '_sortmodule.c.src'),
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
        ],
    )

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clibs are built before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during the npymath build.
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    config.add_installed_library('npymath',
                                 sources=[
                                     join('src', 'npymath', 'npy_math.c.src'),
                                     join('src', 'npymath', 'ieee754.c.src'),
                                     join('src', 'npymath',
                                          'npy_math_complex.c.src'),
                                     get_mathlib_info
                                 ],
                                 install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'numpyos.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h')
    ]

    multiarray_src = [
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'numpyos.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'scalartypes.c.src')
    ]

    if PYTHON_HAS_UNICODE_WIDE:
        multiarray_src.append(join('src', 'multiarray', 'ucsnarrow.c'))

    umath_src = [
        join('src', 'umath', 'umathmodule.c.src'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'ufunc_object.c')
    ]

    umath_deps = [
        generate_umath_py,
        join(codegen_dir, 'generate_ufunc_api.py')
    ]

    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [
            join('src', 'multiarray', 'multiarraymodule_onefile.c')
        ]
        multiarray_src.append(generate_multiarray_templated_sources)

        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))

    config.add_extension(
        'multiarray',
        sources=multiarray_src + [
            generate_config_h, generate_numpyconfig_h, generate_numpy_api,
            join(codegen_dir, 'generate_numpy_api.py'),
            join('*.py')
        ],
        depends=deps + multiarray_deps,
        libraries=['npymath'])

    config.add_extension(
        'umath',
        sources=[
            generate_config_h,
            generate_numpyconfig_h,
            generate_umath_c,
            generate_ufunc_api,
        ] + umath_src,
        depends=deps + umath_deps,
        libraries=['npymath'],
    )

    config.add_extension(
        'scalarmath',
        sources=[
            join('src', 'scalarmathmodule.c.src'), generate_config_h,
            generate_numpyconfig_h, generate_numpy_api, generate_ufunc_api
        ],
    )

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)

    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS; a Fortran-compiled BLAS will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[
                             join('blasdot', '_dotblas.c'),
                             join('blasdot', 'cblas.h'),
                         ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    config.add_extension('umath_tests',
                         sources=[join('src', 'umath', 'umath_tests.c.src')])

    config.add_extension(
        'multiarray_tests',
        sources=[join('src', 'multiarray', 'multiarray_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
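
Note: a configuration() function like the one above is normally handed to
numpy.distutils' setup(); the short sketch below (not part of the original
file) shows the usual hookup.

if __name__ == '__main__':
    # numpy.distutils calls configuration() itself when it is passed via the
    # `configuration` keyword.
    from numpy.distutils.core import setup
    setup(configuration=configuration)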
Example #41
0
File: setup.py Project: thomasrockhu/scipy
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    import numpy as np
    config = Configuration('stats', parent_package, top_path)

    config.add_data_dir('tests')

    statlib_src = [join('statlib', '*.f')]
    config.add_library('statlib', sources=statlib_src)

    # add statlib module
    config.add_extension('statlib',
        sources=['statlib.pyf'],
        f2py_options=['--no-wrap-functions'],
        libraries=['statlib'],
        depends=statlib_src
    )

    # add _stats module
    config.add_extension('_stats',
        sources=['_stats.c'],
    )

    # add mvn module
    config.add_extension('mvn',
        sources=['mvn.pyf', 'mvndst.f'],
    )

    # add _sobol module
    config.add_extension('_sobol',
        sources=['_sobol.c', ],
    )
    config.add_data_files('_sobol_direction_numbers.npz')

    # add BiasedUrn module
    config.add_data_files('biasedurn.pxd')
    from _generate_pyx import isNPY_OLD
    NPY_OLD = isNPY_OLD()
    biasedurn_libs = [] if NPY_OLD else ['npyrandom']
    biasedurn_libdirs = [] if NPY_OLD else [join(np.get_include(),
                                                 '..', '..', 'random', 'lib')]
    ext = config.add_extension(
        'biasedurn',
        sources=[
            'biasedurn.cxx',
            'biasedurn/impls.cpp',
            'biasedurn/fnchyppr.cpp',
            'biasedurn/wnchyppr.cpp',
            'biasedurn/stoc1.cpp',
            'biasedurn/stoc3.cpp'],
        include_dirs=[np.get_include()],
        library_dirs=biasedurn_libdirs,
        libraries=biasedurn_libs,
        define_macros=[('R_BUILD', None)],
        language='c++',
        extra_compile_args=['-Wno-narrowing'] if system() == 'Darwin' else [],
        depends=['biasedurn/stocR.h'],
    )
    ext._pre_build_hook = pre_build_hook

    return config
Example #42
0
def configuration(parent_package="", top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, blas_opt_info, lapack_opt_info

    # Accelerate is buggy, disallow it. See also numpy/linalg/setup.py
    for opt_order in (blas_opt_info.blas_order, lapack_opt_info.lapack_order):
        if "accelerate" in opt_order:
            opt_order.remove("accelerate")

    config = Configuration("core", parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, "code_generators")

    if is_released(config):
        warnings.simplefilter("error", MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, "generate_umath.py")
    n = dot_join(config.name, "generate_umath")
    generate_umath = npy_load_module("_".join(n.split(".")), generate_umath_py,
                                     (".py", "U", 1))

    header_dir = "include/numpy"  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, "config.h")
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info("Generating %s", target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(("MATHLIB", ",".join(mathlibs)))

            check_math_capabilities(config_cmd, ext, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append("__NPY_PRIVATE_NO_SIGNAL")

            # Windows checks
            if sys.platform == "win32" or os.name == "nt":
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(("NPY_RESTRICT", config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(("NPY_RELAXED_STRIDES_CHECKING", 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(("NPY_RELAXED_STRIDES_DEBUG", 1))

            # Use the new experimental casting implementation in NumPy 1.20:
            if NPY_USE_NEW_CASTINGIMPL:
                moredefs.append(("NPY_USE_NEW_CASTINGIMPL", 1))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(("HAVE_LDOUBLE_%s" % rep, 1))

            if check_for_right_shift_internal_compiler_error(config_cmd):
                moredefs.append("NPY_DO_NOT_OPTIMIZE_LONG_right_shift")
                moredefs.append("NPY_DO_NOT_OPTIMIZE_ULONG_right_shift")
                moredefs.append("NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift")
                moredefs.append("NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift")

            # Generate the config.h file from moredefs
            with open(target, "w") as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write("#define %s\n" % (d))
                    else:
                        target_f.write("#define %s %s\n" % (d[0], d[1]))

                # define inline to our keyword, or nothing
                target_f.write("#ifndef __cplusplus\n")
                if inline == "inline":
                    target_f.write("/* #undef inline */\n")
                else:
                    target_f.write("#define inline %s\n" % inline)
                target_f.write("#endif\n")

                # add the guard to make sure config.h is never included directly,
                # but always through npy_config.h
                target_f.write(
                    textwrap.dedent("""
                    #ifndef _NPY_NPY_CONFIG_H_
                    #error config.h should never be included directly, include npy_config.h instead
                    #endif
                    """))

            log.info("File: %s" % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info("EOF")
        else:
            mathlibs = []
            with open(target) as target_f:
                for line in target_f:
                    s = "#define MATHLIB"
                    if line.startswith(s):
                        value = line[len(s):].strip()
                        if value:
                            mathlibs.extend(value.split(","))

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attribute (and none is
        # needed).
        if hasattr(ext, "libraries"):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, "_numpyconfig.h")
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info("Generating %s", target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(("NPY_NO_SIGNAL", 1))

            if is_npy_no_smp():
                moredefs.append(("NPY_NO_SMP", 1))
            else:
                moredefs.append(("NPY_NO_SMP", 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(("NPY_RELAXED_STRIDES_CHECKING", 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(("NPY_RELAXED_STRIDES_DEBUG", 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl("PRIdPTR", headers=["inttypes.h"]):
                moredefs.append(("NPY_USE_C99_FORMATS", 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(("NPY_VISIBILITY_HIDDEN", hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(("NPY_ABI_VERSION", "0x%.8X" % C_ABI_VERSION))
            moredefs.append(("NPY_API_VERSION", "0x%.8X" % C_API_VERSION))

            # Add moredefs to header
            with open(target, "w") as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write("#define %s\n" % (d))
                    else:
                        target_f.write("#define %s %s\n" % (d[0], d[1]))

                # Define __STDC_FORMAT_MACROS
                target_f.write(
                    textwrap.dedent("""
                    #ifndef __STDC_FORMAT_MACROS
                    #define __STDC_FORMAT_MACROS 1
                    #endif
                    """))

            # Dump the numpyconfig.h header to stdout
            log.info("File: %s" % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info("EOF")
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + ".py")
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info("executing %s", script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func("generate_numpy_api")
    generate_ufunc_api = generate_api_func("generate_ufunc_api")

    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_dir("include/numpy")
    config.add_include_dirs(join("src", "npymath"))
    config.add_include_dirs(join("src", "multiarray"))
    config.add_include_dirs(join("src", "umath"))
    config.add_include_dirs(join("src", "npysort"))
    config.add_include_dirs(join("src", "_simd"))

    config.add_define_macros([
        ("NPY_INTERNAL_BUILD", "1")
    ])  # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([("_LARGEFILE_SOURCE", "1")])
        config.add_define_macros([("_LARGEFILE64_SOURCE", "1")])

    config.numpy_include_dirs.extend(config.paths("include"))

    deps = [
        join("src", "npymath", "_signbit.c"),
        join("include", "numpy", "*object.h"),
        join(codegen_dir, "genapi.py"),
    ]

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link("int main(void) { return 0;}")
        if not st:
            # rerun the failing command in verbose mode
            config_cmd.compiler.verbose = True
            config_cmd.try_link("int main(void) { return 0;}")
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = " ".join(["-l%s" % l for l in mlibs])
        msvc_mlib = " ".join(["%s.lib" % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [
        join("src", "npymath", "npy_math_internal.h.src"),
        join("src", "npymath", "npy_math.c"),
        join("src", "npymath", "ieee754.c.src"),
        join("src", "npymath", "npy_math_complex.c.src"),
        join("src", "npymath", "halffloat.c"),
    ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    # Intel and Clang also don't seem happy with /GL
    is_msvc = platform.platform().startswith(
        "Windows") and platform.python_compiler().startswith("MS")
    config.add_installed_library(
        "npymath",
        sources=npymath_sources + [get_mathlib_info],
        install_dir="lib",
        build_info={
            "include_dirs":
            [],  # empty list required for creating npy_math_internal.h
            "extra_compiler_args": (["/GL-"] if is_msvc else []),
        },
    )
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension(
        "_multiarray_tests",
        sources=[
            join("src", "multiarray", "_multiarray_tests.c.src"),
            join("src", "common", "mem_overlap.c"),
        ],
        depends=[
            join("src", "common", "mem_overlap.h"),
            join("src", "common", "npy_extint128.h"),
        ],
        libraries=["npymath"],
    )

    #######################################################################
    #             _multiarray_umath module - common part                  #
    #######################################################################

    common_deps = [
        join("src", "common", "array_assign.h"),
        join("src", "common", "binop_override.h"),
        join("src", "common", "cblasfuncs.h"),
        join("src", "common", "lowlevel_strided_loops.h"),
        join("src", "common", "mem_overlap.h"),
        join("src", "common", "npy_cblas.h"),
        join("src", "common", "npy_config.h"),
        join("src", "common", "npy_ctypes.h"),
        join("src", "common", "npy_extint128.h"),
        join("src", "common", "npy_import.h"),
        join("src", "common", "npy_longdouble.h"),
        join("src", "common", "templ_common.h.src"),
        join("src", "common", "ucsnarrow.h"),
        join("src", "common", "ufunc_override.h"),
        join("src", "common", "umathmodule.h"),
        join("src", "common", "numpyos.h"),
        join("src", "common", "npy_cpu_dispatch.h"),
        join("src", "common", "simd", "simd.h"),
    ]

    common_src = [
        join("src", "common", "array_assign.c"),
        join("src", "common", "mem_overlap.c"),
        join("src", "common", "npy_longdouble.c"),
        join("src", "common", "templ_common.h.src"),
        join("src", "common", "ucsnarrow.c"),
        join("src", "common", "ufunc_override.c"),
        join("src", "common", "numpyos.c"),
        join("src", "common", "npy_cpu_features.c.src"),
    ]

    if os.environ.get("NPY_USE_BLAS_ILP64", "0") != "0":
        blas_info = get_info("blas_ilp64_opt", 2)
    else:
        blas_info = get_info("blas_opt", 0)

    have_blas = blas_info and ("HAVE_CBLAS", None) in blas_info.get(
        "define_macros", [])

    if have_blas:
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([
            join("src", "common", "cblasfuncs.c"),
            join("src", "common", "python_xerbla.c"),
        ])
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
        join("src", "multiarray", "abstractdtypes.h"),
        join("src", "multiarray", "arrayobject.h"),
        join("src", "multiarray", "arraytypes.h"),
        join("src", "multiarray", "arrayfunction_override.h"),
        join("src", "multiarray", "array_coercion.h"),
        join("src", "multiarray", "array_method.h"),
        join("src", "multiarray", "npy_buffer.h"),
        join("src", "multiarray", "calculation.h"),
        join("src", "multiarray", "common.h"),
        join("src", "multiarray", "convert_datatype.h"),
        join("src", "multiarray", "convert.h"),
        join("src", "multiarray", "conversion_utils.h"),
        join("src", "multiarray", "ctors.h"),
        join("src", "multiarray", "descriptor.h"),
        join("src", "multiarray", "dtypemeta.h"),
        join("src", "multiarray", "dragon4.h"),
        join("src", "multiarray", "einsum_debug.h"),
        join("src", "multiarray", "einsum_sumprod.h"),
        join("src", "multiarray", "getset.h"),
        join("src", "multiarray", "hashdescr.h"),
        join("src", "multiarray", "iterators.h"),
        join("src", "multiarray", "legacy_dtype_implementation.h"),
        join("src", "multiarray", "mapping.h"),
        join("src", "multiarray", "methods.h"),
        join("src", "multiarray", "multiarraymodule.h"),
        join("src", "multiarray", "nditer_impl.h"),
        join("src", "multiarray", "number.h"),
        join("src", "multiarray", "refcount.h"),
        join("src", "multiarray", "scalartypes.h"),
        join("src", "multiarray", "sequence.h"),
        join("src", "multiarray", "shape.h"),
        join("src", "multiarray", "strfuncs.h"),
        join("src", "multiarray", "typeinfo.h"),
        join("src", "multiarray", "usertypes.h"),
        join("src", "multiarray", "vdot.h"),
        join("include", "numpy", "arrayobject.h"),
        join("include", "numpy", "_neighborhood_iterator_imp.h"),
        join("include", "numpy", "npy_endian.h"),
        join("include", "numpy", "arrayscalars.h"),
        join("include", "numpy", "noprefix.h"),
        join("include", "numpy", "npy_interrupt.h"),
        join("include", "numpy", "npy_3kcompat.h"),
        join("include", "numpy", "npy_math.h"),
        join("include", "numpy", "halffloat.h"),
        join("include", "numpy", "npy_common.h"),
        join("include", "numpy", "npy_os.h"),
        join("include", "numpy", "utils.h"),
        join("include", "numpy", "ndarrayobject.h"),
        join("include", "numpy", "npy_cpu.h"),
        join("include", "numpy", "numpyconfig.h"),
        join("include", "numpy", "ndarraytypes.h"),
        join("include", "numpy", "npy_1_7_deprecated_api.h"),
        # add library sources as distutils does not consider library
        # dependencies
    ] + npymath_sources

    multiarray_src = [
        join("src", "multiarray", "abstractdtypes.c"),
        join("src", "multiarray", "alloc.c"),
        join("src", "multiarray", "arrayobject.c"),
        join("src", "multiarray", "arraytypes.c.src"),
        join("src", "multiarray", "array_coercion.c"),
        join("src", "multiarray", "array_method.c"),
        join("src", "multiarray", "array_assign_scalar.c"),
        join("src", "multiarray", "array_assign_array.c"),
        join("src", "multiarray", "arrayfunction_override.c"),
        join("src", "multiarray", "buffer.c"),
        join("src", "multiarray", "calculation.c"),
        join("src", "multiarray", "compiled_base.c"),
        join("src", "multiarray", "common.c"),
        join("src", "multiarray", "convert.c"),
        join("src", "multiarray", "convert_datatype.c"),
        join("src", "multiarray", "conversion_utils.c"),
        join("src", "multiarray", "ctors.c"),
        join("src", "multiarray", "datetime.c"),
        join("src", "multiarray", "datetime_strings.c"),
        join("src", "multiarray", "datetime_busday.c"),
        join("src", "multiarray", "datetime_busdaycal.c"),
        join("src", "multiarray", "descriptor.c"),
        join("src", "multiarray", "dtypemeta.c"),
        join("src", "multiarray", "dragon4.c"),
        join("src", "multiarray", "dtype_transfer.c"),
        join("src", "multiarray", "einsum.c.src"),
        join("src", "multiarray", "einsum_sumprod.c.src"),
        join("src", "multiarray", "flagsobject.c"),
        join("src", "multiarray", "getset.c"),
        join("src", "multiarray", "hashdescr.c"),
        join("src", "multiarray", "item_selection.c"),
        join("src", "multiarray", "iterators.c"),
        join("src", "multiarray", "legacy_dtype_implementation.c"),
        join("src", "multiarray", "lowlevel_strided_loops.c.src"),
        join("src", "multiarray", "mapping.c"),
        join("src", "multiarray", "methods.c"),
        join("src", "multiarray", "multiarraymodule.c"),
        join("src", "multiarray", "nditer_templ.c.src"),
        join("src", "multiarray", "nditer_api.c"),
        join("src", "multiarray", "nditer_constr.c"),
        join("src", "multiarray", "nditer_pywrap.c"),
        join("src", "multiarray", "number.c"),
        join("src", "multiarray", "refcount.c"),
        join("src", "multiarray", "sequence.c"),
        join("src", "multiarray", "shape.c"),
        join("src", "multiarray", "scalarapi.c"),
        join("src", "multiarray", "scalartypes.c.src"),
        join("src", "multiarray", "strfuncs.c"),
        join("src", "multiarray", "temp_elide.c"),
        join("src", "multiarray", "typeinfo.c"),
        join("src", "multiarray", "usertypes.c"),
        join("src", "multiarray", "vdot.c"),
        join("src", "common", "npy_sort.h.src"),
        join("src", "npysort", "quicksort.c.src"),
        join("src", "npysort", "mergesort.c.src"),
        join("src", "npysort", "timsort.c.src"),
        join("src", "npysort", "heapsort.c.src"),
        join("src", "npysort", "radixsort.c.src"),
        join("src", "common", "npy_partition.h.src"),
        join("src", "npysort", "selection.c.src"),
        join("src", "common", "npy_binsearch.h.src"),
        join("src", "npysort", "binsearch.c.src"),
    ]

    #######################################################################
    #             _multiarray_umath module - umath part                   #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, "__umath_generated.c")
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            with open(target, "w") as f:
                f.write(
                    generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
        return []

    umath_src = [
        join("src", "umath", "umathmodule.c"),
        join("src", "umath", "reduction.c"),
        join("src", "umath", "funcs.inc.src"),
        join("src", "umath", "simd.inc.src"),
        join("src", "umath", "loops.h.src"),
        join("src", "umath", "loops.c.src"),
        join("src", "umath", "loops_unary_fp.dispatch.c.src"),
        join("src", "umath", "matmul.h.src"),
        join("src", "umath", "matmul.c.src"),
        join("src", "umath", "clip.h.src"),
        join("src", "umath", "clip.c.src"),
        join("src", "umath", "ufunc_object.c"),
        join("src", "umath", "extobj.c"),
        join("src", "umath", "scalarmath.c.src"),
        join("src", "umath", "ufunc_type_resolution.c"),
        join("src", "umath", "override.c"),
    ]

    umath_deps = [
        generate_umath_py,
        join("include", "numpy", "npy_math.h"),
        join("include", "numpy", "halffloat.h"),
        join("src", "multiarray", "common.h"),
        join("src", "multiarray", "number.h"),
        join("src", "common", "templ_common.h.src"),
        join("src", "umath", "simd.inc.src"),
        join("src", "umath", "override.h"),
        join(codegen_dir, "generate_ufunc_api.py"),
    ]

    config.add_extension(
        "_multiarray_umath",
        sources=multiarray_src + umath_src + common_src + [
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
            join(codegen_dir, "generate_numpy_api.py"),
            join("*.py"),
            generate_umath_c,
            generate_ufunc_api,
        ],
        depends=deps + multiarray_deps + umath_deps + common_deps,
        libraries=["npymath"],
        extra_info=extra_info,
    )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension(
        "_umath_tests",
        sources=[
            join("src", "umath", "_umath_tests.c.src"),
            join("src", "umath", "_umath_tests.dispatch.c"),
            join("src", "common", "npy_cpu_features.c.src"),
        ],
    )

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension(
        "_rational_tests",
        sources=[join("src", "umath", "_rational_tests.c.src")])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension(
        "_struct_ufunc_tests",
        sources=[join("src", "umath", "_struct_ufunc_tests.c.src")],
    )

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension(
        "_operand_flag_tests",
        sources=[join("src", "umath", "_operand_flag_tests.c.src")],
    )

    #######################################################################
    #                        SIMD module                                  #
    #######################################################################

    config.add_extension(
        "_simd",
        sources=[
            join("src", "common", "npy_cpu_features.c.src"),
            join("src", "_simd", "_simd.c"),
            join("src", "_simd", "_simd_inc.h.src"),
            join("src", "_simd", "_simd_data.inc.src"),
            join("src", "_simd", "_simd.dispatch.c.src"),
        ],
        depends=[
            join("src", "common", "npy_cpu_dispatch.h"),
            join("src", "common", "simd", "simd.h"),
            join("src", "_simd", "_simd.h"),
            join("src", "_simd", "_simd_inc.h.src"),
            join("src", "_simd", "_simd_data.inc.src"),
            join("src", "_simd", "_simd_arg.inc"),
            join("src", "_simd", "_simd_convert.inc"),
            join("src", "_simd", "_simd_easyintrin.inc"),
            join("src", "_simd", "_simd_vector.inc"),
        ],
    )

    config.add_subpackage("tests")
    config.add_data_dir("tests/data")
    config.add_data_dir("tests/examples")
    config.add_data_files("*.pyi")

    config.make_svn_version_py()

    return config
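
Note: the generate_* callables listed in the sources above rely on the
numpy.distutils build_src convention that any callable in a sources list is
invoked as func(ext, build_dir) and returns the path(s) of the files it
generated. A minimal sketch of that pattern (all names here are illustrative,
not from the original file):

def generate_example_header(ext, build_dir):
    # Write a tiny generated header into build_dir and return its path so
    # build_src can track it as a generated source.
    import os
    target = os.path.join(build_dir, 'include', 'example_config.h')
    if not os.path.exists(os.path.dirname(target)):
        os.makedirs(os.path.dirname(target))
    with open(target, 'w') as f:
        f.write('#define EXAMPLE_CONFIG 1\n')
    return target

# Typical use:
# config.add_extension('example',
#                      sources=['example.c', generate_example_header])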
Example #43
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration(None, parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=True)

    config.add_subpackage('sfepy')

    main_scripts = [
        'phonon.py',
        'extractor.py',
        'homogen.py',
        'postproc.py',
        'probe.py',
        'run_tests.py',
        'simple.py',
        'test_install.py',
    ]

    aux_scripts = [
        'blockgen.py',
        'convert_mesh.py',
        'cylindergen.py',
        'edit_identifiers.py',
        'eval_ns_forms.py',
        'eval_tl_forms.py',
        'extract_edges.py',
        'extract_surface.py',
        'gen_gallery.py',
        'gen_iga_patch.py',
        'gen_lobatto1d_c.py',
        'gen_mesh_prev.py',
        'gen_release_notes.py',
        'gen_solver_table.py',
        'gen_term_table.py',
        'plot_condition_numbers.py',
        'plot_logs.py',
        'plot_mesh.py',
        'plot_quadratures.py',
        'plot_times.py',
        'save_basis.py',
        'show_authors.py',
        'show_mesh_info.py',
        'show_terms_use.py',
        'sync_module_docs.py',
        'tile_periodic_mesh.py',
    ]
    aux_scripts = [os.path.join('script', ii) for ii in aux_scripts]

    config.add_data_files(
        ('sfepy', ('VERSION', 'INSTALL', 'README.rst', 'LICENSE', 'AUTHORS',
                   'build_helpers.py', 'site_cfg_template.py', 'Makefile')))
    config.add_data_files(('sfepy/script', main_scripts))
    config.add_data_files(('sfepy/script', aux_scripts))

    config.add_data_dir(('sfepy/meshes', 'meshes'))
    config.add_data_dir(('sfepy/examples', 'examples'))
    config.add_data_dir(('sfepy/tests', 'tests'))

    config.get_version('sfepy/version.py')  # sets config.version

    return config
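
Note: for a top-level configuration like this one (package name None, options
delegated to subpackages), the enclosing setup.py usually expands the result
into plain setup() keywords instead of passing the callable; a hedged sketch,
assuming setup.py is run from the project root:

if __name__ == '__main__':
    from numpy.distutils.core import setup
    # Configuration.todict() turns the accumulated configuration into the
    # keyword arguments expected by setup().
    setup(**configuration(top_path='').todict())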
Example #44
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from scipy._build_utils import get_sgemv_fix, get_g77_abi_wrappers, split_fortran_files

    config = Configuration('linalg', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')

    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    atlas_version = ([
        v[3:-3]
        for k, v in lapack_opt.get('define_macros', []) if k == 'ATLAS_INFO'
    ] + [None])[0]
    if atlas_version:
        print(('ATLAS version: %s' % atlas_version))

    # fblas:
    sources = ['fblas.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    sources += get_sgemv_fix(lapack_opt)

    config.add_extension('_fblas',
                         sources=sources,
                         depends=['fblas_l?.pyf.src'],
                         extra_info=lapack_opt)

    # flapack:
    sources = ['flapack.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)

    config.add_extension('_flapack',
                         sources=sources,
                         depends=['flapack_user.pyf.src'],
                         extra_info=lapack_opt)

    if atlas_version is not None:
        # cblas:
        config.add_extension('_cblas',
                             sources=['cblas.pyf.src'],
                             depends=['cblas.pyf.src', 'cblas_l1.pyf.src'],
                             extra_info=lapack_opt)

        # clapack:
        config.add_extension('_clapack',
                             sources=['clapack.pyf.src'],
                             depends=['clapack.pyf.src'],
                             extra_info=lapack_opt)

    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'),
                                  join('src', 'lu.f')],
                         extra_info=lapack_opt)

    # _interpolative:
    routines_to_split = [
        'dfftb1',
        'dfftf1',
        'dffti1',
        'dsint1',
        'dzfft1',
        'id_srand',
        'idd_copyints',
        'idd_id2svd0',
        'idd_pairsamps',
        'idd_permute',
        'idd_permuter',
        'idd_random_transf0',
        'idd_random_transf0_inv',
        'idd_random_transf_init0',
        'idd_subselect',
        'iddp_asvd0',
        'iddp_rsvd0',
        'iddr_asvd0',
        'iddr_rsvd0',
        'idz_estrank0',
        'idz_id2svd0',
        'idz_permute',
        'idz_permuter',
        'idz_random_transf0_inv',
        'idz_random_transf_init0',
        'idz_random_transf_init00',
        'idz_realcomp',
        'idz_realcomplex',
        'idz_reco',
        'idz_subselect',
        'idzp_aid0',
        'idzp_aid1',
        'idzp_asvd0',
        'idzp_rsvd0',
        'idzr_asvd0',
        'idzr_reco',
        'idzr_rsvd0',
        'zfftb1',
        'zfftf1',
        'zffti1',
    ]
    print('Splitting linalg.interpolative Fortran source files')
    fnames = split_fortran_files(
        join(
            os.path.split(os.path.abspath(__file__))[0], 'src', 'id_dist',
            'src'), routines_to_split)
    fnames = [join('src', 'id_dist', 'src', f) for f in fnames]
    config.add_extension('_interpolative',
                         fnames + ["interpolative.pyf"],
                         extra_info=lapack_opt)

    # _calc_lwork:
    config.add_extension('_calc_lwork', [join('src', 'calc_lwork.f')],
                         extra_info=lapack_opt)

    # _solve_toeplitz:
    config.add_extension('_solve_toeplitz',
                         sources=[('_solve_toeplitz.c')],
                         include_dirs=[get_numpy_include_dirs()])

    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    return config
Example #45
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration(None, parent_package, top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True,
                       quiet=True)

    config.add_data_files(('compmech', 'LICENSE'))
    config.add_data_files(('compmech', 'README.rst'))
    config.add_data_files(('compmech', 'ROADMAP.rst'))
    config.add_data_files(('compmech', 'setup.cfg'))
    config.add_data_files(('compmech', 'setup.py'))

    if 'bdist_wheel' in sys.argv[1:]:
        includedir = join(get_python_lib(), 'compmech', 'include')
        libdir = join(get_python_lib(), 'compmech', 'lib')
        if not (os.path.isdir(includedir) and os.path.isdir(libdir)):
            raise RuntimeError('Need to run first: python setup.py install')
        config.add_data_dir(('compmech/include', includedir))
        config.add_data_dir(('compmech/lib', libdir))
        config.add_data_dir(('compmech/theory', 'theory'))
        config.add_data_dir(('compmech/doc', 'doc/build/html'))
    elif sys.argv[1] in ('bdist', 'sdist'):
        config.add_data_dir('compmech/include')
        config.add_data_dir('compmech/lib')

    config.add_subpackage('compmech')

    config.get_version('compmech/__version__.py')

    return config
Example #46
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('integrate', parent_package, top_path)

    # Get a local copy of lapack_opt_info
    lapack_opt = dict(get_info('lapack_opt',notfound_action=2))
    # Pop off the libraries list so it can be combined with
    # additional required libraries
    lapack_libs = lapack_opt.pop('libraries', [])

    mach_src = [join('mach','*.f')]
    quadpack_src = [join('quadpack','*.f')]
    odepack_src = [join('odepack','*.f')]
    dop_src = [join('dop','*.f')]
    quadpack_test_src = [join('tests','_test_multivariate.c')]
    odeint_banded_test_src = [join('tests', 'banded5x5.f')]

    config.add_library('mach', sources=mach_src,
                       config_fc={'noopt':(__file__,1)})
    config.add_library('quadpack', sources=quadpack_src)
    config.add_library('odepack', sources=odepack_src)
    config.add_library('dop', sources=dop_src)

    # Extensions
    # quadpack:
    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]
    if 'include_dirs' in lapack_opt:
        lapack_opt = dict(lapack_opt)
        include_dirs.extend(lapack_opt.pop('include_dirs'))

    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         libraries=(['quadpack', 'mach'] + lapack_libs),
                         depends=(['quadpack.h','__quadpack.h']
                                  + quadpack_src + mach_src),
                         include_dirs=include_dirs,
                         **lapack_opt)

    # odepack
    odepack_libs = ['odepack','mach'] + lapack_libs

    odepack_opts = lapack_opt.copy()
    odepack_opts.update(numpy_nodepr_api)
    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **odepack_opts)

    # vode
    config.add_extension('vode',
                         sources=['vode.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src
                                  + mach_src),
                         **lapack_opt)

    # lsoda
    config.add_extension('lsoda',
                         sources=['lsoda.pyf'],
                         libraries=odepack_libs,
                         depends=(odepack_src
                                  + mach_src),
                         **lapack_opt)

    # dop
    config.add_extension('_dop',
                         sources=['dop.pyf'],
                         libraries=['dop'],
                         depends=dop_src)

    config.add_extension('_test_multivariate',
                         sources=quadpack_test_src)

    # Fortran+f2py extension module for testing odeint.
    config.add_extension('_test_odeint_banded',
                         sources=odeint_banded_test_src,
                         libraries=odepack_libs,
                         depends=(odepack_src + mach_src),
                         **lapack_opt)

    config.add_data_dir('tests')
    return config
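
Note: get_info('lapack_opt', notfound_action=2) raises an error when no
LAPACK/BLAS is found (1 would only warn, 0 would stay silent), and the dict it
returns maps directly onto add_extension() keyword arguments, which is why it
can be expanded with ** above. A toy illustration of its shape (values are
made up):

lapack_opt_example = {
    'libraries': ['openblas'],               # hypothetical
    'library_dirs': ['/opt/openblas/lib'],   # hypothetical
    'define_macros': [('HAVE_CBLAS', None)],
}
# config.add_extension('_odepack', sources=['_odepackmodule.c'],
#                      **lapack_opt_example)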
Example #47
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('optimize',parent_package, top_path)

    minpack_src = [join('minpack','*f')]
    config.add_library('minpack',sources=minpack_src)
    config.add_extension('_minpack',
                         sources=['_minpackmodule.c'],
                         libraries=['minpack'],
                         depends=(["minpack.h","__minpack.h"]
                                  + minpack_src),
                         **numpy_nodepr_api)

    rootfind_src = [join('Zeros','*.c')]
    rootfind_hdr = [join('Zeros','zeros.h')]
    config.add_library('rootfind',
                       sources=rootfind_src,
                       headers=rootfind_hdr,
                       **numpy_nodepr_api)

    config.add_extension('_zeros',
                         sources=['zeros.c'],
                         libraries=['rootfind'],
                         depends=(rootfind_src + rootfind_hdr),
                         **numpy_nodepr_api)

    lapack = get_info('lapack_opt')
    if 'define_macros' in numpy_nodepr_api:
        if ('define_macros' in lapack) and (lapack['define_macros'] is not None):
            lapack['define_macros'] = (lapack['define_macros'] +
                                       numpy_nodepr_api['define_macros'])
        else:
            lapack['define_macros'] = numpy_nodepr_api['define_macros']
    sources = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
    config.add_extension('_lbfgsb',
                         sources=[join('lbfgsb',x) for x in sources],
                         **lapack)

    sources = ['moduleTNC.c','tnc.c']
    config.add_extension('moduleTNC',
                         sources=[join('tnc',x) for x in sources],
                         depends=[join('tnc','tnc.h')],
                         **numpy_nodepr_api)

    config.add_extension('_cobyla',
                         sources=[join('cobyla',x) for x in ['cobyla.pyf',
                                                             'cobyla2.f',
                                                             'trstlp.f']],
                         **numpy_nodepr_api)

    sources = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
    config.add_extension('minpack2',
                         sources=[join('minpack2',x) for x in sources],
                         **numpy_nodepr_api)

    sources = ['slsqp.pyf', 'slsqp_optmz.f']
    config.add_extension('_slsqp', sources=[join('slsqp', x) for x in sources],
                         **numpy_nodepr_api)

    config.add_extension('_nnls', sources=[join('nnls', x)
                                          for x in ["nnls.f","nnls.pyf"]],
                         **numpy_nodepr_api)

    config.add_extension('_group_columns', sources=['_group_columns.c'],)

    config.add_subpackage('_lsq')
    
    config.add_subpackage('_trlib')

    config.add_subpackage('_trustregion_constr')

    config.add_data_dir('tests')

    # Add license files
    config.add_data_files('lbfgsb/README')

    return config
Example #48
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.system_info import get_info
    from scipy._build_utils import (gfortran_legacy_flag_hook,
                                    blas_ilp64_pre_build_hook, combine_dict,
                                    uses_blas64, get_f2py_int64_options)
    from scipy._build_utils.compiler_helper import (set_cxx_flags_hook,
                                                    set_cxx_flags_clib_hook,
                                                    set_c_flags_hook)

    config = Configuration('optimize', parent_package, top_path)

    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]

    minpack_src = [join('minpack', '*f')]
    config.add_library('minpack', sources=minpack_src)
    config.add_extension('_minpack',
                         sources=['_minpackmodule.c'],
                         libraries=['minpack'],
                         depends=(["minpack.h", "__minpack.h"] + minpack_src),
                         include_dirs=include_dirs,
                         **numpy_nodepr_api)

    config.add_library('rectangular_lsap',
                       sources='rectangular_lsap/rectangular_lsap.cpp',
                       headers='rectangular_lsap/rectangular_lsap.h',
                       _pre_build_hook=set_cxx_flags_clib_hook)
    _lsap = config.add_extension('_lsap_module',
                                 sources=['_lsap_module.c'],
                                 libraries=['rectangular_lsap'],
                                 depends=([
                                     'rectangular_lsap/rectangular_lsap.cpp',
                                     'rectangular_lsap/rectangular_lsap.h'
                                 ]),
                                 include_dirs=include_dirs,
                                 **numpy_nodepr_api)
    _lsap._pre_build_hook = set_c_flags_hook

    rootfind_src = [join('Zeros', '*.c')]
    rootfind_hdr = [join('Zeros', 'zeros.h')]
    config.add_library('rootfind',
                       sources=rootfind_src,
                       headers=rootfind_hdr,
                       **numpy_nodepr_api)

    config.add_extension('_zeros',
                         sources=['zeros.c'],
                         libraries=['rootfind'],
                         depends=(rootfind_src + rootfind_hdr),
                         **numpy_nodepr_api)

    if uses_blas64():
        lapack = get_info('lapack_ilp64_opt')
        f2py_options = get_f2py_int64_options()
        pre_build_hook = blas_ilp64_pre_build_hook(lapack)
    else:
        lapack = get_info('lapack_opt')
        f2py_options = None
        pre_build_hook = None

    lapack = combine_dict(lapack, numpy_nodepr_api)

    sources = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
    ext = config.add_extension(
        '_lbfgsb',
        sources=[join('lbfgsb_src', x) for x in sources],
        f2py_options=f2py_options,
        **lapack)
    ext._pre_build_hook = pre_build_hook

    sources = ['moduleTNC.c', 'tnc.c']
    config.add_extension('moduleTNC',
                         sources=[join('tnc', x) for x in sources],
                         depends=[join('tnc', 'tnc.h')],
                         **numpy_nodepr_api)

    config.add_extension('_cobyla',
                         sources=[
                             join('cobyla', x)
                             for x in ['cobyla.pyf', 'cobyla2.f', 'trstlp.f']
                         ],
                         **numpy_nodepr_api)

    sources = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
    config.add_extension('minpack2',
                         sources=[join('minpack2', x) for x in sources],
                         **numpy_nodepr_api)

    sources = ['slsqp.pyf', 'slsqp_optmz.f']
    ext = config.add_extension('_slsqp',
                               sources=[join('slsqp', x) for x in sources],
                               **numpy_nodepr_api)
    ext._pre_build_hook = gfortran_legacy_flag_hook

    config.add_data_files('__nnls.pyi')
    ext = config.add_extension(
        '__nnls',
        sources=[join('__nnls', x) for x in ["nnls.f", "nnls.pyf"]],
        **numpy_nodepr_api)
    ext._pre_build_hook = gfortran_legacy_flag_hook

    if int(os.environ.get('SCIPY_USE_PYTHRAN', 0)):
        import pythran
        ext = pythran.dist.PythranExtension(
            'scipy.optimize._group_columns',
            sources=["scipy/optimize/_group_columns.py"],
            config=['compiler.blas=none'])
        config.ext_modules.append(ext)
    else:
        config.add_extension(
            '_group_columns',
            sources=['_group_columns.c'],
        )

    config.add_extension('_bglu_dense', sources=['_bglu_dense.c'])

    config.add_subpackage('_lsq')

    config.add_subpackage('_trlib')

    config.add_subpackage('_trustregion_constr')

    # Cython optimize API for zeros functions
    config.add_subpackage('cython_optimize')
    config.add_data_files('cython_optimize.pxd')
    config.add_data_files(os.path.join('cython_optimize', '*.pxd'))
    config.add_extension('cython_optimize._zeros',
                         sources=[os.path.join('cython_optimize', '_zeros.c')])

    config.add_subpackage('_shgo_lib')
    config.add_data_dir('_shgo_lib')

    # HiGHS linear programming libraries and extensions
    config.add_subpackage('_highs')

    config.add_data_dir('tests')

    # Add license files
    config.add_data_files('lbfgsb_src/README')

    return config
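The ext._pre_build_hook assignments above rely on SciPy's customised build commands, which call the hook just before an extension (or clib) is compiled; the imported helpers from scipy._build_utils do the real work. The sketch below only illustrates the general shape of such a hook. The (cmd, ext) signature, the _f77_compiler attribute and the gnu95 check are assumptions modelled on how gfortran_legacy_flag_hook behaves, not a copy of it.

# Hedged sketch of a pre-build hook in the spirit of gfortran_legacy_flag_hook:
# add -std=legacy for old-style Fortran sources when gfortran is in use.
def legacy_fortran_hook(cmd, ext):
    # cmd is assumed to be the build_ext command, ext the numpy.distutils Extension
    f77 = getattr(cmd, '_f77_compiler', None)
    if f77 is not None and f77.compiler_type == 'gnu95':
        ext.extra_f77_compile_args = list(ext.extra_f77_compile_args or []) + ['-std=legacy']

# usage: ext = config.add_extension(...); ext._pre_build_hook = legacy_fortran_hook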
Example #49
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.command.scons import get_scons_build_dir, get_scons_pkg_build_dir
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    config.add_subpackage('code_generators')

    # List of files to register to numpy.distutils
    dot_blas_src = [join('blasdot', '_dotblas.c'), join('blasdot', 'cblas.h')]
    api_definition = [
        join('code_generators', 'array_api_order.txt'),
        join('code_generators', 'multiarray_api_order.txt'),
        join('code_generators', 'ufunc_api_order.txt')
    ]
    core_src = [
        join('src', basename(i))
        for i in glob.glob(join(local_dir, 'src', '*.c'))
    ]
    core_src += [
        join('src', basename(i))
        for i in glob.glob(join(local_dir, 'src', '*.src'))
    ]

    source_files = dot_blas_src + api_definition + core_src + \
                   [join(header_dir, 'numpyconfig.h.in')]

    # Add generated files to distutils...
    def add_config_header():
        scons_build_dir = get_scons_build_dir()
        # XXX: I really have to think about how to communicate path info
        # between scons and distutils, and set the options at one single
        # location.
        target = join(get_scons_pkg_build_dir(config.name), 'config.h')
        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

    def add_numpyconfig_header():
        scons_build_dir = get_scons_build_dir()
        # XXX: I really have to think about how to communicate path info
        # between scons and distutils, and set the options at one single
        # location.
        target = join(get_scons_pkg_build_dir(config.name),
                      'include/numpy/numpyconfig.h')
        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)
        config.add_data_files((header_dir, target))

    def add_array_api():
        scons_build_dir = get_scons_build_dir()
        # XXX: I really have to think about how to communicate path info
        # between scons and distutils, and set the options at one single
        # location.
        h_file = join(get_scons_pkg_build_dir(config.name),
                      '__multiarray_api.h')
        t_file = join(get_scons_pkg_build_dir(config.name),
                      'multiarray_api.txt')
        config.add_data_files((header_dir, h_file), (header_dir, t_file))

    def add_ufunc_api():
        scons_build_dir = get_scons_build_dir()
        # XXX: I really have to think about how to communicate path info
        # between scons and distutils, and set the options at one single
        # location.
        h_file = join(get_scons_pkg_build_dir(config.name), '__ufunc_api.h')
        t_file = join(get_scons_pkg_build_dir(config.name), 'ufunc_api.txt')
        config.add_data_files((header_dir, h_file), (header_dir, t_file))

    def add_generated_files(*args, **kw):
        add_config_header()
        add_numpyconfig_header()
        add_array_api()
        add_ufunc_api()

    config.add_sconscript('SConstruct',
                          post_hook=add_generated_files,
                          source_files=source_files)

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_data_dir('tests')
    config.make_svn_version_py()

    return config
Example #50
File: setup.py Project: yacth/scipy
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from scipy._build_utils.system_info import get_info
    from scipy._build_utils import combine_dict, uses_blas64, numpy_nodepr_api
    from scipy._build_utils.compiler_helper import set_cxx_flags_hook
    from distutils.sysconfig import get_python_inc
    import pybind11

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # spatial.transform
    config.add_subpackage('transform')

    # qhull
    qhull_src = sorted(
        glob.glob(join(dirname(__file__), 'qhull_src', 'src', '*.c')))

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))
    inc_dirs.append(join(dirname(dirname(__file__)), '_build_utils', 'src'))

    if uses_blas64():
        lapack_opt = get_info('lapack_ilp64_opt')
    else:
        lapack_opt = get_info('lapack_opt')

    cfg = combine_dict(lapack_opt, include_dirs=inc_dirs)
    config.add_extension('qhull',
                         sources=['qhull.c', 'qhull_misc.c'] + qhull_src,
                         **cfg)

    # cKDTree
    ckdtree_src = [
        'query.cxx', 'build.cxx', 'query_pairs.cxx', 'count_neighbors.cxx',
        'query_ball_point.cxx', 'query_ball_tree.cxx', 'sparse_distances.cxx'
    ]

    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = [
        'ckdtree_decl.h', 'coo_entries.h', 'distance_base.h', 'distance.h',
        'ordered_pair.h', 'rectangle.h'
    ]

    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    ext = config.add_extension('ckdtree',
                               sources=['ckdtree.cxx'] + ckdtree_src,
                               depends=ckdtree_dep,
                               include_dirs=inc_dirs +
                               [join('ckdtree', 'src')])
    ext._pre_build_hook = set_cxx_flags_hook

    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[
                             get_numpy_include_dirs(),
                             join(dirname(dirname(__file__)), '_lib')
                         ],
                         extra_info=get_misc_info("npymath"),
                         **numpy_nodepr_api)

    distance_pybind_includes = [
        pybind11.get_include(True),
        pybind11.get_include(False),
        get_numpy_include_dirs()
    ]
    ext = config.add_extension('_distance_pybind',
                               sources=[join('src', 'distance_pybind.cpp')],
                               depends=[
                                   join('src', 'function_ref.h'),
                                   join('src', 'views.h'),
                                   join('src', 'distance_metrics.h')
                               ],
                               include_dirs=distance_pybind_includes,
                               language='c++',
                               **numpy_nodepr_api)
    ext._pre_build_hook = set_cxx_flags_hook

    config.add_extension('_voronoi', sources=['_voronoi.c'])

    config.add_extension('_hausdorff', sources=['_hausdorff.c'])

    # Add license files
    config.add_data_files('qhull_src/COPYING.txt')

    # Type stubs
    config.add_data_files('*.pyi')

    return config
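Once built, the qhull, ckdtree and _distance_wrap/_distance_pybind extensions configured above back the public scipy.spatial API. A quick usage check (assumes an installed SciPy build):

# Usage sketch: exercising the extensions above through scipy.spatial.
import numpy as np
from scipy.spatial import cKDTree, distance

pts = np.random.rand(20, 3)
tree = cKDTree(pts)                        # built from the 'ckdtree' extension
dist, idx = tree.query(pts[0], k=2)        # nearest neighbours of the first point
print(idx, dist)
print(distance.cdist(pts[:5], pts[:5]).shape)  # pairwise distances via _distance_wrap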
Example #51
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.system_info import get_info
    config = Configuration('optimize', parent_package, top_path)

    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]

    minpack_src = [join('minpack', '*f')]
    config.add_library('minpack', sources=minpack_src)
    config.add_extension('_minpack',
                         sources=['_minpackmodule.c'],
                         libraries=['minpack'],
                         depends=(["minpack.h", "__minpack.h"] + minpack_src),
                         include_dirs=include_dirs,
                         **numpy_nodepr_api)

    config.add_library('rectangular_lsap',
                       sources='rectangular_lsap/rectangular_lsap.cpp',
                       headers='rectangular_lsap/rectangular_lsap.h')
    config.add_extension('_lsap_module',
                         sources=['_lsap_module.c'],
                         libraries=['rectangular_lsap'],
                         depends=([
                             'rectangular_lsap/rectangular_lsap.cpp',
                             'rectangular_lsap/rectangular_lsap.h'
                         ]),
                         include_dirs=include_dirs,
                         **numpy_nodepr_api)

    rootfind_src = [join('Zeros', '*.c')]
    rootfind_hdr = [join('Zeros', 'zeros.h')]
    config.add_library('rootfind',
                       sources=rootfind_src,
                       headers=rootfind_hdr,
                       **numpy_nodepr_api)

    config.add_extension('_zeros',
                         sources=['zeros.c'],
                         libraries=['rootfind'],
                         depends=(rootfind_src + rootfind_hdr),
                         **numpy_nodepr_api)

    lapack = get_info('lapack_opt')
    if 'define_macros' in numpy_nodepr_api:
        if ('define_macros' in lapack) and (lapack['define_macros']
                                            is not None):
            lapack['define_macros'] = (lapack['define_macros'] +
                                       numpy_nodepr_api['define_macros'])
        else:
            lapack['define_macros'] = numpy_nodepr_api['define_macros']
    sources = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
    config.add_extension('_lbfgsb',
                         sources=[join('lbfgsb_src', x) for x in sources],
                         **lapack)

    sources = ['moduleTNC.c', 'tnc.c']
    config.add_extension('moduleTNC',
                         sources=[join('tnc', x) for x in sources],
                         depends=[join('tnc', 'tnc.h')],
                         **numpy_nodepr_api)

    config.add_extension('_cobyla',
                         sources=[
                             join('cobyla', x)
                             for x in ['cobyla.pyf', 'cobyla2.f', 'trstlp.f']
                         ],
                         **numpy_nodepr_api)

    sources = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
    config.add_extension('minpack2',
                         sources=[join('minpack2', x) for x in sources],
                         **numpy_nodepr_api)

    sources = ['slsqp.pyf', 'slsqp_optmz.f']
    config.add_extension('_slsqp',
                         sources=[join('slsqp', x) for x in sources],
                         **numpy_nodepr_api)

    config.add_extension(
        '_nnls',
        sources=[join('nnls', x) for x in ["nnls.f", "nnls.pyf"]],
        **numpy_nodepr_api)

    config.add_extension(
        '_group_columns',
        sources=['_group_columns.c'],
    )

    config.add_extension('_bglu_dense', sources=['_bglu_dense.c'])

    config.add_subpackage('_lsq')

    config.add_subpackage('_trlib')

    config.add_subpackage('_trustregion_constr')

    # Cython optimize API for zeros functions
    config.add_subpackage('cython_optimize')
    config.add_data_files('cython_optimize.pxd')
    config.add_data_files(os.path.join('cython_optimize', '*.pxd'))
    config.add_extension('cython_optimize._zeros',
                         sources=[os.path.join('cython_optimize', '_zeros.c')])

    config.add_subpackage('_shgo_lib')
    config.add_data_dir('_shgo_lib')

    config.add_data_dir('tests')

    # Add license files
    config.add_data_files('lbfgsb_src/README')

    return config
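The hand-rolled define_macros merge in this older variant (concatenating lapack['define_macros'] with numpy_nodepr_api['define_macros']) is what combine_dict() from scipy._build_utils replaces in the newer setup.py shown earlier: list-valued keys are concatenated instead of overwritten. A simplified sketch, not SciPy's actual implementation:

# Simplified sketch of combine_dict-style merging: list values are
# concatenated, everything else is overwritten by later dicts.
def combine_dict_sketch(*dicts, **extra):
    merged = {}
    for d in list(dicts) + [extra]:
        for key, value in d.items():
            if key in merged and isinstance(merged[key], (list, tuple)):
                merged[key] = list(merged[key]) + list(value)
            else:
                merged[key] = value
    return merged

# e.g. combine_dict_sketch(lapack, numpy_nodepr_api) merges their define_macros lists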
Example #52
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('f2py', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_files('src/fortranobject.c', 'src/fortranobject.h')
    return config
Example #53
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import dict_append, get_info

    agg_dir = 'agg-24'
    agg_lib = 'agg24_src'

    config = Configuration('agg', parent_package,top_path)
    numerix_info = get_info('numerix')

    if ('NUMPY', None) in numerix_info.get('define_macros',[]):
        dict_append(numerix_info,
                    define_macros = [('PY_ARRAY_TYPES_PREFIX','NUMPY_CXX'),
                                     ('OWN_DIMENSIONS','0'),
                                     ('OWN_STRIDES','0')])

    #-------------------------------------------------------------------------
    # Configure the Agg backend to use on each platform
    #-------------------------------------------------------------------------
    if sys.platform=='win32':
        plat = 'win32'
    elif sys.platform == 'darwin':
        plat = 'gl'
    else:
        #plat = 'gtk1'  # use with gtk1, it's fast
        plat = 'x11'  # use with gtk2, it's slow but reliable
        #plat = 'gdkpixbuf2'


    #-------------------------------------------------------------------------
    # Add the freetype library (agg 2.4 links against this)
    #-------------------------------------------------------------------------

    prefix = config.paths('freetype2/src')[0]
    freetype_lib = 'freetype2_src'

    def get_ft2_sources(name_info, build_dir):
        (lib_name, build_info) = name_info
        sources = [prefix + "/" + s for s in freetype2_sources]
        if sys.platform=='darwin':
            return sources[:]
        return sources[:-1]

    ft2_incl_dirs = ['freetype2/src/' + s for s in freetype2_dirs] \
                    + ['freetype2/include', 'freetype2/src']
    ft2_incl_dirs = config.paths(*ft2_incl_dirs)
    if sys.platform == 'darwin' and '64bit' not in platform.architecture():
        ft2_incl_dirs.append("/Developer/Headers/FlatCarbon")

    config.add_library(freetype_lib,
                       sources = [get_ft2_sources],
                       include_dirs = ft2_incl_dirs,

                       # This macro was introduced in Freetype 2.2; if it is
                       # not defined, then the ftheader.h file (one of the
                       # primary headers) won't pull in any additional internal
                       # Freetype headers, and the library will mysteriously
                       # fail to build.
                       macros = [("FT2_BUILD_LIBRARY", None)],

                       depends = ['freetype2'],
                       )

    #-------------------------------------------------------------------------
    # Add the Agg sources
    #-------------------------------------------------------------------------

    agg_include_dirs = [agg_dir+'/include',agg_dir+'/font_freetype'] + \
                                   ft2_incl_dirs
    agg_sources = [agg_dir+'/src/*.cpp',
                    agg_dir+'/font_freetype/*.cpp']
    config.add_library(agg_lib,
                       agg_sources,
                       include_dirs = agg_include_dirs,
                       depends = [agg_dir])

    #-------------------------------------------------------------------------
    # Add the Kiva sources
    #-------------------------------------------------------------------------
    if sys.platform == 'darwin':
        define_macros = [('__DARWIN__', None)]
        macros = [('__DARWIN__', None)]
        extra_link_args = ['-framework', 'Carbon']
    else:
        define_macros = []
        macros = []
        extra_link_args = []

    kiva_include_dirs = ['src'] + agg_include_dirs
    config.add_library('kiva_src',
                       ['src/kiva_*.cpp', 'src/gl_graphics_context.cpp'],
                       include_dirs = kiva_include_dirs,
                       # Use "macros" instead of "define_macros" because the
                       # latter is only used for extensions, and not clibs
                       macros = macros,
                       )

    # MSVC6.0: uncomment to handle template parameters:
    #extra_compile_args = ['/Zm1000']
    extra_compile_args = []

    # XXX: test whether numpy has weakref support

    #-------------------------------------------------------------------------
    # Build the extension itself
    #-------------------------------------------------------------------------

    # Check for g++ < 4.0 on 64-bit Linux
    use_32bit_workaround = False

    if sys.platform == 'linux2' and '64bit' in platform.architecture():
        f = os.popen("g++ --version")
        line0 = f.readline()
        f.close()
        m = re.match(r'.+?\s([3-5])\.\d+', line0)
        if m is not None and int(m.group(1)) < 4:
            use_32bit_workaround = True

    # Enable workaround of agg bug on 64-bit machines with g++ < 4.0
    if use_32bit_workaround:
        define_macros.append(("ALWAYS_32BIT_WORKAROUND", 1))

    # Options to make OS X link OpenGL
    if '64bit' not in platform.architecture():
        darwin_frameworks = ['Carbon', 'ApplicationServices', 'OpenGL']
    else:
        darwin_frameworks = ['ApplicationServices', 'OpenGL']    
    darwin_opengl_opts = dict(
            include_dirs = [
              '/System/Library/Frameworks/%s.framework/Versions/A/Headers' % x
              for x in darwin_frameworks],
            define_macros = [('__DARWIN__',None)],
            extra_link_args = ['-framework %s' % x for x in darwin_frameworks]
            )

    build_info = {}
    kiva_lib = 'kiva_src'
    build_libraries = [kiva_lib, agg_lib, freetype_lib]
    if sys.platform == "win32":
        build_libraries += ["opengl32", "glu32"]
    elif sys.platform == "darwin":
        dict_append(build_info, **darwin_opengl_opts)
    else:
        # This should work for most linuxes (linuces?)
        build_libraries += ["GL", "GLU"]
    dict_append(build_info,
                sources = ['agg.i'],
                include_dirs = kiva_include_dirs,
                libraries = build_libraries,
                depends = ['src/*.[ih]'],
                extra_compile_args = extra_compile_args,
                extra_link_args = extra_link_args,
                define_macros=define_macros,
                )
    dict_append(build_info, **numerix_info)
    config.add_extension('_agg', **build_info)

    sources = [os.path.join('src',plat,'plat_support.i'),
               os.path.join('src',plat,'agg_bmp.cpp'),
               ]
    if plat != 'gl':
        sources.append(os.path.join('src',plat,'agg_platform_specific.cpp'))

    plat_info = {}
    dict_append(plat_info, libraries = [agg_lib],
                include_dirs = kiva_include_dirs,
                extra_compile_args = extra_compile_args,
                depends = ['src'])
    dict_append(plat_info, **numerix_info)

    if plat=='win32':
        dict_append(plat_info, libraries = ['gdi32','user32'])

    elif plat in ['x11','gtk1']:
        # Make sure we raise an error if the information is not found.
        # Frequently, the 64-bit libraries are not in a known location and need
        # manual configuration. From experience, this is usually not detected by
        # the builder if we do not raise an exception.
        x11_info = get_info('x11', notfound_action=2)
        dict_append(plat_info, **x11_info)

    elif plat=='gdkpixbuf2':
        #gdk_pixbuf_xlib_2 = get_info('gdk_pixbuf_xlib_2',notfound_action=1)
        #dict_append(plat_info,**gdk_pixbuf_xlib_2)
        gtk_info = get_info('gtk+-2.0')
        dict_append(plat_info, **gtk_info)
        #x11_info = get_info('x11',notfound_action=1)
        #dict_append(plat_info,**x11_info)

    elif plat == 'gl':
        if sys.platform == 'darwin':
            dict_append(plat_info, **darwin_opengl_opts)
        else:
            msg = "OpenGL build support only on MacOSX right now."
            raise NotImplementedError(msg)


    config.add_extension('_plat_support',
                         sources,
                         **plat_info
                         )

    config.add_data_dir('tests')
    config.add_data_files('*.txt', '*.bat')

    return config
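This example leans heavily on numpy.distutils.system_info.dict_append() to accumulate build options (libraries, include_dirs, macros, ...) into build_info/plat_info before handing them to add_extension(). Its behaviour is roughly "extend existing list entries, create missing keys"; the real helper has extra handling for a few special keys, so treat the following only as an approximation:

# Rough approximation of the dict_append() behaviour used above.
def dict_append_sketch(d, **kws):
    for key, value in kws.items():
        if key in d:
            d[key] = list(d[key]) + list(value)
        else:
            d[key] = value

build_info = {}
dict_append_sketch(build_info, libraries=['GL'], include_dirs=['src'])
dict_append_sketch(build_info, libraries=['GLU'])
print(build_info)  # {'libraries': ['GL', 'GLU'], 'include_dirs': ['src']}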
Example #54
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_mathlibs
    config = Configuration('random', parent_package, top_path)

    def generate_libraries(ext, build_dir):
        config_cmd = config.get_config_cmd()
        libs = get_mathlibs()
        if sys.platform == 'win32':
            libs.extend(['Advapi32', 'Kernel32'])
        ext.libraries.extend(libs)
        return None

    # enable unix large file support on 32 bit systems
    # (64 bit off_t, lseek -> lseek64 etc.)
    if sys.platform[:3] == "aix":
        defs = [('_LARGE_FILES', None)]
    else:
        defs = [('_FILE_OFFSET_BITS', '64'), ('_LARGEFILE_SOURCE', '1'),
                ('_LARGEFILE64_SOURCE', '1')]

    defs.append(('NPY_NO_DEPRECATED_API', 0))
    config.add_data_dir('tests')
    config.add_data_dir('examples')

    EXTRA_LINK_ARGS = []
    # Math lib
    EXTRA_LIBRARIES = ['m'] if os.name != 'nt' else []
    # Some bit generators exclude GCC inlining
    EXTRA_COMPILE_ARGS = ['-U__GNUC_GNU_INLINE__']

    if is_msvc and platform_bits == 32:
        # 32-bit windows requires explicit sse2 option
        EXTRA_COMPILE_ARGS += ['/arch:SSE2']
    elif not is_msvc:
        # Some bit generators require c99
        EXTRA_COMPILE_ARGS += ['-std=c99']

    # Use legacy integer variable sizes
    LEGACY_DEFS = [('NP_RANDOM_LEGACY', '1')]
    PCG64_DEFS = []
    # One can force emulated 128-bit arithmetic if one wants.
    #PCG64_DEFS += [('PCG_FORCE_EMULATED_128BIT_MATH', '1')]

    for gen in ['mt19937']:
        # gen.pyx, src/gen/gen.c, src/gen/gen-jump.c
        config.add_extension(
            '_{0}'.format(gen),
            sources=[
                '_{0}.c'.format(gen), 'src/{0}/{0}.c'.format(gen),
                'src/{0}/{0}-jump.c'.format(gen)
            ],
            include_dirs=['.', 'src', join('src', gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            depends=['_%s.pyx' % gen],
            define_macros=defs,
        )
    for gen in ['philox', 'pcg64', 'sfc64']:
        # gen.pyx, src/gen/gen.c
        _defs = defs + PCG64_DEFS if gen == 'pcg64' else defs
        config.add_extension(
            '_{0}'.format(gen),
            sources=['_{0}.c'.format(gen), 'src/{0}/{0}.c'.format(gen)],
            include_dirs=['.', 'src', join('src', gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            depends=[
                '_%s.pyx' % gen, 'bit_generator.pyx', 'bit_generator.pxd'
            ],
            define_macros=_defs,
        )
    for gen in ['_common', '_bit_generator']:
        # gen.pyx
        config.add_extension(
            gen,
            sources=['{0}.c'.format(gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            include_dirs=['.', 'src'],
            depends=[
                '%s.pyx' % gen,
                '%s.pxd' % gen,
            ],
            define_macros=defs,
        )
        config.add_data_files('{0}.pxd'.format(gen))
    other_srcs = [
        'src/distributions/logfactorial.c',
        'src/distributions/distributions.c',
        'src/distributions/random_mvhg_count.c',
        'src/distributions/random_mvhg_marginals.c',
        'src/distributions/random_hypergeometric.c',
    ]
    for gen in ['_generator', '_bounded_integers']:
        # gen.pyx, src/distributions/distributions.c
        config.add_extension(
            gen,
            sources=['{0}.c'.format(gen)] + other_srcs,
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            include_dirs=['.', 'src'],
            extra_link_args=EXTRA_LINK_ARGS,
            depends=['%s.pyx' % gen],
            define_macros=defs,
        )
    config.add_data_files('_bounded_integers.pxd')
    config.add_extension(
        'mtrand',
        sources=[
            'mtrand.c', 'src/legacy/legacy-distributions.c',
            'src/distributions/logfactorial.c',
            'src/distributions/distributions.c'
        ],
        include_dirs=['.', 'src', 'src/legacy'],
        libraries=EXTRA_LIBRARIES,
        extra_compile_args=EXTRA_COMPILE_ARGS,
        extra_link_args=EXTRA_LINK_ARGS,
        depends=['mtrand.pyx'],
        define_macros=defs + LEGACY_DEFS,
    )
    return config
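The extensions configured above (_mt19937, _pcg64, _generator, _bounded_integers, mtrand, ...) make up the modern numpy.random package; once built they are reached through the public API:

# Usage sketch of the numpy.random modules built by the configuration above.
from numpy.random import Generator, PCG64, default_rng

rng = Generator(PCG64(12345))     # _pcg64 + _generator extensions
print(rng.standard_normal(3))
print(default_rng(0).integers(0, 10, size=5))  # _bounded_integers under the hood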
Example #55
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration,dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py,'U'),
                                     generate_umath_py,
                                     ('.py','U',1))

    header_dir = 'include/numpy' # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        target = join(build_dir,header_dir,'config.h')
        d = dirname(target)
        if not exists(d):
            os.makedirs(d)

        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            moredefs = []

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB',','.join(mathlibs)))

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform=='win32' or os.name=='nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                         headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through numpy_config.h
            target_f.write("""
#ifndef _NUMPY_CONFIG_H_
#error config.h should never be included directly, include numpy_config.h instead
#endif
""")

            target_f.close()
            print('File:',target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir,header_dir,'_numpyconfig.h')
        d = dirname(target)
        if not exists(d):
            os.makedirs(d)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            moredefs = []

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers = ['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NUMPY_ABI_VERSION', '2.0.0'))
            moredefs.append(('NUMPY_API_VERSION', '2.0.0'))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))
    config.add_include_dirs(ndarray_include_dir())

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))

        cmd = get_cmd('build_src')
        cmd.ensure_finalized()

        cmd.template_sources(sources, ext)

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [join(local_dir, subpath, 'loops.c.src'),
                   join(local_dir, subpath, 'umathmodule.c.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))

        cmd = get_cmd('build_src')
        cmd.ensure_finalized()

        cmd.template_sources(sources, ext)


    def generate_umath_c(ext,build_dir):
        target = join(build_dir,header_dir,'__umath_generated.c')
        dir = dirname(target)
        if not exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script,target):
            f = open(target,'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('include','numpy','*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir,'genapi.py'),
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('_sort',
                         sources=[join('src','_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  ],
                         library_dirs=[ndarray_lib_dir()],
                         libraries=['ndarray'],
                         )

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h'),
    ]

    multiarray_src = [
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'usertypes.c'),
    ]

    if PYTHON_HAS_UNICODE_WIDE:
        multiarray_src.append(join('src', 'multiarray', 'ucsnarrow.c'))

    umath_src = [join('src', 'umath', 'umathmodule.c.src'),
                 join('src', 'umath', 'loops.c.src'),
                 join('src', 'umath', 'ufunc_object.c')]

    umath_deps = [generate_umath_py,
                  join(codegen_dir,'generate_ufunc_api.py')]

    config.add_extension('multiarray',
                         sources = multiarray_src +
                                [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_numpy_api,
                                 join(codegen_dir, 'generate_numpy_api.py'),
                                 join('*.py')],
                         depends = deps + multiarray_deps,
                         library_dirs=[ndarray_lib_dir()],
                         libraries=['ndarray'],
                         )

    config.add_extension('umath',
                         sources = [generate_config_h,
                                    generate_numpyconfig_h,
                                    generate_umath_c,
                                    generate_ufunc_api,
                                    ] + umath_src,
                         depends = deps + umath_deps,
                         library_dirs=[ndarray_lib_dir()],
                         libraries=['ndarray'],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src','scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         library_dirs=[ndarray_lib_dir()],
                         libraries=['ndarray'],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt',0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                # dotblas needs ATLAS; a Fortran-compiled BLAS will not be sufficient.
                return None
            return ext.depends[:1]
        return None # no extension module will be built

    config.add_extension('_dotblas',
                         sources = [get_dotblas_sources],
                         depends=[join('blasdot','_dotblas.c'),
                                  join('blasdot','cblas.h'),
                                  ],
                         include_dirs = ['blasdot'],
                         library_dirs=[ndarray_lib_dir()],
                         libraries=['ndarray'],
                         extra_info = blas_info
                         )

    config.add_extension('umath_tests',
                    sources = [join('src','umath', 'umath_tests.c.src')])

    config.add_extension('multiarray_tests',
                    sources = [join('src', 'multiarray',
                                    'multiarray_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
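generate_config_h, generate_numpyconfig_h and the products of generate_api_func() are passed directly in the sources lists above: numpy.distutils allows a source entry to be a callable taking (ext, build_dir) that returns the path(s) of generated files (or None to skip the entry). A minimal sketch of such a generator; the file name and contents here are made up purely for illustration:

import os

# Minimal sketch of a callable source, following the same (ext, build_dir)
# convention as generate_config_h above; the generated file is hypothetical.
def generate_version_c(ext, build_dir):
    target = os.path.join(build_dir, '__version_generated.c')
    if not os.path.exists(target):
        with open(target, 'w') as f:
            f.write('const char *generated_version = "0.0.0";\n')
    return target

# config.add_extension('example', sources=['examplemodule.c', generate_version_c])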
Example #56
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('physics', parent_package, top_path)
    config.make_config_py()  # installs __config__.py
    config.add_data_dir('tests')
    return config
Example #57
File: setup.py Project: wenqixe/scipy
def configuration(parent_package='', top_path=None):
    from distutils.sysconfig import get_python_inc
    from scipy._build_utils.system_info import get_info, numpy_info
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from scipy._build_utils import (get_g77_abi_wrappers, gfortran_legacy_flag_hook,
                                    blas_ilp64_pre_build_hook, get_f2py_int64_options,
                                    uses_blas64)

    config = Configuration('linalg', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')

    atlas_version = ([v[3:-3] for k, v in lapack_opt.get('define_macros', [])
                      if k == 'ATLAS_INFO']+[None])[0]
    if atlas_version:
        print(('ATLAS version: %s' % atlas_version))

    if uses_blas64():
        lapack_ilp64_opt = get_info('lapack_ilp64_opt', 2)

    # fblas:
    sources = ['fblas.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    depends = ['fblas_l?.pyf.src']

    config.add_extension('_fblas',
                         sources=sources,
                         depends=depends,
                         extra_info=lapack_opt
                         )

    if uses_blas64():
        sources = ['fblas_64.pyf.src'] + sources[1:]
        ext = config.add_extension('_fblas_64',
                                   sources=sources,
                                   depends=depends,
                                   f2py_options=get_f2py_int64_options(),
                                   extra_info=lapack_ilp64_opt)
        ext._pre_build_hook = blas_ilp64_pre_build_hook(lapack_ilp64_opt)

    # flapack:
    sources = ['flapack.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    dep_pfx = join('src', 'lapack_deprecations')
    deprecated_lapack_routines = [join(dep_pfx, c + 'gegv.f') for c in 'cdsz']
    sources += deprecated_lapack_routines
    depends = ['flapack_gen.pyf.src',
               'flapack_gen_banded.pyf.src',
               'flapack_gen_tri.pyf.src',
               'flapack_pos_def.pyf.src',
               'flapack_pos_def_tri.pyf.src',
               'flapack_sym_herm.pyf.src',
               'flapack_other.pyf.src',
               'flapack_user.pyf.src']

    config.add_extension('_flapack',
                         sources=sources,
                         depends=depends,
                         extra_info=lapack_opt
                         )

    if uses_blas64():
        sources = ['flapack_64.pyf.src'] + sources[1:]
        ext = config.add_extension('_flapack_64',
                                   sources=sources,
                                   depends=depends,
                                   f2py_options=get_f2py_int64_options(),
                                   extra_info=lapack_ilp64_opt)
        ext._pre_build_hook = blas_ilp64_pre_build_hook(lapack_ilp64_opt)

    if atlas_version is not None:
        # cblas:
        config.add_extension('_cblas',
                             sources=['cblas.pyf.src'],
                             depends=['cblas.pyf.src', 'cblas_l1.pyf.src'],
                             extra_info=lapack_opt
                             )

        # clapack:
        config.add_extension('_clapack',
                             sources=['clapack.pyf.src'],
                             depends=['clapack.pyf.src'],
                             extra_info=lapack_opt
                             )

    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'), join('src', 'lu.f')],
                         extra_info=lapack_opt
                         )

    # _interpolative:
    ext = config.add_extension('_interpolative',
                               sources=[join('src', 'id_dist', 'src', '*.f'),
                                        "interpolative.pyf"],
                               extra_info=lapack_opt
                               )
    ext._pre_build_hook = gfortran_legacy_flag_hook

    # _solve_toeplitz:
    config.add_extension('_solve_toeplitz',
                         sources=[('_solve_toeplitz.c')],
                         include_dirs=[get_numpy_include_dirs()])

    config.add_data_dir('tests')

    # Cython BLAS/LAPACK
    config.add_data_files('cython_blas.pxd')
    config.add_data_files('cython_lapack.pxd')

    sources = ['_blas_subroutine_wrappers.f', '_lapack_subroutine_wrappers.f']
    sources += get_g77_abi_wrappers(lapack_opt)
    includes = numpy_info().get_include_dirs() + [get_python_inc()]
    config.add_library('fwrappers', sources=sources, include_dirs=includes)

    config.add_extension('cython_blas',
                         sources=['cython_blas.c'],
                         depends=['cython_blas.pyx', 'cython_blas.pxd',
                                  'fortran_defs.h', '_blas_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack_opt)

    config.add_extension('cython_lapack',
                         sources=['cython_lapack.c'],
                         depends=['cython_lapack.pyx', 'cython_lapack.pxd',
                                  'fortran_defs.h', '_lapack_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack_opt)

    config.add_extension('_decomp_update',
                         sources=['_decomp_update.c'])

    # Add any license files
    config.add_data_files('src/id_dist/doc/doc.tex')
    config.add_data_files('src/lapack_deprecations/LICENSE')

    return config
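The _fblas/_flapack wrappers generated from the .pyf.src templates above are what the public scipy.linalg routines call into, and the cython_blas/cython_lapack extensions plus their .pxd files expose the same routines to Cython code. From Python the result looks like this (assumes an installed SciPy build):

# Usage sketch: the f2py-generated wrappers configured above, reached
# through the public scipy.linalg interface.
import numpy as np
from scipy.linalg import blas, lu_factor

x = np.array([3.0, 4.0])
print(blas.dnrm2(x))             # 5.0, via the _fblas extension
lu, piv = lu_factor(np.eye(3))   # LAPACK getrf via _flapack
print(piv)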
Example #58
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('compression',parent_package,top_path)
    config.add_subpackage('NLM')
    config.add_data_dir('tests')
    return config
Example #59
def configuration(parent_package="", top_path=None):
    config = Configuration("f2py", parent_package, top_path)
    config.add_data_dir("tests")
    config.add_data_files("src/fortranobject.c", "src/fortranobject.h")
    return config
Example #60
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, get_mathlibs
    config = Configuration('random', parent_package, top_path)

    def generate_libraries(ext, build_dir):
        config_cmd = config.get_config_cmd()
        libs = get_mathlibs()
        if sys.platform == 'win32':
            libs.extend(['Advapi32', 'Kernel32'])
        ext.libraries.extend(libs)
        return None

    # enable unix large file support on 32 bit systems
    # (64 bit off_t, lseek -> lseek64 etc.)
    if sys.platform[:3] == "aix":
        defs = [('_LARGE_FILES', None)]
    else:
        defs = [('_FILE_OFFSET_BITS', '64'), ('_LARGEFILE_SOURCE', '1'),
                ('_LARGEFILE64_SOURCE', '1')]

    defs.append(('NPY_NO_DEPRECATED_API', 0))
    config.add_data_dir('tests')

    EXTRA_LINK_ARGS = []
    # Math lib
    EXTRA_LIBRARIES = ['m'] if os.name != 'nt' else []
    # Some bit generators exclude GCC inlining
    EXTRA_COMPILE_ARGS = ['-U__GNUC_GNU_INLINE__']

    if is_msvc and platform_bits == 32:
        # 32-bit windows requires explicit sse2 option
        EXTRA_COMPILE_ARGS += ['/arch:SSE2']
    elif not is_msvc:
        # Some bit generators require c99
        EXTRA_COMPILE_ARGS += ['-std=c99']
        INTEL_LIKE = any([
            val in k.lower() for k in platform.uname()
            for val in ('x86', 'i686', 'i386', 'amd64')
        ])
        if INTEL_LIKE:
            # Assumes GCC or GCC-like compiler
            EXTRA_COMPILE_ARGS += ['-msse2']

    # Use legacy integer variable sizes
    LEGACY_DEFS = [('NP_RANDOM_LEGACY', '1')]
    PCG64_DEFS = []
    if 1 or sys.maxsize < 2**32 or os.name == 'nt':
        # Force emulated mode here
        PCG64_DEFS += [('PCG_FORCE_EMULATED_128BIT_MATH', '1')]

    config.add_extension(
        'entropy',
        sources=['entropy.c', 'src/entropy/entropy.c'] + [generate_libraries],
        libraries=EXTRA_LIBRARIES,
        extra_compile_args=EXTRA_COMPILE_ARGS,
        extra_link_args=EXTRA_LINK_ARGS,
        depends=[
            join('src', 'splitmix64', 'splitmix.h'),
            join('src', 'entropy', 'entropy.h'),
            'entropy.pyx',
        ],
        define_macros=defs,
    )
    for gen in ['mt19937']:
        # gen.pyx, src/gen/gen.c, src/gen/gen-jump.c
        config.add_extension(
            gen,
            sources=[
                '{0}.c'.format(gen), 'src/{0}/{0}.c'.format(gen),
                'src/{0}/{0}-jump.c'.format(gen)
            ],
            include_dirs=['.', 'src', join('src', gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            depends=['%s.pyx' % gen],
            define_macros=defs,
        )
    for gen in [
            'philox', 'threefry', 'xoshiro256', 'xoshiro512', 'pcg64', 'pcg32'
    ]:
        # gen.pyx, src/gen/gen.c
        _defs = defs + PCG64_DEFS if gen == 'pcg64' else defs
        config.add_extension(
            gen,
            sources=['{0}.c'.format(gen), 'src/{0}/{0}.c'.format(gen)],
            include_dirs=['.', 'src', join('src', gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            depends=['%s.pyx' % gen],
            define_macros=_defs,
        )
    for gen in ['common']:
        # gen.pyx
        config.add_extension(
            gen,
            sources=['{0}.c'.format(gen)],
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            extra_link_args=EXTRA_LINK_ARGS,
            include_dirs=['.', 'src'],
            depends=['%s.pyx' % gen],
            define_macros=defs,
        )
    other_srcs = [
        'src/distributions/logfactorial.c',
        'src/distributions/distributions.c',
        'src/distributions/random_hypergeometric.c',
    ]
    for gen in ['generator', 'bounded_integers']:
        # gen.pyx, src/distributions/distributions.c
        config.add_extension(
            gen,
            sources=['{0}.c'.format(gen)] + other_srcs,
            libraries=EXTRA_LIBRARIES,
            extra_compile_args=EXTRA_COMPILE_ARGS,
            include_dirs=['.', 'src'],
            extra_link_args=EXTRA_LINK_ARGS,
            depends=['%s.pyx' % gen],
            define_macros=defs,
        )
    config.add_extension(
        'mtrand',
        # mtrand does not depend on random_hypergeometric.c.
        sources=[
            'mtrand.c', 'src/legacy/legacy-distributions.c',
            'src/distributions/logfactorial.c',
            'src/distributions/distributions.c'
        ],
        include_dirs=['.', 'src', 'src/legacy'],
        libraries=EXTRA_LIBRARIES,
        extra_compile_args=EXTRA_COMPILE_ARGS,
        extra_link_args=EXTRA_LINK_ARGS,
        depends=['mtrand.pyx'],
        define_macros=defs + LEGACY_DEFS,
    )
    return config
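The PCG64_DEFS block near the top of this example forces PCG_FORCE_EMULATED_128BIT_MATH on ('if 1 or ...'), but the condition it short-circuits is the usual detection of 32-bit Python or Windows, where no native __int128 is available. A quick check of what that test actually evaluates:

# Sketch of the 32-bit detection used above: on a 32-bit Python build
# sys.maxsize is 2**31 - 1, so the emulated 128-bit math path is required.
import sys, os
force_emulated = sys.maxsize < 2**32 or os.name == 'nt'
print(force_emulated)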