Beispiel #1
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    # libcstat.a is linked against LAPACK, which may be a Fortran library,
    # so the linker needs the LAPACK build information.  'lapack_opt' gives
    # the most detail on Linux (32 and 64 bit); on OSX it can be incomplete,
    # hence the plain 'lapack' fallback (scipy.linalg itself only uses
    # lapack_opt).
    lapack_info = get_info('lapack_opt', 0)
    if 'libraries' not in lapack_info:
        lapack_info = get_info('lapack', 0)

    config = Configuration('group', parent_package, top_path)
    config.add_subpackage('tests')

    # The three Cython extensions share an identical cstat/LAPACK link setup.
    for ext_name in ('onesample', 'twosample', 'glm_twolevel'):
        config.add_extension(
            ext_name,
            sources=['%s.pyx' % ext_name],
            libraries=['cstat'],
            extra_info=lapack_info,
            )
    return config
Beispiel #2
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the ``umfpack`` subpackage.

    Links the SWIG-generated ``__umfpack`` extension against the UMFPACK
    and BLAS libraries reported by numpy's system_info.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    # notfound_action=1 only *warns* when UMFPACK is missing, so umf_info
    # may come back empty.
    umf_info = get_info('umfpack', notfound_action=1)

    ## The following addition is needed when linking against a umfpack built
    ## from the latest SparseSuite. Not (strictly) needed when linking against
    ## the version in the ubuntu repositories.
    if sys.platform != 'darwin' and 'libraries' in umf_info:
        # librt provides clock_gettime() on glibc systems.  Guard the key so
        # an empty umf_info no longer raises KeyError here.
        umf_info['libraries'].insert(0, 'rt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    # Source generator: hand the .i file to the build only when UMFPACK was
    # actually found; otherwise the extension is silently skipped.
    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    config.add_extension('__umfpack',
                         sources=[umfpack_i],
                         depends=['umfpack.i'],
                         **build_info)

    return config
Beispiel #3
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('graph', parent_package, top_path)
    config.add_data_dir('tests')

    # libcstat.a is linked against LAPACK, which may be a Fortran library,
    # so the linker needs that information.  'lapack_opt' gives the most
    # detail on Linux; on OSX it may be incomplete, hence the 'lapack'
    # fallback (scipy.linalg itself only uses lapack_opt).
    lapack_info = get_info('lapack_opt', 0)
    if 'libraries' not in lapack_info:
        lapack_info = get_info('lapack', 0)

    # Both C extensions share the same cstat/LAPACK link configuration.
    for ext_name, src in (('_graph', 'graph.c'), ('_field', 'field.c')):
        config.add_extension(
            ext_name,
            sources=[src],
            libraries=['cstat'],
            extra_info=lapack_info,
            )

    return config
Beispiel #4
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the PySparse ``sparse`` subpackage."""
    import os
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('sparse', parent_package, top_path)

    # Get BLAS info from site.cfg: fall back from the optimized BLAS to a
    # generic one before giving up (warn only; the extension still builds).
    blas_info = get_info('blas_opt', 0)
    if not blas_info:
        blas_info = get_info('blas', 0)
        if not blas_info:
            print('No blas info found')
    print('Using BLAS info:')
    print(blas_info)

    spmatrix_src = ['spmatrixmodule.c']
    config.add_extension(
        name='spmatrix',
        define_macros=[('LENFUNC_OK', 1)],
        sources=[os.path.join('src', name) for name in spmatrix_src],
        libraries=[],
        include_dirs=['src'],
        extra_info=blas_info,
        )

    config.make_config_py()
    return config
Beispiel #5
0
def configuration(parent_package='',top_path=None):
    import numpy
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    # May be empty when UMFPACK is absent (notfound_action=1 only warns).
    umf_info = get_info('umfpack', notfound_action=1)
    blas_info = get_info('blas_opt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    # Source generator for the SWIG interface: yield the .i file only when
    # UMFPACK was actually found.
    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    # Merge UMFPACK and BLAS build settings into a single keyword dict.
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    # The __umfpack extension itself is currently disabled.
    #config.add_extension('__umfpack',
    #                      sources=[umfpack_i],
    #                      depends=['umfpack.i'],
    #                      **build_info)

    return config
Beispiel #6
0
def run_install():
    """Install SCS, trying progressively weaker BLAS/LAPACK discovery.

    Order: explicit environment variables, then numpy's optimized
    blas_opt/lapack_opt entries, then plain blas/lapack, and finally no
    BLAS/LAPACK at all (SCS then cannot solve SDPs).
    """
    if env_lib_dirs or env_libs:
        print("using environment variables for blas/lapack libraries")
        env_vars = {}
        if env_lib_dirs:
            env_vars['library_dirs'] = env_lib_dirs.split(':')
        if env_libs:
            env_vars['libraries'] = env_libs.split(':')
        install_scs(blas_info=env_vars, lapack_info={})
        return

    # environment variables not set, using defaults instead
    try:
        print("using blas_opt / lapack_opt")
        install_scs(blas_info=get_info('blas_opt'), lapack_info=get_info('lapack_opt'))
        return
    # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
    # propagate
    except Exception:
        pass # fall back to blas / lapack (not opt)

    print("blas_opt / lapack_opt install failed, trying blas / lapack")
    try:
        install_scs(blas_info=get_info('blas'), lapack_info=get_info('lapack'))
    except Exception:
        # Last resort: build without BLAS/LAPACK and tell the user how to
        # point us at their libraries.
        install_scs(blas_info={}, lapack_info={})
        print("###############################################################################################")
        print("# failed to find blas/lapack libs, SCS cannot solve SDPs but can solve LPs, SOCPs, ECPs, PCPs #")
        print("# install blas/lapack and run this install script again to allow SCS to solve SDPs            #")
        print("#                                                                                             #")
        print("# scs will use environment variables BLAS_LAPACK_LIB_PATHS and BLAS_LAPACK_LIBS if set        #")
        print("# use this to link against blas/lapack libs that scs can't find on it's own, usage ex:        #")
        print("#        >> export BLAS_LAPACK_LIB_PATHS=/usr/lib/:/other/dir                                 #")
        print("#        >> export BLAS_LAPACK_LIBS=blas:lapack                                               #")
        print("#        >> python setup.py install                                                           #")
        print("###############################################################################################")
Beispiel #7
0
def get_blas_lapack_info():
    """ Try three methods for getting blas/lapack info.

    If successful, set LAPACK_LIB_FOUND and return dictionary with the arguments

    If not successful, print error message and return empty dictionary
    """
    # 'lapack_opt' is always tried first (the old `info = {}` +
    # `if not info:` pre-check was a redundant always-true branch), then the
    # plain 'lapack' entry as a fallback.
    print("Trying 'lapack_opt'")
    info = get_info('lapack_opt')

    if not info:
        print("lapack_opt failed. Trying 'lapack'")
        info = get_info('lapack')

    if info:
        # Advertise LAPACK availability to the C build via a define.
        info['define_macros'] = info.get('define_macros', []) + [('LAPACK_LIB_FOUND', None)]
        print('the resulting info is: ', info)
    else:
        print("##########################################################################################")
        print("# failed to find lapack libs, SCS cannot solve SDPs but can solve LPs, SOCPs, ECPs, PCPs #")
        print("# install lapack and run this install script again to allow SCS to solve SDPs            #")
        print("##########################################################################################")

    return info
Beispiel #8
0
def matrix_initialize():
    """Pick the matrix-multiplication backend (scipy BLAS dgemm or numpy.dot).

    Sets the module-level flags once; subsequent calls are no-ops.
    """
    global initializedMatrix
    global useBLAS
    global useNumpy  # module based variable
    global dgemm

    if initializedMatrix:
        sys.stderr.write("INFO: matrix_inialize called multiple times\n")
        return

    if useBLAS and useNumpy is None:
        print(get_info('blas_opt'))
        try:
            # ImportError when scipy is absent, AttributeError when dgemm is
            # not exposed -- fall back to numpy.dot in either case (the old
            # code caught only AttributeError and let ImportError escape).
            from scipy.linalg.blas import dgemm
            sys.stderr.write("INFO: using linalg.fblas\n")
            useNumpy = False
        except (ImportError, AttributeError):
            sys.stderr.write("WARNING: linalg.fblas not found, using numpy.dot instead!\n")
            useNumpy = True
    else:
        sys.stderr.write("INFO: using numpy.dot\n")
        useNumpy = True
    if cuda.useCUDA:
        sys.stderr.write("INFO: with CUDA support\n")
    initializedMatrix = True
Beispiel #9
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the 'spi' subpackage: compiles the bundled
    SPIDER Fortran routines into a static library ('spiutil') and several
    f2py extension modules linked against it and an FFT backend (MKL/FFTW).
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    from arachnid.distutils.compiler import compiler_options #detect_openmp
    import os
    
    # compiler_options() returns per-compiler flag/library/macro sets; only
    # the first six entries (Fortran args/libs/defs, C args/libs/defs) are
    # used here.
    compiler_args, compiler_libraries, compiler_defs, ccompiler_args, ccompiler_lib, ccompiler_defs = compiler_options()[:6]
    # no-op expression statement: keeps ccompiler_lib "used"
    ccompiler_lib;
    
    
    # FFT backend discovery: prefer MKL, then FFTW (adding the single
    # precision fftw3f library and /usr/lib), and finally just assume
    # fftw3f is on the default search path.  notfound_action=2 makes
    # get_info raise when the library is not found.
    try:
        fftw_opt = get_info('mkl',notfound_action=2)
    except:
        try: 
            fftw_opt = get_info('fftw',notfound_action=2)
            #fftw_opt['libraries']=['fftw3f']
            fftw_opt['libraries'].extend(['fftw3f'])
            fftw_opt['library_dirs'].extend(['/usr/lib'])
        except: fftw_opt=dict(libraries=['fftw3f'])
    # Ensure both keys exist so they can be used unconditionally below.
    if 'library_dirs' not in fftw_opt: fftw_opt['library_dirs']=[]
    if 'include_dirs' not in fftw_opt: fftw_opt['include_dirs']=[]
    config = Configuration('spi', parent_package, top_path)
    #-ftrap=common
    # Flip this dead branch by hand to enable f2py C-API debugging.
    if 1 == 0:
        f2py_options = ['--debug-capi']
    else: f2py_options=[]
    
    flink_args = compiler_args
    #-ffixed-form define_macros=[('SP_LIBFFTW3', 1)]+compiler_defs, 
    # Shared build options for the spiutil static library (SP_LIBFFTW3
    # selects the FFTW code path in the SPIDER sources).
    library_options=dict(macros=[('SP_LIBFFTW3', 1)]+compiler_defs, extra_f77_compile_args=compiler_args, extra_f90_compile_args=compiler_args)#extra_f77_compiler_args=['-fdefault-real-8'],, ('SP_MP', 1)
                          #extra_f90_compiler_args=['-fdefault-real-8'])
    config.add_library('spiutil', sources=['spiutil.F90', 'spider/tfd.F90', 'spider/fq_q.F90', 'spider/fq3_p.F90', 'spider/parabl.F90', 'spider/pksr3.F90', 'spider/fftw3.F90', 
                                           'spider/ccrs.F90', 'spider/apcc.F90', 'spider/quadri.F90', 'spider/rtsq.F90', 'spider/cald.F90', 'spider/bldr.F90', 
                                           'spider/fmrs.F90', 'spider/fmrs_2.F90', 'spider/besi1.F90', 'spider/wpro_n.F90', 'spider/prepcub.F90',
                                           'spider/fint.F90', 'spider/fint3.F90', 'spider/betai.F90', 'spider/gammln.F90', 'spider/betacf.F90', 'spider/histe.F90',
                                           'spider/interp_fbs3.F90', 'spider/interp_fbs.F90', 'spider/fbs2.F90', 'spider/fbs3.F90'], 
                                           depends=['spider/CMBLOCK.INC', 'spider/FFTW3.INC'], **library_options) #, 'fmrs_info.mod', 'type_kinds.mod'
    # Every extension links against spiutil plus the FFT libraries; remove
    # 'libraries' from fftw_opt so only library_dirs/include_dirs remain.
    fftlibs = fftw_opt['libraries']+compiler_libraries
    del fftw_opt['libraries']
    config.add_extension('_spider_reconstruct', sources=['backproject_nn4.f90', 'backproject_bp3f.f90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_reproject', sources=['reproject.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_interpolate', sources=['interpolate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_ctf', sources=['ctf.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])

    #config.add_extension('_spider_interpolate', sources=['interpolate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=compiler_args, library_dirs=fftw_opt['library_dirs'])
    #-fdefault-real-8
    #rot_src = 'spider_rotate_dist_wrap.cpp' if os.path.exists(os.path.join(os.path.dirname(__file__), 'spider_rotate_dist_wrap.cpp')) else 'rotate.i'
    #config.add_extension('_spider_rotate_dist', sources=[rot_src], define_macros=[('__STDC_FORMAT_MACROS', 1)]+ccompiler_defs, depends=['rotate.hpp'], swig_opts=['-c++'], libraries=['spiutil']+fftlibs, extra_compile_args=ccompiler_args, extra_link_args=compiler_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_rotate', sources=['rotate.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    #config.add_extension('_spider_align', sources=['align.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    config.add_extension('_spider_filter', sources=['filter.F90'], libraries=['spiutil']+fftlibs, f2py_options=f2py_options, define_macros=ccompiler_defs, extra_compile_args=ccompiler_args, extra_link_args=flink_args, library_dirs=fftw_opt['library_dirs'])
    # Headers live next to this file and in the spider/ subdirectory.
    config.add_include_dirs(os.path.dirname(__file__))
    config.add_include_dirs(os.path.join(os.path.dirname(__file__), 'spider'))
    config.add_include_dirs(fftw_opt['include_dirs'])
    
    return config
Beispiel #10
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the NLPy 'krylov' subpackage (PyGLTR).

    Reads HSL and GALAHAD source locations from site.cfg, builds the
    'nlpy_gltr' Fortran library from those sources and wraps it in the
    '_pygltr' C extension.
    """
    import os
    import ConfigParser
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    # Read relevant NLPy-specific configuration options.
    nlpy_config = ConfigParser.SafeConfigParser()
    nlpy_config.read(os.path.join(top_path, 'site.cfg'))
    hsl_dir = nlpy_config.get('HSL', 'hsl_dir')
    galahad_dir = nlpy_config.get('GALAHAD', 'galahad_dir')

    config = Configuration('krylov', parent_package, top_path)

    # Get BLAS/LAPACK info from site.cfg; these may legitimately be empty,
    # so just warn instead of failing.
    blas_info = get_info('blas_opt', 0)
    if not blas_info:
        print('No blas info found')
    lapack_info = get_info('lapack_opt', 0)
    if not lapack_info:
        print('No lapack info found')

    gdir = os.path.join(galahad_dir, 'src')
    # GALAHAD/HSL sources needed by the GLTR trust-region solver, in
    # dependency order, followed by the local f90 wrapper.
    libgltr_src = [os.path.join(hsl_dir, 'hsl_zd11d.f90'),
                   os.path.join(gdir, 'auxiliary', 'norms.f90'),
                   os.path.join(gdir, 'rand', 'rand.f90'),
                   os.path.join(gdir, 'sym', 'symbols.f90'),
                   os.path.join(gdir, 'smt', 'smt.f90'),
                   os.path.join(gdir, 'space', 'space.f90'),
                   os.path.join(gdir, 'spec', 'specfile.f90'),
                   os.path.join(gdir, 'sort', 'sort.f90'),
                   os.path.join(gdir, 'roots', 'roots.f90'),
                   os.path.join(gdir, 'gltr', 'gltr.f90'),
                   os.path.join('src', 'pygltr.f90')]
    pygltr_src = ['_pygltr.c']

    # Build PyGLTR
    config.add_library(
        name='nlpy_gltr',
        sources=libgltr_src,
        extra_info=[blas_info, lapack_info],
        )

    config.add_extension(
        name='_pygltr',
        sources=[os.path.join('src', name) for name in pygltr_src],
        libraries=['nlpy_gltr'],
        include_dirs=['src'],
        extra_info=[blas_info, lapack_info],
        )

    config.make_config_py()
    return config
Beispiel #11
0
def get_numpy_options():
    """Return setup() keyword options for building PRIMME against
    NumPy's BLAS/LAPACK.

    Detects ATLAS configurations with an incomplete LAPACK and falls back
    to a full LAPACK in that case; if nothing is found, links against the
    generic 'lapack'/'blas' library names.
    """
    # Third-party modules - we depend on numpy for everything
    import numpy
    try:
        from numpy.distutils.system_info import get_info
    except ImportError:
        # Some numpy layouts expose get_info here instead; narrowed from a
        # bare `except:` so unrelated errors are not masked.
        from numpy.__config__ import get_info

    # Obtain the numpy include directory
    numpy_include = numpy.get_include()

    # Obtain BLAS/LAPACK linking options
    lapack_info = get_info('lapack_opt')
    blas_info = get_info('blas_opt')
    using_atlas = False
    using_f77blas = False
    using_lapack = False
    for lib in lapack_info.get('libraries', []) + blas_info.get('libraries', []):
        if "atlas" in lib:
            using_atlas = True
        if "f77blas" in lib:
            using_f77blas = True
        if "lapack" in lib:
            using_lapack = True
    if using_atlas and (not using_f77blas or not using_lapack):
        lapack_info = get_info('atlas')
        # ATLAS notices an incomplete LAPACK by not setting language to f77
        complete_lapack = lapack_info.get('language', "") == "f77"
        if complete_lapack:
            blas_info = {}
        else:
            # If ATLAS has an incomplete LAPACK, use a regular one
            blas_info = get_info('atlas_blas')
            lapack_info = get_info('lapack')

    blaslapack_libraries = lapack_info.get('libraries', []) + blas_info.get('libraries', [])
    blaslapack_library_dirs = lapack_info.get('library_dirs', []) + blas_info.get('library_dirs', [])
    blaslapack_extra_link_args = lapack_info.get('extra_link_args', []) + blas_info.get('extra_link_args', [])
    if not blaslapack_libraries and not blaslapack_extra_link_args:
        # Nothing detected: hope the generic names are on the linker path.
        blaslapack_libraries = ['lapack', 'blas']

    r = dict(
        include_dirs=[numpy_include, "primme/include", "primme/src/include"],
        library_dirs=blaslapack_library_dirs,
        libraries=blaslapack_libraries,
        extra_link_args=blaslapack_extra_link_args,
    )

    # Link dynamically on Windows and statically otherwise
    if sys.platform == 'win32':
        r['libraries'] = ['primme'] + r['libraries']
    else:
        r['extra_objects'] = ['../lib/libprimme.a']

    return r
Beispiel #12
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the PySparse 'eigen' subpackage (JDSYM)."""
    import os
    import ConfigParser
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    # Read relevant PySparse-specific configuration options.
    pysparse_config = ConfigParser.SafeConfigParser()
    pysparse_config.read(os.path.join(top_path, 'site.cfg'))
    dflt_lib_dirs = getoption(pysparse_config, 'DEFAULT', 'library_dirs')
    if dflt_lib_dirs is None:
        dflt_lib_dirs = []
    dflt_libs = getoption(pysparse_config, 'DEFAULT', 'libraries')
    if dflt_libs is None:
        dflt_libs = []

    print('Using dflt_lib_dirs = ', dflt_lib_dirs)
    print('Using dflt_libs = ', dflt_libs)

    config = Configuration('eigen', parent_package, top_path)

    # Get BLAS info from site.cfg; fall back to the generic entry, warn if
    # neither is present.
    blas_info = get_info('blas_opt', 0)
    if not blas_info:
        blas_info = get_info('blas', 0)
        if not blas_info:
            print('No blas info found')
    print('Eigen:: Using BLAS info:')
    print(blas_info)

    # Get LAPACK info from site.cfg (same fallback scheme).
    lapack_info = get_info('lapack_opt', 0)
    if not lapack_info:
        lapack_info = get_info('lapack', 0)
        if not lapack_info:
            print('No lapack info found')
    print('Eigen:: Using LAPACK info:')
    print(lapack_info)

    jdsym_src = ['jdsymmodule.c']
    config.add_extension(
        name='jdsym',
        sources=[os.path.join('src', name) for name in jdsym_src],
        libraries=dflt_libs,
        library_dirs=dflt_lib_dirs,
        include_dirs=['src'],
        extra_info=[blas_info, lapack_info],
        )

    config.make_config_py()
    return config
Beispiel #13
0
def matrix_initialize(useBLAS=True):
    """Choose the multiplication backend: scipy's fblas or numpy.dot.

    Runs the probe only once (while the module-level useNumpy flag is still
    None) and records the choice in that flag.
    """
    global useNumpy  # module based variable
    if useBLAS and useNumpy is None:  # `is None`, not `== None`
        print(get_info('blas_opt'))
        try:
            # AttributeError when scipy.linalg does not expose fblas.
            linalg.fblas
            sys.stderr.write("INFO: using linalg.fblas\n")
            useNumpy = False
        except AttributeError:
            sys.stderr.write("WARNING: linalg.fblas not found, using numpy.dot instead!\n")
            useNumpy = True
    else:
        sys.stderr.write("INFO: using numpy.dot\n")
        useNumpy = True
Beispiel #14
0
def configuration(parent_package='', top_path=None):
    """Build configuration for the 'glm' subpackage (coordinate descent)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, get_standard_file, BlasNotFoundError
    config = Configuration('glm', parent_package, top_path)

    site_cfg = ConfigParser()
    site_cfg.read(get_standard_file('site.cfg'))

    # cd fast needs CBLAS
    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        # No usable (ATLAS) BLAS: fall back to the bundled cblas sources.
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])

    config.add_extension('cd_fast',
                         sources=[join('src', 'cd_fast.c')],
                         libraries=cblas_libs,
                         # Concatenate the BLAS include dirs -- the old code
                         # appended the list itself as one element, nesting a
                         # list inside include_dirs.
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include()]
                                      + blas_info.pop('include_dirs', []),
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info
                         )

    # add other directories
    config.add_subpackage('tests')
    config.add_subpackage('benchmarks')
    config.add_subpackage('sparse')

    return config
Beispiel #15
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the 'ops' subpackage."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, NotFoundError
    config = Configuration('ops', parent_package, top_path)
    config.add_extension('apply_window_ext', join('src', 'apply_window_ext.c'))
    config.add_extension('local_extrema_ext', join('src', 'local_extrema_ext.c'))

    # Fortran MINPACK routines used by the ACF extension below.
    config.add_library('fminpack',
                       sources=[join('src', 'minpack', '*.f')],
                       )
    config.add_extension('regress_ext',
                         sources=[join('src', 'regress_ext.c')],
                         )
    config.add_extension('acf_ext',
                         sources=[join('src', 'acf_ext.c'),
                                  join('src', 'acf.c')],
                         libraries=['fminpack'])

    # discrete_gauss_ext needs FFTW3; build it only when FFTW3 is found.
    fftw3_info = get_info('fftw3')
    if fftw3_info:
        config.add_extension('discrete_gauss_ext',
                             sources=[join('src', 'discrete_gauss_ext.c'),
                                      join('src', 'discrete_gauss.c')],
                             extra_info=fftw3_info)
    else:
        print('FFTW3 not found: skipping discrete_gauss_ext extension')
    return config
Beispiel #16
0
def configuration(parent_package='', top_path=None):
    """Build configuration for scipy.spatial (qhull, ckdtree, distance)."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # Bundled qhull sources.
    qhull_src = ['geom2.c', 'geom.c', 'global.c', 'io.c', 'libqhull.c',
                 'mem.c', 'merge.c', 'poly2.c', 'poly.c', 'qset.c',
                 'random.c', 'rboxlib.c', 'stat.c', 'user.c', 'usermem.c',
                 'userprintf.c', 'userprintf_rbox.c']
    qhull_src = [join('qhull', 'src', x) for x in qhull_src]

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    # get_numpy_include_dirs() returns a *list* of paths: extend, don't
    # append, so inc_dirs stays a flat list of directory strings.
    inc_dirs.extend(get_numpy_include_dirs())

    cfg = dict(get_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)
    cfg.setdefault('define_macros', []).append(('qh_QHpointer', '1'))
    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src,
                         **cfg)

    config.add_extension('ckdtree', sources=['ckdtree.c'])  # FIXME: cython

    config.add_extension('_distance_wrap',
        sources=[join('src', 'distance_wrap.c')],
        depends=[join('src', 'distance_impl.h')],
        # pass the flat list directly instead of nesting it in another list
        include_dirs=get_numpy_include_dirs())

    return config
Beispiel #17
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('dsolve',parent_package,top_path)
    config.add_data_dir('tests')

    # LAPACK/BLAS is mandatory here: notfound_action=2 raises when absent.
    lapack_opt = get_info('lapack_opt',notfound_action=2)

    # SuperLU preprocessor switches: no timer on Windows, vendor BLAS always.
    superlu_defs = [('NO_TIMER',1)] if sys.platform=='win32' else []
    superlu_defs.append(('USE_VENDOR_BLAS',1))

    superlu_src = os.path.join(dirname(__file__), 'SuperLU', 'SRC')

    config.add_library('superlu_src',
                       sources=[join(superlu_src, '*.c')],
                       macros=superlu_defs,
                       include_dirs=[superlu_src],
                       )

    # Python wrapper extension, linked against the SuperLU static library.
    config.add_extension('_superlu',
                         sources=['_superlumodule.c',
                                  '_superlu_utils.c',
                                  '_superluobject.c'],
                         libraries=['superlu_src'],
                         extra_info=lapack_opt,
                         )

    config.add_subpackage('umfpack')

    return config
Beispiel #18
0
def configuration(parent_package='', top_path=None):
    """Build configuration for the 'utils' subpackage."""
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration('utils', parent_package, top_path)

    config.add_subpackage('sparsetools')

    # cd fast needs CBLAS
    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        # No usable (ATLAS) BLAS: fall back to the bundled cblas sources.
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])

    config.add_extension('arrayfuncs',
                         sources=['arrayfuncs.c'],
                         depends=[join('src', 'cholesky_delete.c')],
                         libraries=cblas_libs,
                         # Concatenate the BLAS include dirs -- the old code
                         # appended the list itself as one element, nesting a
                         # list inside include_dirs.
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include()]
                                      + blas_info.pop('include_dirs', []),
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info
                         )

    config.add_extension('graph_shortest_path',
                         sources=['graph_shortest_path.c'],
                         include_dirs=[numpy.get_include()])

    return config
Beispiel #19
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the PySparse 'itsolvers' subpackage."""
    import os
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('itsolvers', parent_package, top_path)

    # Get BLAS info from site.cfg; warn (don't fail) when absent.
    blas_info = get_info('blas_opt', 0)
    if not blas_info:
        print('No blas info found')

    # Krylov-subspace iterative solver sources.
    itsolvers_src = ['itsolversmodule.c', 'bicgstab.c', 'cgs.c', 'gmres.c',
                     'minres.c', 'pcg.c', 'qmrs.c']
    config.add_extension(
        name='krylov',
        sources=[os.path.join('src', name) for name in itsolvers_src],
        libraries=[],
        include_dirs=['src'],
        extra_info=blas_info,
        )

    config.make_config_py()
    return config
Beispiel #20
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the 'solvers' subpackage (isolab)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('solvers',parent_package,top_path)
    lapack = dict(get_info('lapack_opt'))
    # The extension is identical on all three supported platforms; the only
    # difference was Darwin overriding the LAPACK search path, so the three
    # copy-pasted branches are collapsed into one.
    system = platform.system()
    if system == 'Darwin':
        lapack['library_dirs'] = ['/usr/lib']
    if system in ('Linux', 'Darwin', 'Windows'):
        config.add_extension('isolab', sources=['src/isolab.pyf',
                                                'src/solab.f90',
                                                'src/isolab.f90'],
                                       libraries=['lapack'],
                                       library_dirs=lapack['library_dirs'])
    return config
Beispiel #21
0
def configuration(parent_package="", top_path=None):
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration("odr", parent_package, top_path)

    # Core ODRPACK sources; a linear-algebra backend is appended below.
    odr_sources = ["d_odr.f", "d_mprec.f", "dlunoc.f"]

    blas_info = get_info("blas_opt")
    if blas_info:
        # A real BLAS is available: use the thin LAPACK bridge.
        odr_sources.append("d_lpk.f")
    else:
        # Fall back to the bundled reference BLAS routines.
        warnings.warn(BlasNotFoundError.__doc__)
        odr_sources.append("d_lpkbls.f")

    config.add_library("odrpack",
                       sources=[join("odrpack", f) for f in odr_sources])

    ext_libraries = ["odrpack"] + blas_info.pop("libraries", [])
    ext_include_dirs = ["."] + blas_info.pop("include_dirs", [])
    config.add_extension(
        "__odrpack", sources=["__odrpack.c"], libraries=ext_libraries,
        include_dirs=ext_include_dirs, depends=["odrpack.h"], **blas_info
    )

    config.add_data_dir("tests")
    return config
Beispiel #22
0
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, NotFoundError

    config = Configuration('isolve',parent_package,top_path)

    # Iterative solvers are useless without LAPACK/BLAS -- fail early.
    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # Reverse-communication iterative methods (the .f.src templates are
    # expanded by numpy.distutils).  Cheby/Jacobi/SOR are deliberately
    # left out of the build.
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
               'GMRESREVCOM.f.src',
               'QMRREVCOM.f.src',
               ]
    util = ['STOPTEST2.f.src', 'getbreak.f.src']
    all_sources = util + methods + ['_iterative.pyf.src']
    config.add_extension('_iterative',
                         sources=[join('iterative', f) for f in all_sources],
                         extra_info=lapack_opt
                         )

    config.add_data_dir('tests')

    return config
def configuration (parent_package=''):
    """Old-style build configuration for the ``linalg`` package.

    Requires ATLAS (the threaded build is preferred); raises
    ``AtlasNotFoundError`` when no ATLAS installation is found.
    """
    package = 'linalg'
    config = default_config_dict(package, parent_package)
    # This package builds no Fortran libraries of its own.
    del config['fortran_libraries']
    local_path = get_path(__name__)

    # Prefer the threaded ATLAS libraries; fall back to the serial build.
    # get_info returns an empty (falsy) dict when nothing is found.
    atlas_info = get_info('atlas_threads') or get_info('atlas')
    if not atlas_info:
        raise AtlasNotFoundError(AtlasNotFoundError.__doc__)

    # Link atlas_version.c against the last (most specific) ATLAS library.
    ext = Extension('atlas_version',
                    sources=[os.path.join(local_path, 'atlas_version.c')],
                    libraries=[atlas_info['libraries'][-1]],
                    library_dirs=atlas_info['library_dirs'])
    config['ext_modules'].append(ext)
    return config
Beispiel #24
0
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``odr`` subpackage.

    Uses an optimized BLAS when available, otherwise falls back to the
    bundled reference BLAS routines (with a warning).
    """
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration('odr', parent_package, top_path)

    fortran_files = ['d_odr.f', 'd_mprec.f', 'dlunoc.f']

    blas_info = get_info('blas_opt')
    if blas_info:
        # Optimized BLAS found: build the thin LAPACK shim only.
        fortran_files.append('d_lpk.f')
    else:
        # No BLAS: warn and compile the bundled reference routines.
        warnings.warn(BlasNotFoundError.__doc__)
        fortran_files.append('d_lpkbls.f')

    odrpack_src = [join('odrpack', name) for name in fortran_files]
    config.add_library('odrpack', sources=odrpack_src)

    # Pop the entries we merge manually so **blas_info does not pass
    # duplicate keyword arguments to add_extension.
    ext_libraries = ['odrpack'] + blas_info.pop('libraries', [])
    ext_include_dirs = ['.'] + blas_info.pop('include_dirs', [])
    config.add_extension('__odrpack',
                         sources=['__odrpack.c'],
                         libraries=ext_libraries,
                         include_dirs=ext_include_dirs,
                         depends=['odrpack.h'] + odrpack_src,
                         **blas_info)

    config.add_data_dir('tests')
    return config
Beispiel #25
0
def make_extension(**extra):
    """Assemble the keyword arguments for a distutils ``Extension``.

    Base values come from the pythran configuration (``cfg``); each entry
    in *extra* is appended to the corresponding list.  Returns a dict
    suitable for ``Extension(**result)``.
    """
    def parse_define(define):
        # Split "NAME=VALUE" into (NAME, VALUE); a bare "NAME" maps to
        # (NAME, None), the form distutils expects for -DNAME.
        index = define.find('=')
        if index < 0:
            return (define, None)
        else:
            return define[:index], define[index + 1:]
    extension = {
        "define_macros": cfg.get('compiler', 'defines').split(),
        "undef_macros": cfg.get('compiler', 'undefs').split(),
        "include_dirs": cfg.get('compiler', 'include_dirs').split(),
        "library_dirs": cfg.get('compiler', 'library_dirs').split(),
        "libraries": cfg.get('compiler', 'libs').split(),
        "extra_compile_args": cfg.get('compiler', 'cflags').split(),
        "extra_link_args": cfg.get('compiler', 'ldflags').split(),
    }

    extension['define_macros'].append('ENABLE_PYTHON_MODULE')

    here = os.path.dirname(os.path.dirname(__file__)) or '.'
    # using / as separator as advised in the distutils doc
    extension["include_dirs"].append(here + '/pythran')
    for k, w in extra.items():
        extension[k].extend(w)
    # BUG FIX: on Python 3, ``map`` returns a one-shot lazy iterator, so
    # define_macros could only be consumed once and no longer behaved like
    # the list every other entry is.  Materialize it as a real list.
    extension["define_macros"] = [parse_define(d)
                                  for d in extension["define_macros"]]
    if cfg.getboolean('pythran', 'complex_hook'):
        # the patch is *not* portable
        extension["include_dirs"].append(here + '/pythran/pythonic/patch')

    # blas dependency
    numpy_blas = numpy_sys.get_info("blas")
    extension['libraries'].extend(numpy_blas.get('libraries', []))
    extension['library_dirs'].extend(numpy_blas.get('library_dirs', []))
    return extension
Beispiel #26
0
def configuration(parent_package='',top_path=None):
    """Build configuration for ``numpy.linalg`` (the lapack_lite module)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('linalg', parent_package, top_path)

    config.add_data_dir('tests')

    # Configure lapack_lite.  An empty dict here means no optimized
    # LAPACK was found on the host.
    lapack_info = get_info('lapack_opt', 0)

    def get_lapack_lite_sources(ext, build_dir):
        # Pick the sources from ext.depends; its ordering is significant
        # (module, xerbla wrapper, f2c-translated LAPACK/BLAS, f2c.h last).
        if not lapack_info:
            # No optimized LAPACK: compile everything but the trailing
            # f2c.h header.
            print("### Warning:  Using unoptimized lapack ###")
            return ext.depends[:-1]
        if sys.platform == 'win32':
            # Optimized LAPACK on Windows: only the module source.
            print("### Warning:  python_xerbla.c is disabled ###")
            return ext.depends[:1]
        # Optimized LAPACK elsewhere: module source plus python_xerbla.c.
        return ext.depends[:2]

    config.add_extension('lapack_lite',
                         sources=[get_lapack_lite_sources],
                         depends=['lapack_litemodule.c',
                                  'python_xerbla.c',
                                  'zlapack_lite.c', 'dlapack_lite.c',
                                  'blas_lite.c', 'dlamch.c',
                                  'f2c_lite.c', 'f2c.h'],
                         extra_info=lapack_info)

    return config
Beispiel #27
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the bundled ARPACK eigensolver wrapper.

    Requires LAPACK/BLAS; raises ``NotFoundError`` otherwise.
    """
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils import get_g77_abi_wrappers, get_sgemv_fix

    lapack_opt = get_info('lapack_opt')

    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    config = Configuration('arpack', parent_package, top_path)

    # All bundled ARPACK Fortran sources plus g77 ABI compatibility shims.
    arpack_sources = [join('ARPACK', 'SRC', '*.f'),
                      join('ARPACK', 'UTIL', '*.f')]
    arpack_sources += get_g77_abi_wrappers(lapack_opt)

    config.add_library('arpack_scipy',
                       sources=arpack_sources,
                       include_dirs=[join('ARPACK', 'SRC')])

    # The extension itself: the f2py signature file plus an sgemv
    # workaround where the detected BLAS needs it.
    config.add_extension('_arpack',
                         sources=['arpack.pyf.src'] + get_sgemv_fix(lapack_opt),
                         libraries=['arpack_scipy'],
                         extra_info=lapack_opt,
                         depends=arpack_sources,
                         )

    config.add_data_dir('tests')

    # Ship the ARPACK license alongside the package.
    config.add_data_files('ARPACK/COPYING')

    return config
Beispiel #28
0
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``cluster`` subpackage.

    Links ``_k_means`` against an optimized BLAS when one is usable,
    otherwise against the bundled reference cblas.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (
        ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        # No usable optimized BLAS: use the bundled cblas and drop any
        # stale library list so it does not leak through **blas_info.
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])

    config = Configuration('cluster', parent_package, top_path)
    config.add_extension(
        '_inertia',
        sources=['_inertia.c'],
        include_dirs=[numpy.get_include()],
    )
    config.add_extension(
        '_k_means',
        libraries=cblas_libs,
        sources=['_k_means.c'],
        # BUG FIX: the BLAS include directories were previously appended as
        # a single nested-list element of include_dirs; concatenate them so
        # the list stays flat and every entry is a directory string.
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include()] + blas_info.pop('include_dirs', []),
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info
    )
    return config
Beispiel #29
0
def configuration(parent_package='',top_path=None):
    """Build configuration for the pycsdp wrapper around CSDP.

    Adjust ``libname`` (so libsdp.so/a <-> sdp), the library path and the
    location of the "declarations.h" file below for your installation.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('pycsdp', parent_package, top_path)

    libname = 'sdp'
    library_dir = [SAGE_LIB]
    includes = [SAGE_INCLUDE, SAGE_INCLUDE + '/csdp', '.']

    # CSDP C/C++ sources first, then the SWIG-style wrapper.
    sources = [join('Src/', pattern) for pattern in ('*.cxx', '*.c')]
    sources.append('_csdp.cpp')

    lapack_opt = get_info('lapack_opt')

    config.add_extension('_csdp',
                         sources=sources,
                         include_dirs=includes,
                         library_dirs=library_dir,
                         extra_compile_args=['-funroll-loops'],
                         define_macros=[('NOSHORTS', None)],
                         libraries=[libname],
                         extra_info=lapack_opt)
    return config
Beispiel #30
0
def configuration(parent_package='',top_path=None):
    """Build configuration for ``numpy.random`` (the mtrand extension)."""
    from numpy.distutils.misc_util import Configuration, get_mathlibs

    config = Configuration('random', parent_package, top_path)

    def generate_libraries(ext, build_dir):
        # Resolved at build time: the system math libraries, plus Advapi32
        # when the Windows crypto-API test program links and runs.
        config_cmd = config.get_config_cmd()
        libs = get_mathlibs()
        if config_cmd.try_run(testcode_wincrypt()):
            libs.append('Advapi32')
        ext.libraries.extend(libs)
        return None

    # Configure mtrand
    c_files = ['mtrand.c', 'randomkit.c', 'initarray.c', 'distributions.c']
    config.add_extension('mtrand',
                         sources=([join('mtrand', name) for name in c_files]
                                  + [generate_libraries]),
                         depends=[join('mtrand', '*.h'),
                                  join('mtrand', '*.pyx'),
                                  join('mtrand', '*.pxi'),
                                  ],
                         **get_info('ndarray'))

    config.add_data_files(('.', join('mtrand', 'randomkit.h')))
    config.add_data_dir('tests')

    return config
Beispiel #31
0
# Taken from https://gist.github.com/alimuldal/eb0f4eea8af331b2a890
#
# Quick sanity benchmark: report the detected BLAS configuration, then time
# a 1000x1000 matrix product to see whether numpy is linked against an
# optimized BLAS.

import numpy
from numpy.distutils.system_info import get_info
import sys
import timeit

print("version: %s" % numpy.__version__)
# BUG FIX: sys.maxint was removed in Python 3; sys.maxsize exists on both
# Python 2 and Python 3.
print("maxint:  %i\n" % sys.maxsize)

info = get_info('blas_opt')
print('BLAS info:')
# BUG FIX: dict.iteritems() was removed in Python 3; items() works on both.
for kk, vv in info.items():
    print(' * ' + kk + ' ' + str(vv))

setup = "import numpy; x = numpy.random.random((1000, 1000))"
count = 10

t = timeit.Timer("numpy.dot(x, x.T)", setup=setup)
print("\ndot: %f sec" % (t.timeit(count) / count))
Beispiel #32
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                         headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in [
                        'INTEL_EXTENDED_12_BYTES_LE',
                        'INTEL_EXTENDED_16_BYTES_LE',
                        'MOTOROLA_EXTENDED_12_BYTES_BE', 'IEEE_QUAD_LE',
                        'IEEE_QUAD_BE', 'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                        'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE'
                ]:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" %
                                     rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers headers
        config.add_include_dirs(join(build_dir, "src", "private"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
    config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
    config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [
        join('src', 'npymath', '_signbit.c'),
        join('include', 'numpy', '*object.h'),
        'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
    ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[
                             join('src', 'dummymodule.c'),
                             generate_config_h,
                             generate_numpyconfig_h,
                             generate_numpy_api,
                             generate_ufunc_api,
                         ])

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substition dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [
        join('src', 'npymath', 'npy_math.c.src'),
        join('src', 'npymath', 'ieee754.c.src'),
        join('src', 'npymath', 'npy_math_complex.c.src'),
        join('src', 'npymath', 'halffloat.c')
    ]
    config.add_installed_library('npymath',
                                 sources=npymath_sources + [get_mathlib_info],
                                 install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [
        join('src', 'npysort', 'quicksort.c.src'),
        join('src', 'npysort', 'mergesort.c.src'),
        join('src', 'npysort', 'heapsort.c.src'),
        join('src', 'private', 'npy_partition.h.src'),
        join('src', 'npysort', 'selection.c.src'),
        join('src', 'private', 'npy_binsearch.h.src'),
        join('src', 'npysort', 'binsearch.c.src'),
    ]
    if '__pypy__' not in sys.builtin_module_names:
        config.add_library('npysort', sources=npysort_sources, include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [
            join(local_dir, subpath, 'scalartypes.c.src'),
            join(local_dir, subpath, 'arraytypes.c.src'),
            join(local_dir, subpath, 'nditer_templ.c.src'),
            join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
            join(local_dir, subpath, 'einsum.c.src')
        ]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'array_assign.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'numpymemoryview.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'numpyos.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'ucsnarrow.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'private', 'lowlevel_strided_loops.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
        join('include', 'numpy', '_numpyconfig.h.in'),
        # add library sources as distuils does not consider libraries
        # dependencies
    ] + npysort_sources + npymath_sources

    multiarray_src = [
        join('src', 'multiarray', 'alloc.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_assign.c'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'numpymemoryview.c'),
        join('src', 'multiarray', 'numpyos.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'ucsnarrow.c')
    ]

    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [
            join('src', 'multiarray', 'multiarraymodule_onefile.c')
        ]
        multiarray_src.append(generate_multiarray_templated_sources)

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'multiarray',
            sources=multiarray_src + [
                generate_config_h, generate_numpyconfig_h, generate_numpy_api,
                join(codegen_dir, 'generate_numpy_api.py'),
                join('*.py')
            ],
            depends=deps + multiarray_deps,
            libraries=['npymath', 'npysort'])

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [
            join(local_dir, subpath, 'loops.h.src'),
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'simd.inc.src')
        ]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(
                generate_umath.make_code(generate_umath.defdict,
                                         generate_umath.__file__))
            f.close()
        return []

    # Sources for the umath extension; .src files are converted to .c/.h by
    # the numpy.distutils template system before compilation.
    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'loops.h.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'ufunc_type_resolution.c')
    ]

    # Files that, when touched, force a rebuild of the umath extension.
    umath_deps = [
        generate_umath_py,
        join('src', 'multiarray', 'common.h'),
        join('src', 'umath', 'simd.inc.src'),
        join(codegen_dir, 'generate_ufunc_api.py'),
        join('src', 'private', 'ufunc_override.h')
    ] + npymath_sources

    if not ENABLE_SEPARATE_COMPILATION:
        # Single-translation-unit build: compile only umathmodule_onefile.c
        # (which pulls in the rest) and demote the individual sources to
        # dependencies.  The template conversion still has to run, hence the
        # generate_umath_templated_sources callable and the .src entries.
        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))
        umath_src.append(join('src', 'umath', 'simd.inc.src'))

    # The '__pypy__' guards below skip these CPython extension modules when
    # this setup script runs under PyPy.
    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'umath',
            sources=umath_src + [
                generate_config_h, generate_numpyconfig_h, generate_umath_c,
                generate_ufunc_api
            ],
            depends=deps + umath_deps,
            libraries=['npymath'],
        )

    #######################################################################
    #                         scalarmath module                           #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'scalarmath',
            sources=[
                join('src', 'scalarmathmodule.c.src'),
                join('src', 'private',
                     'scalarmathmodule.h.src'), generate_config_h,
                generate_numpyconfig_h, generate_numpy_api, generate_ufunc_api
            ],
            depends=deps + npymath_sources,
            libraries=['npymath'],
        )

    #######################################################################
    #                          _dotblas module                            #
    #######################################################################

    # Configure blasdot
    blas_info = get_info('blas_opt', 0)

    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        """Return the _dotblas sources, or None to skip building it."""
        if blas_info:
            if ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', []):
                return None  # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None  # no extension module will be built

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension('_dotblas',
                             sources=[get_dotblas_sources],
                             depends=[
                                 join('blasdot', '_dotblas.c'),
                                 join('blasdot', 'cblas.h'),
                             ],
                             include_dirs=['blasdot'],
                             extra_info=blas_info)

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'umath_tests', sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'test_rational',
            sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'struct_ufunc_test',
            sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'multiarray_tests',
            sources=[join('src', 'multiarray', 'multiarray_tests.c.src')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    if '__pypy__' not in sys.builtin_module_names:
        config.add_extension(
            'operand_flag_tests',
            sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``numpy.core`` package.

    Registers the configure-style header generators (config.h,
    _numpyconfig.h, the numpy/ufunc C API headers), builds the npymath and
    npysort helper libraries, and adds the core extension modules,
    most importantly ``_multiarray_umath``.
    """
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    # For released versions, a C API version mismatch is a hard error.
    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    # Load code_generators/generate_umath.py as a module under a
    # package-unique name so it can be used by the generators below.
    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')), generate_umath_py,
                                     ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    # Caches the expensive configure-time checks so they run only once even
    # though both header generators need their results.
    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        """Generate <build_dir>/include/numpy/config.h from configure-style
        checks, and append the detected math libraries to ext.libraries."""
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        # Only regenerate when this setup file is newer than the target.
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            # NOTE: the loop variable 'd' reuses the dirname variable above;
            # harmless since the dirname is no longer needed at this point.
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            # Dump the generated header to stdout for build-log debugging.
            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            # Header is up to date: recover MATHLIB from the existing file so
            # ext.libraries can still be extended below.
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            # Unlike generate_config_h, this uses the *second* element of the
            # cached check_types result (the public/_numpyconfig defines).
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            # Second tuple elements again: the public counterparts of the
            # checks consumed in generate_config_h.
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a source-generator callable that runs the named
        code_generators script and registers its output headers."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            # Temporarily put codegen_dir on sys.path so the script imports.
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(
                    os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file), (header_dir, doc_file))
            return (h_file, )

        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    # Include paths used by every extension registered below.
    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([
        ("NPY_INTERNAL_BUILD", "1")
    ])  # this macro indicates that Numpy build is in process
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    # Large-file support: AIX uses _LARGE_FILES, everything else the
    # _FILE_OFFSET_BITS / _LARGEFILE*_SOURCE trio.
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    # Dependencies common to several extensions below.
    deps = [
        join('src', 'npymath', '_signbit.c'),
        join('include', 'numpy', '*object.h'),
        join(codegen_dir, 'genapi.py'),
    ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[
                             join('src', 'dummymodule.c'), generate_config_h,
                             generate_numpyconfig_h, generate_numpy_api
                         ])

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    # Substitutions applied to the npy-pkg-config .ini templates below.
    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError(
                "Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        # Render the math libraries in both POSIX (-lm) and MSVC (m.lib)
        # linker syntax for the pkg-config templates.
        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [
        join('src', 'npymath', 'npy_math_internal.h.src'),
        join('src', 'npymath', 'npy_math.c'),
        join('src', 'npymath', 'ieee754.c.src'),
        join('src', 'npymath', 'npy_math_complex.c.src'),
        join('src', 'npymath', 'halffloat.c')
    ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    # Intel and Clang also don't seem happy with /GL
    is_msvc = (platform.platform().startswith('Windows')
               and platform.python_compiler().startswith('MS'))
    config.add_installed_library(
        'npymath',
        sources=npymath_sources + [get_mathlib_info],
        install_dir='lib',
        build_info={
            'include_dirs':
            [],  # empty list required for creating npy_math_internal.h
            'extra_compiler_args': (['/GL-'] if is_msvc else []),
        })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config", subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [
        join('src', 'common', 'npy_sort.h.src'),
        join('src', 'npysort', 'quicksort.c.src'),
        join('src', 'npysort', 'mergesort.c.src'),
        join('src', 'npysort', 'heapsort.c.src'),
        join('src', 'common', 'npy_partition.h.src'),
        join('src', 'npysort', 'selection.c.src'),
        join('src', 'common', 'npy_binsearch.h.src'),
        join('src', 'npysort', 'binsearch.c.src'),
    ]
    config.add_library('npysort', sources=npysort_sources, include_dirs=[])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('_multiarray_tests',
                         sources=[
                             join('src', 'multiarray',
                                  '_multiarray_tests.c.src'),
                             join('src', 'common', 'mem_overlap.c')
                         ],
                         depends=[
                             join('src', 'common', 'mem_overlap.h'),
                             join('src', 'common', 'npy_extint128.h')
                         ],
                         libraries=['npymath'])

    #######################################################################
    #             _multiarray_umath module - common part                  #
    #######################################################################

    common_deps = [
        join('src', 'common', 'array_assign.h'),
        join('src', 'common', 'binop_override.h'),
        join('src', 'common', 'cblasfuncs.h'),
        join('src', 'common', 'lowlevel_strided_loops.h'),
        join('src', 'common', 'mem_overlap.h'),
        join('src', 'common', 'npy_cblas.h'),
        join('src', 'common', 'npy_config.h'),
        join('src', 'common', 'npy_ctypes.h'),
        join('src', 'common', 'npy_extint128.h'),
        join('src', 'common', 'npy_import.h'),
        join('src', 'common', 'npy_longdouble.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.h'),
        join('src', 'common', 'ufunc_override.h'),
        join('src', 'common', 'umathmodule.h'),
        join('src', 'common', 'numpyos.h'),
    ]

    common_src = [
        join('src', 'common', 'array_assign.c'),
        join('src', 'common', 'mem_overlap.c'),
        join('src', 'common', 'npy_longdouble.c'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'common', 'ucsnarrow.c'),
        join('src', 'common', 'ufunc_override.c'),
        join('src', 'common', 'numpyos.c'),
    ]

    # Link against an optimized BLAS only when it exposes a CBLAS interface.
    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get(
            'define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([
            join('src', 'common', 'cblasfuncs.c'),
            join('src', 'common', 'python_xerbla.c'),
        ])
        if uses_accelerate_framework(blas_info):
            common_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
        join('src', 'multiarray', 'arrayobject.h'),
        join('src', 'multiarray', 'arraytypes.h'),
        join('src', 'multiarray', 'arrayfunction_override.h'),
        join('src', 'multiarray', 'buffer.h'),
        join('src', 'multiarray', 'calculation.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'convert_datatype.h'),
        join('src', 'multiarray', 'convert.h'),
        join('src', 'multiarray', 'conversion_utils.h'),
        join('src', 'multiarray', 'ctors.h'),
        join('src', 'multiarray', 'descriptor.h'),
        join('src', 'multiarray', 'dragon4.h'),
        join('src', 'multiarray', 'getset.h'),
        join('src', 'multiarray', 'hashdescr.h'),
        join('src', 'multiarray', 'iterators.h'),
        join('src', 'multiarray', 'mapping.h'),
        join('src', 'multiarray', 'methods.h'),
        join('src', 'multiarray', 'multiarraymodule.h'),
        join('src', 'multiarray', 'nditer_impl.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'multiarray', 'refcount.h'),
        join('src', 'multiarray', 'scalartypes.h'),
        join('src', 'multiarray', 'sequence.h'),
        join('src', 'multiarray', 'shape.h'),
        join('src', 'multiarray', 'strfuncs.h'),
        join('src', 'multiarray', 'typeinfo.h'),
        join('src', 'multiarray', 'usertypes.h'),
        join('src', 'multiarray', 'vdot.h'),
        join('include', 'numpy', 'arrayobject.h'),
        join('include', 'numpy', '_neighborhood_iterator_imp.h'),
        join('include', 'numpy', 'npy_endian.h'),
        join('include', 'numpy', 'arrayscalars.h'),
        join('include', 'numpy', 'noprefix.h'),
        join('include', 'numpy', 'npy_interrupt.h'),
        join('include', 'numpy', 'npy_3kcompat.h'),
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('include', 'numpy', 'npy_common.h'),
        join('include', 'numpy', 'npy_os.h'),
        join('include', 'numpy', 'utils.h'),
        join('include', 'numpy', 'ndarrayobject.h'),
        join('include', 'numpy', 'npy_cpu.h'),
        join('include', 'numpy', 'numpyconfig.h'),
        join('include', 'numpy', 'ndarraytypes.h'),
        join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
        # add library sources as distuils does not consider libraries
        # dependencies
    ] + npysort_sources + npymath_sources

    multiarray_src = [
        join('src', 'multiarray', 'alloc.c'),
        join('src', 'multiarray', 'arrayobject.c'),
        join('src', 'multiarray', 'arraytypes.c.src'),
        join('src', 'multiarray', 'array_assign_scalar.c'),
        join('src', 'multiarray', 'array_assign_array.c'),
        join('src', 'multiarray', 'arrayfunction_override.c'),
        join('src', 'multiarray', 'buffer.c'),
        join('src', 'multiarray', 'calculation.c'),
        join('src', 'multiarray', 'compiled_base.c'),
        join('src', 'multiarray', 'common.c'),
        join('src', 'multiarray', 'convert.c'),
        join('src', 'multiarray', 'convert_datatype.c'),
        join('src', 'multiarray', 'conversion_utils.c'),
        join('src', 'multiarray', 'ctors.c'),
        join('src', 'multiarray', 'datetime.c'),
        join('src', 'multiarray', 'datetime_strings.c'),
        join('src', 'multiarray', 'datetime_busday.c'),
        join('src', 'multiarray', 'datetime_busdaycal.c'),
        join('src', 'multiarray', 'descriptor.c'),
        join('src', 'multiarray', 'dragon4.c'),
        join('src', 'multiarray', 'dtype_transfer.c'),
        join('src', 'multiarray', 'einsum.c.src'),
        join('src', 'multiarray', 'flagsobject.c'),
        join('src', 'multiarray', 'getset.c'),
        join('src', 'multiarray', 'hashdescr.c'),
        join('src', 'multiarray', 'item_selection.c'),
        join('src', 'multiarray', 'iterators.c'),
        join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
        join('src', 'multiarray', 'mapping.c'),
        join('src', 'multiarray', 'methods.c'),
        join('src', 'multiarray', 'multiarraymodule.c'),
        join('src', 'multiarray', 'nditer_templ.c.src'),
        join('src', 'multiarray', 'nditer_api.c'),
        join('src', 'multiarray', 'nditer_constr.c'),
        join('src', 'multiarray', 'nditer_pywrap.c'),
        join('src', 'multiarray', 'number.c'),
        join('src', 'multiarray', 'refcount.c'),
        join('src', 'multiarray', 'sequence.c'),
        join('src', 'multiarray', 'shape.c'),
        join('src', 'multiarray', 'scalarapi.c'),
        join('src', 'multiarray', 'scalartypes.c.src'),
        join('src', 'multiarray', 'strfuncs.c'),
        join('src', 'multiarray', 'temp_elide.c'),
        join('src', 'multiarray', 'typeinfo.c'),
        join('src', 'multiarray', 'usertypes.c'),
        join('src', 'multiarray', 'vdot.c'),
    ]

    #######################################################################
    #             _multiarray_umath module - umath part                   #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        """Generate __umath_generated.c from generate_umath.py if stale."""
        target = join(build_dir, header_dir, '__umath_generated.c')
        # NOTE(review): 'dir' shadows the builtin; harmless locally.
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(
                generate_umath.make_code(generate_umath.defdict,
                                         generate_umath.__file__))
            f.close()
        return []

    umath_src = [
        join('src', 'umath', 'umathmodule.c'),
        join('src', 'umath', 'reduction.c'),
        join('src', 'umath', 'funcs.inc.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'loops.h.src'),
        join('src', 'umath', 'loops.c.src'),
        join('src', 'umath', 'matmul.h.src'),
        join('src', 'umath', 'matmul.c.src'),
        join('src', 'umath', 'ufunc_object.c'),
        join('src', 'umath', 'extobj.c'),
        join('src', 'umath', 'cpuid.c'),
        join('src', 'umath', 'scalarmath.c.src'),
        join('src', 'umath', 'ufunc_type_resolution.c'),
        join('src', 'umath', 'override.c'),
    ]

    umath_deps = [
        generate_umath_py,
        join('include', 'numpy', 'npy_math.h'),
        join('include', 'numpy', 'halffloat.h'),
        join('src', 'multiarray', 'common.h'),
        join('src', 'multiarray', 'number.h'),
        join('src', 'common', 'templ_common.h.src'),
        join('src', 'umath', 'simd.inc.src'),
        join('src', 'umath', 'override.h'),
        join(codegen_dir, 'generate_ufunc_api.py'),
    ]

    # The central extension: multiarray and umath merged into one module,
    # with the generator callables interleaved among the C sources.
    config.add_extension(
        '_multiarray_umath',
        sources=multiarray_src + umath_src + npymath_sources + common_src + [
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
            join(codegen_dir, 'generate_numpy_api.py'),
            join('*.py'),
            generate_umath_c,
            generate_ufunc_api,
        ],
        depends=deps + multiarray_deps + umath_deps + common_deps,
        libraries=['npymath', 'npysort'],
        extra_info=extra_info)

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('_umath_tests',
                         sources=[join('src', 'umath', '_umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension(
        '_rational_tests',
        sources=[join('src', 'umath', '_rational_tests.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension(
        '_struct_ufunc_tests',
        sources=[join('src', 'umath', '_struct_ufunc_tests.c.src')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension(
        '_operand_flag_tests',
        sources=[join('src', 'umath', '_operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
Beispiel #34
0
#from Cython.Distutils import build_ext

# Directory containing this setup script; used to resolve relative paths.
base_path = os.path.abspath(os.path.dirname(__file__))

# LAPACK build-info dict from numpy.distutils; stays None when LAPACK is
# not found (or on Windows, where hard-coded paths below are used instead).
lapack_opt = None

def win():
    """
    Return True if running on a Windows system.
    """
    # Membership test returned directly instead of an if/return pair.
    return platform.system() in ("Windows", "win32")
if not win():
    # Ask numpy.distutils for an optimized LAPACK; notfound_action=2 makes
    # get_info raise when nothing is found.
    try:
        lapack_opt = get_info('lapack_opt', notfound_action=2)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; missing LAPACK just disables the sundials solvers.
        print('LAPACK not found, no sundials solvers')

if win():
    print ('In win')
    # Hard-coded MinGW / SUNDIALS install locations for Windows builds.
    # NOTE(review): these paths are machine-specific (MinGW 4.6.2, a local
    # LAPACK 3.4.1 tree) — verify they match the local toolchain.
    INCL_DIRS_LAPACK = ['C:/MinGW/lib/Lapack/lapack-3.4.1/SRC']
    LIB_DIRS_LAPACK = ['C:/MinGW/lib/Lapack/lib']
    LIBS_LAPACK = ['lapack', 'blas']
    INCL_DIRS_SUNDIALS = ['C:/Program Files/sundials/include']
    LIB_DIRS_SUNDIALS  = ['C:/Program Files/sundials', 'C:/Program Files/sundials/lib']
    LIBS_SUNDIALS = ['sundials_nvecserial']
    LIBS_IDA   = ['sundials_ida']
    LIBS_CVODE = ['sundials_cvode']
    LIB_DIRS_GFORTRAN = ['C:/MinGW/lib/gcc/mingw32/4.6.2']
    LIBS_FORTRAN = ['gfortran']
Beispiel #35
0
# Path constants for the version file and the bundled C sources.
VERSION_FILE_PATH = os.path.join(BASE_PATH, '_version.py')
C_SRC_PATH = os.path.join(BASE_PATH, 'src')
LAPACK_LITE_PATH = os.path.join(C_SRC_PATH, 'lapack_lite')

# Configure versioneer to derive version numbers from git tags.
versioneer.versionfile_source = VERSION_FILE_PATH
versioneer.versionfile_build = VERSION_FILE_PATH
versioneer.tag_prefix = ''
versioneer.parentdir_prefix = 'gulinalg-'

# Use information about the LAPACK library used in NumPy.
# if not present, fallback to using the included lapack-lite

MODULE_SOURCES = [os.path.join(C_SRC_PATH, 'gulinalg.c.src')]
# Copied so extending one list below does not mutate the other.
MODULE_DEPENDENCIES = copy.copy(MODULE_SOURCES)

lapack_info = np_sys_info.get_info('lapack_opt', 0)
# Bundled fallback implementation (lapack_lite) used when no optimized
# LAPACK is available through NumPy; f2c.h is a header, the rest are C.
lapack_lite_files = [
    os.path.join(LAPACK_LITE_PATH, f) for f in [
        'python_xerbla.c', 'zlapack_lite.c', 'dlapack_lite.c', 'blas_lite.c',
        'dlamch.c', 'f2c_lite.c', 'f2c.h'
    ]
]

if not lapack_info:
    # No LAPACK in NumPy
    print('### Warning: Using unoptimized blas/lapack @@@')
    MODULE_SOURCES.extend(lapack_lite_files[:-1])  # all but f2c.h
    MODULE_DEPENDENCIES.extend(lapack_lite_files)
else:
    if sys.platform == 'win32':
        print('### Warning: python.xerbla.c is disabled ###')
Beispiel #36
0
def make_extension(python, **extra):
    """Build a keyword dict describing a Pythran C++ extension.

    Parameters
    ----------
    python : bool
        When true, configure a full Python extension module (adds the
        ENABLE_PYTHON_MODULE macro and NumPy's include directory).
    **extra : dict
        Extra entries merged into the matching extension lists. Special
        keys: ``config`` is forwarded to ``init_cfg`` and removed,
        ``cxx``/``cc`` select compilers, ``language`` is ignored (always
        c++).

    Returns
    -------
    dict
        Keys follow the distutils ``Extension`` keywords, plus
        ``cxx``/``cc`` when a compiler is resolved.
    """
    # load platform specific configuration then user configuration
    cfg = init_cfg('pythran.cfg', 'pythran-{}.cfg'.format(sys.platform),
                   '.pythranrc', extra.get('config', None))

    # 'config' was only meant for init_cfg; drop it so the merge loop
    # below does not see it.
    if 'config' in extra:
        extra.pop('config')

    def parse_define(define):
        # "NAME=VALUE" -> (NAME, VALUE); bare "NAME" -> (NAME, None),
        # matching the distutils define_macros convention.
        index = define.find('=')
        if index < 0:
            return (define, None)
        else:
            return define[:index], define[index + 1:]

    extension = {
        "language":
        "c++",
        # forcing str conversion to handle Unicode case (the default on MS)
        "define_macros":
        [str(x) for x in cfg.get('compiler', 'defines').split()],
        "undef_macros":
        [str(x) for x in cfg.get('compiler', 'undefs').split()],
        "include_dirs":
        [str(x) for x in cfg.get('compiler', 'include_dirs').split()],
        "library_dirs":
        [str(x) for x in cfg.get('compiler', 'library_dirs').split()],
        "libraries": [str(x) for x in cfg.get('compiler', 'libs').split()],
        "extra_compile_args":
        [str(x) for x in cfg.get('compiler', 'cflags').split()],
        "extra_link_args":
        [str(x) for x in cfg.get('compiler', 'ldflags').split()],
        "extra_objects": []
    }

    if python:
        extension['define_macros'].append('ENABLE_PYTHON_MODULE')
    extension['define_macros'].append('__PYTHRAN__={}'.format(
        sys.version_info.major))

    here = os.path.dirname(os.path.dirname(__file__)) or '.'
    # using / as separator as advised in the distutils doc
    extension["include_dirs"].append(here + '/pythran')

    extra.pop('language', None)  # forced to c++ anyway
    cxx = extra.pop('cxx', None)
    cc = extra.pop('cc', None)

    if cxx is None:
        cxx = compiler()
    if cxx is not None:
        extension['cxx'] = cxx
        extension['cc'] = cc or cxx

    # Merge any remaining user-supplied entries into the matching lists.
    for k, w in extra.items():
        extension[k].extend(w)
    if cfg.getboolean('pythran', 'complex_hook'):
        # the patch is *not* portable
        extension["include_dirs"].append(here + '/pythran/pythonic/patch')

    # Numpy can pollute stdout with warning message which should be on stderr
    old_stdout = sys.stdout
    try:
        sys.stdout = sys.stderr

        # numpy specific
        if python:
            extension['include_dirs'].append(numpy.get_include())

        # blas dependency
        reserved_blas_entries = 'pythran-openblas', 'none'
        user_blas = cfg.get('compiler', 'blas')
        if user_blas == 'pythran-openblas':
            try:
                import pythran_openblas as openblas
                # required to cope with atlas missing extern "C"
                extension['define_macros'].append('PYTHRAN_BLAS_OPENBLAS')
                extension['include_dirs'].extend(openblas.include_dirs)
                extension['extra_objects'].append(
                    os.path.join(openblas.library_dir,
                                 openblas.static_library))
            except ImportError:
                logger.warning("Failed to find 'pythran-openblas' package. "
                               "Please install it or change the compiler.blas "
                               "setting. Defaulting to 'blas'")
                user_blas = 'blas'
        elif user_blas == 'none':
            extension['define_macros'].append('PYTHRAN_BLAS_NONE')

        if user_blas not in reserved_blas_entries:
            # Any other value is treated as a numpy.distutils section name.
            numpy_blas = numpy_sys.get_info(user_blas)
            # required to cope with atlas missing extern "C"
            extension['define_macros'].append('PYTHRAN_BLAS_{}'.format(
                user_blas.upper()))
            extension['libraries'].extend(numpy_blas.get('libraries', []))
            extension['library_dirs'].extend(numpy_blas.get(
                'library_dirs', []))
            extension['include_dirs'].extend(numpy_blas.get(
                'include_dirs', []))
    finally:
        sys.stdout = old_stdout

    # final macro normalization
    extension["define_macros"] = [
        dm if isinstance(dm, tuple) else parse_define(dm)
        for dm in extension["define_macros"]
    ]
    return extension
Beispiel #37
0
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the LAPACK wrappers.

    Requires an optimized LAPACK (``notfound_action=2`` raises otherwise).
    Relies on the module-level names ``skip_single_routines`` and
    ``tmpl_empty_clapack_pyf`` defined elsewhere in this file.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('lapack', parent_package, top_path)

    lapack_opt = get_info('lapack_opt', notfound_action=2)

    # Extract the ATLAS version (if any) from the ATLAS_INFO define macro;
    # the [3:-3] slice strips quoting from the macro value, and the
    # trailing +[None] makes the result default to None when absent.
    atlas_version = ([v[3:-3] for k,v in lapack_opt.get('define_macros',[]) \
                      if k=='ATLAS_INFO']+[None])[0]
    if atlas_version:
        print('ATLAS version: %s' % atlas_version)

    target_dir = ''
    skip_names = {'clapack': [], 'flapack': []}
    if skip_single_routines:
        # Exclude all single-precision (s/c-prefixed) routines from f2py.
        target_dir = 'dbl'
        skip_names['clapack'].extend(\
            'sgesv cgesv sgetrf cgetrf sgetrs cgetrs sgetri cgetri'\
            ' sposv cposv spotrf cpotrf spotrs cpotrs spotri cpotri'\
            ' slauum clauum strtri ctrtri'.split())
        skip_names['flapack'].extend(skip_names['clapack'])
        skip_names['flapack'].extend(\
            'sgesdd cgesdd sgelss cgelss sgeqrf cgeqrf sgeev cgeev'\
            ' sgegv cgegv ssyev cheev slaswp claswp sgees cgees'
            ' sggev cggev'.split())

    # Work around routines known to be broken in specific ATLAS releases.
    if atlas_version == '3.2.1_pre3.3.6':
        target_dir = os.path.join(target_dir, 'atlas321')
        skip_names['clapack'].extend(\
            'sgetri dgetri cgetri zgetri spotri dpotri cpotri zpotri'\
            ' slauum dlauum clauum zlauum strtri dtrtri ctrtri ztrtri'.split())
    elif atlas_version and atlas_version > '3.4.0' and atlas_version <= '3.5.12':
        skip_names['clapack'].extend('cpotrf zpotrf'.split())

    # flapack:
    config.add_extension('flapack',
                         sources=['flapack.pyf.src'],
                         depends=[__file__, 'flapack_*.pyf.src'],
                         f2py_options=['skip:'] + skip_names['flapack'] +
                         [':'],
                         extra_info=lapack_opt)

    # clapack:
    def get_clapack_source(ext, build_dir):
        # Without ATLAS there is no C LAPACK interface: generate an empty
        # .pyf stub instead of wrapping clapack.pyf.src.
        name = ext.name.split('.')[-1]
        assert name == 'clapack', repr(name)
        if atlas_version is None:
            target = os.path.join(build_dir, target_dir, 'clapack.pyf')
            from distutils.dep_util import newer
            if newer(__file__, target):
                f = open(target, 'w')
                f.write(tmpl_empty_clapack_pyf)
                f.close()
        else:
            target = ext.depends[0]
            assert os.path.basename(target) == 'clapack.pyf.src'
        return target

    config.add_extension('clapack',
                         sources=[get_clapack_source],
                         depends=['clapack.pyf.src'],
                         f2py_options=['skip:'] + skip_names['clapack'] +
                         [':'],
                         extra_info=lapack_opt)

    # calc_lwork:
    config.add_extension('calc_lwork',
                         sources=['calc_lwork.f'],
                         extra_info=lapack_opt)

    # atlas_version:
    if os.name == 'nt' and 'FPATH' in os.environ:
        define_macros = [('NO_ATLAS_INFO', 1)]
    else:
        define_macros = []

    config.add_extension('atlas_version',
                         sources=['atlas_version.c'],
                         extra_info=lapack_opt,
                         define_macros=define_macros)

    config.add_data_dir('tests')

    return config
Beispiel #38
0
def configuration(parent_package='', top_path=None):
    """Assemble the numpy.distutils configuration for ``integrate``.

    Builds the bundled Fortran helper libraries (linpack_lite, mach,
    quadpack, odepack, dop) and the C/f2py extension modules that link
    against them. An optimized BLAS is required (raises when absent).
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('integrate', parent_package, top_path)

    # Abort configuration when no optimized BLAS is available.
    blas_info = get_info('blas_opt', notfound_action=2)

    # Fortran/C sources, grouped per helper library.
    src_linpack = [join('linpack_lite', '*.f')]
    src_mach = [join('mach', '*.f')]
    src_quadpack = [join('quadpack', '*.f')]
    src_odepack = [join('odepack', '*.f')]
    src_dop = [join('dop', '*.f')]
    src_quadpack_tests = [join('tests', '_test_multivariate.c')]

    config.add_library('linpack_lite', sources=src_linpack)
    # mach probes machine constants and must not be optimized.
    config.add_library('mach',
                       sources=src_mach,
                       config_fc={'noopt': (__file__, 1)})
    for lib_name, lib_sources in (('quadpack', src_quadpack),
                                  ('odepack', src_odepack),
                                  ('dop', src_dop)):
        config.add_library(lib_name, sources=lib_sources)
    # Someday these Fortran files could be weeded through and replaced
    # with calls to LAPACK routines.

    # quadpack extension:
    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         libraries=['quadpack', 'linpack_lite', 'mach'],
                         depends=(['quadpack.h', '__quadpack.h'] +
                                  src_quadpack + src_linpack + src_mach))

    # odepack-based extensions link the helper libraries plus whatever
    # libraries BLAS provides (the key may be missing, e.g. on OS X).
    libs = ['odepack', 'linpack_lite', 'mach'] + blas_info.get('libraries', [])
    # Forward every BLAS build flag except 'libraries' (handled above).
    newblas = {key: value
               for key, value in blas_info.items()
               if key != 'libraries'}

    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         libraries=libs,
                         depends=(['__odepack.h', 'multipack.h'] +
                                  src_odepack + src_linpack + src_mach),
                         **newblas)

    # vode
    config.add_extension('vode',
                         sources=['vode.pyf'],
                         libraries=libs,
                         depends=(src_odepack + src_linpack + src_mach),
                         **newblas)

    # lsoda
    config.add_extension('lsoda',
                         sources=['lsoda.pyf'],
                         libraries=libs,
                         depends=(src_odepack + src_linpack + src_mach),
                         **newblas)

    # dop needs no BLAS information at all.
    config.add_extension('_dop',
                         sources=['dop.pyf'],
                         libraries=['dop'],
                         depends=src_dop)

    config.add_extension('_test_multivariate', sources=src_quadpack_tests)
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for ``integrate``.

    NOTE(review): this redefines the ``configuration`` declared earlier in
    this file; at import time this later definition wins. Requires an
    optimized LAPACK (``notfound_action=2`` raises otherwise) and relies on
    the module-level ``numpy_nodepr_api`` defined elsewhere in this file.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('integrate', parent_package, top_path)

    # Get a local copy of lapack_opt_info
    lapack_opt = dict(get_info('lapack_opt', notfound_action=2))
    # Pop off the libraries list so it can be combined with
    # additional required libraries
    lapack_libs = lapack_opt.pop('libraries', [])

    # Fortran/C sources per helper library; lsoda's files are enumerated
    # explicitly (vode.f/zvode.f belong to the vode library instead).
    mach_src = [join('mach', '*.f')]
    quadpack_src = [join('quadpack', '*.f')]
    lsoda_src = [
        join('odepack', fn) for fn in [
            'blkdta000.f', 'bnorm.f', 'cfode.f', 'ewset.f', 'fnorm.f',
            'intdy.f', 'lsoda.f', 'prja.f', 'solsy.f', 'srcma.f', 'stoda.f',
            'vmnorm.f', 'xerrwv.f', 'xsetf.f', 'xsetun.f'
        ]
    ]
    vode_src = [join('odepack', 'vode.f'), join('odepack', 'zvode.f')]
    dop_src = [join('dop', '*.f')]
    quadpack_test_src = [join('tests', '_test_multivariate.c')]
    odeint_banded_test_src = [join('tests', 'banded5x5.f')]

    # mach probes machine constants and must not be optimized.
    config.add_library('mach',
                       sources=mach_src,
                       config_fc={'noopt': (__file__, 1)})
    config.add_library('quadpack', sources=quadpack_src)
    config.add_library('lsoda', sources=lsoda_src)
    config.add_library('vode', sources=vode_src)
    config.add_library('dop', sources=dop_src)

    # Extensions
    # quadpack:
    include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]
    if 'include_dirs' in lapack_opt:
        # Copy before popping so later **lapack_opt uses stay unaffected.
        lapack_opt = dict(lapack_opt)
        include_dirs.extend(lapack_opt.pop('include_dirs'))

    config.add_extension('_quadpack',
                         sources=['_quadpackmodule.c'],
                         libraries=['quadpack', 'mach'] + lapack_libs,
                         depends=(['__quadpack.h'] + quadpack_src + mach_src),
                         include_dirs=include_dirs,
                         **lapack_opt)

    # odepack/lsoda-odeint
    odepack_opts = lapack_opt.copy()
    odepack_opts.update(numpy_nodepr_api)
    config.add_extension('_odepack',
                         sources=['_odepackmodule.c'],
                         libraries=['lsoda', 'mach'] + lapack_libs,
                         depends=(lsoda_src + mach_src),
                         **odepack_opts)

    # vode
    config.add_extension('vode',
                         sources=['vode.pyf'],
                         libraries=['vode'] + lapack_libs,
                         depends=vode_src,
                         **lapack_opt)

    # lsoda
    config.add_extension('lsoda',
                         sources=['lsoda.pyf'],
                         libraries=['lsoda', 'mach'] + lapack_libs,
                         depends=(lsoda_src + mach_src),
                         **lapack_opt)

    # dop
    config.add_extension('_dop',
                         sources=['dop.pyf'],
                         libraries=['dop'],
                         depends=dop_src)

    config.add_extension('_test_multivariate', sources=quadpack_test_src)

    # Fortran+f2py extension module for testing odeint.
    config.add_extension('_test_odeint_banded',
                         sources=odeint_banded_test_src,
                         libraries=['lsoda', 'mach'] + lapack_libs,
                         depends=(lsoda_src + mach_src),
                         **lapack_opt)

    config.add_subpackage('_ivp')

    config.add_data_dir('tests')
    return config
Beispiel #40
0
def find_version(*paths):
    """Extract the ``__version__`` string from the file at *paths*.

    *paths* components are joined relative to this file's directory.
    Raises RuntimeError when no version assignment is present.
    """
    version_path = os.path.join(os.path.dirname(__file__), *paths)
    with open(version_path, encoding='utf-8') as handle:
        contents = handle.read()
    found = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if not found:
        raise RuntimeError("Unable to find version string.")
    return found.group(1)


# Fortran extension module; link order and sources come from the src/ tree.
# NOTE(review): get_info('lapack_opt', 1).get('library_dirs') returns None
# when the detected info lacks that key — confirm Extension accepts None here.
ext = [
    Extension(name='telewavesim.rmat_f',
              sources=['src/rmat.f90', 'src/rmat_sub.f90'],
              libraries=['lapack'],
              library_dirs=get_info('lapack_opt', 1).get('library_dirs'))
]

setup(name='telewavesim',
      version=find_version('telewavesim', '__init__.py'),
      description='Python package for teleseismic body-wave modeling',
      author='Pascal Audet, Colin J. Thomson, Michael G. Bostock',
      maintainer='Pascal Audet',
      author_email='*****@*****.**',
      url='https://github.com/paudetseis/Telewavesim',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Fortran',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
Beispiel #41
0
    env_vars = {}
    if env_lib_dirs:
        env_vars['library_dirs'] = env_lib_dirs.split(':')
    if env_libs:
        env_vars['libraries'] = env_libs.split(':')
    install_scs(USE_64_BIT_BLAS=USE_64_BIT_BLAS,
                blas_info=env_vars,
                lapack_info={},
                USE_OPENMP=USE_OPENMP,
                rootDir=rootDir)
else:
    # environment variables not set, using defaults instead
    try:
        print("using blas_opt / lapack_opt")
        install_scs(USE_64_BIT_BLAS=USE_64_BIT_BLAS,
                    blas_info=get_info('blas_opt'),
                    lapack_info=get_info('lapack_opt'),
                    USE_OPENMP=USE_OPENMP,
                    rootDir=rootDir)
    except SystemExit as e:  # catch permission denied error
        print("SystemExit")
        print(e)
    except:
        # print("error:", sys.exc_info()[0])
        print("blas_opt / lapack_opt install failed, trying blas / lapack")
        try:
            install_scs(USE_64_BIT_BLAS=USE_64_BIT_BLAS,
                        blas_info=get_info('blas'),
                        lapack_info=get_info('lapack'),
                        USE_OPENMP=USE_OPENMP,
                        rootDir=rootDir)
Beispiel #42
0
def run_compile():
    """
    Do it all in one call!

    Parses f2py/compiler/linker flags out of sys.argv, builds a distutils
    Extension from the remaining source arguments, then invokes setup()
    to compile it in a (possibly temporary) build directory.

    NOTE(review): this is legacy Python 2 code (print statements, backtick
    repr, string.split, list-returning filter); it will not run under
    Python 3 without porting.
    """
    import tempfile, os, shutil

    # '-c' selected this code path; drop it from argv.
    i = sys.argv.index('-c')
    del sys.argv[i]

    # Honor an explicit --build-dir; otherwise use a temp dir that is
    # removed after the build.
    remove_build_dir = 0
    try:
        i = sys.argv.index('--build-dir')
    except ValueError:
        i = None
    if i is not None:
        build_dir = sys.argv[i + 1]
        del sys.argv[i + 1]
        del sys.argv[i]
    else:
        remove_build_dir = 1
        build_dir = os.path.join(tempfile.mktemp())

    # Collect --link-<resource> flags; [7:] strips the '--link-' prefix.
    sysinfo_flags = filter(re.compile(r'[-][-]link[-]').match, sys.argv[1:])
    sys.argv = filter(lambda a, flags=sysinfo_flags: a not in flags, sys.argv)
    if sysinfo_flags:
        sysinfo_flags = [f[7:] for f in sysinfo_flags]

    # f2py-specific flags, plus any 'only:'/'skip:' ... ':' routine lists.
    f2py_flags = filter(
        re.compile(
            r'[-][-]((no[-]|)(wrap[-]functions|lower)|debug[-]capi|quiet)|[-]include'
        ).match, sys.argv[1:])
    sys.argv = filter(lambda a, flags=f2py_flags: a not in flags, sys.argv)
    f2py_flags2 = []
    fl = 0
    for a in sys.argv[1:]:
        if a in ['only:', 'skip:']:
            fl = 1
        elif a == ':':
            fl = 0
        if fl or a == ':':
            f2py_flags2.append(a)
    if f2py_flags2 and f2py_flags2[-1] != ':':
        f2py_flags2.append(':')
    f2py_flags.extend(f2py_flags2)

    sys.argv = filter(lambda a, flags=f2py_flags2: a not in flags, sys.argv)

    # Fortran compiler selection and per-compiler option flags.
    flib_flags = filter(
        re.compile(
            r'[-][-]((f(90)?compiler([-]exec|)|compiler)=|help[-]compiler)').
        match, sys.argv[1:])
    sys.argv = filter(lambda a, flags=flib_flags: a not in flags, sys.argv)
    fc_flags = filter(
        re.compile(
            r'[-][-]((f(77|90)(flags|exec)|opt|arch)=|(debug|noopt|noarch|help[-]fcompiler))'
        ).match, sys.argv[1:])
    sys.argv = filter(lambda a, flags=fc_flags: a not in flags, sys.argv)

    # Normalize --fcompiler= vendor names against the registered fcompiler
    # classes; unknown vendors are reported but kept as-is.
    if 1:
        del_list = []
        for s in flib_flags:
            v = '--fcompiler='
            if s[:len(v)] == v:
                from numpy.distutils import fcompiler
                allowed_keys = fcompiler.fcompiler_class.keys()
                nv = ov = s[len(v):].lower()
                if ov not in allowed_keys:
                    vmap = {}  # XXX
                    try:
                        nv = vmap[ov]
                    except KeyError:
                        if ov not in vmap.values():
                            print 'Unknown vendor: "%s"' % (s[len(v):])
                    nv = ov
                i = flib_flags.index(s)
                flib_flags[i] = '--fcompiler=' + nv
                continue
        for s in del_list:
            i = flib_flags.index(s)
            del flib_flags[i]
        assert len(flib_flags) <= 2, ` flib_flags `
    setup_flags = filter(re.compile(r'[-][-](verbose)').match, sys.argv[1:])
    sys.argv = filter(lambda a, flags=setup_flags: a not in flags, sys.argv)
    if '--quiet' in f2py_flags:
        setup_flags.append('--quiet')

    # Module name: explicit via -m, otherwise derived from the first .pyf
    # file found among the sources.
    modulename = 'untitled'
    sources = sys.argv[1:]
    if '-m' in sys.argv:
        i = sys.argv.index('-m')
        modulename = sys.argv[i + 1]
        del sys.argv[i + 1], sys.argv[i]
        sources = sys.argv[1:]
    else:
        from numpy.distutils.command.build_src import get_f2py_modulename
        pyf_files, sources = filter_files('', '[.]pyf([.]src|)', sources)
        sources = pyf_files + sources
        for f in pyf_files:
            modulename = get_f2py_modulename(f)
            if modulename:
                break

    # Separate compiler-style arguments (-I/-L/-l/-U/-D, object files)
    # from the actual source files.
    extra_objects, sources = filter_files('', '[.](o|a|so)', sources)
    include_dirs, sources = filter_files('-I', '', sources, remove_prefix=1)
    library_dirs, sources = filter_files('-L', '', sources, remove_prefix=1)
    libraries, sources = filter_files('-l', '', sources, remove_prefix=1)
    undef_macros, sources = filter_files('-U', '', sources, remove_prefix=1)
    define_macros, sources = filter_files('-D', '', sources, remove_prefix=1)
    using_numarray = 0
    using_numeric = 0
    # Convert "-DNAME[=VALUE]" strings into distutils (NAME, VALUE) tuples.
    for i in range(len(define_macros)):
        name_value = string.split(define_macros[i], '=', 1)
        if len(name_value) == 1:
            name_value.append(None)
        if len(name_value) == 2:
            define_macros[i] = tuple(name_value)
        else:
            print 'Invalid use of -D:', name_value

    from numpy.distutils.system_info import get_info

    num_include_dir = None
    num_info = {}
    #import numpy
    #n = 'numpy'
    #p = get_prefix(numpy)
    #from numpy.distutils.misc_util import get_numpy_include_dirs
    #num_info = {'include_dirs': get_numpy_include_dirs()}

    if num_info:
        include_dirs.extend(num_info.get('include_dirs', []))

    from numpy.distutils.core import setup, Extension
    ext_args = {
        'name': modulename,
        'sources': sources,
        'include_dirs': include_dirs,
        'library_dirs': library_dirs,
        'libraries': libraries,
        'define_macros': define_macros,
        'undef_macros': undef_macros,
        'extra_objects': extra_objects,
        'f2py_options': f2py_flags,
    }

    # Merge in resources requested via --link-<name> flags.
    if sysinfo_flags:
        from numpy.distutils.misc_util import dict_append
        for n in sysinfo_flags:
            i = get_info(n)
            if not i:
                outmess('No %s resources found in system'\
                        ' (try `f2py --help-link`)\n' % (`n`))
            dict_append(ext_args, **i)

    # Rewrite argv into a distutils 'build' command line and run setup().
    ext = Extension(**ext_args)
    sys.argv = [sys.argv[0]] + setup_flags
    sys.argv.extend([
        'build', '--build-temp', build_dir, '--build-base', build_dir,
        '--build-platlib', '.'
    ])
    if fc_flags:
        sys.argv.extend(['config_fc'] + fc_flags)
    if flib_flags:
        sys.argv.extend(['build_ext'] + flib_flags)

    setup(ext_modules=[ext])

    if remove_build_dir and os.path.exists(build_dir):
        outmess('Removing build directory %s\n' % (build_dir))
        shutil.rmtree(build_dir)
Beispiel #43
0
    'platforms': ['linux'],
    'requires': ['obspy', 'basemap'],
    'py_modules': [
        'seispy.burrow', 'seispy.event', 'seispy.gather', 'seispy.geoid',
        'seispy.geometry', 'seispy.locate', 'seispy.network', 'seispy.station',
        'seispy.trace', 'seispy.ttgrid', 'seispy.util', 'seispy.velocity'
    ],
    'scripts': [
        'scripts/fetch_data', 'scripts/fm3d_ttimes', 'scripts/mt-3dloc',
        'scripts/mt-shear', 'scripts/mt-synth', 'scripts/plot_events',
        'scripts/synthetics2db'
    ]
}

# Get some information about BLAS and LAPACK libraries; notfound_action=2
# raises when either is missing.
blas_opt = get_info('blas', notfound_action=2)
lapack_opt = get_info('lapack', notfound_action=2)

# Compile resource information needed by seispy.signal.statistics module.
# NOTE(review): indexing ['libraries'][0]/['library_dirs'][0] assumes those
# keys exist and are non-empty — on some platforms (e.g. macOS framework
# BLAS) they can be absent, which would raise KeyError here.
config_path = "%s/lib/python%d.%d/config" % (
    sys.prefix, sys.version_info.major, sys.version_info.minor)
libs = [blas_opt['libraries'][0], lapack_opt['libraries'][0]]
lib_dirs = [
    blas_opt['library_dirs'][0], lapack_opt['library_dirs'][0], config_path
]
# Compile resource information needed by seispy.signal.detect module
eigen_path = os.getcwd() + "/seispy/signal/src"
# Add statistics and detect extension modules from the seispy.signal
# sub-module.
kwargs['ext_modules'] = [
    Extension('seispy.signal.statistics', ['seispy/signal/statistics.f90'],
Beispiel #44
0
def configuration(parent_package='', top_path=None):
    """Assemble the numpy.distutils configuration for ``sklearn``.

    Registers every subpackage, the Cython-generated C extensions, and a
    bundled cblas fallback library when no suitable BLAS is detected.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    import numpy

    # On POSIX, C extensions link against libm explicitly.
    libraries = []
    if os.name == 'posix':
        libraries.append('m')

    config = Configuration('sklearn', parent_package, top_path)

    # Subpackages, registered in order.
    for subpackage in (
            '__check_build',
            'svm',
            'datasets',
            'datasets/tests',
            'feature_extraction',
            'feature_extraction/tests',
            'cluster',
            'cluster/tests',
            'cluster/bicluster',
            'cluster/bicluster/tests',
            'covariance',
            'covariance/tests',
            'cross_decomposition',
            'decomposition',
            'decomposition/tests',
            'ensemble',
            'ensemble/tests',
            'feature_selection',
            'feature_selection/tests',
            'utils',
            'utils/tests',
            'externals',
            'mixture',
            'mixture/tests',
            'gaussian_process',
            'gaussian_process/tests',
            'neighbors',
            'neural_network',
            'preprocessing',
            'manifold',
            'metrics',
            'semi_supervised',
            'tree',
            'tree/tests',
            'metrics/tests',
            'metrics/cluster',
            'metrics/cluster/tests',
    ):
        config.add_subpackage(subpackage)

    # Cython-generated C extension modules.
    for ext_name, ext_source in (('_hmmc', '_hmmc.c'),
                                 ('_isotonic', '_isotonic.c')):
        config.add_extension(
            ext_name,
            sources=[ext_source],
            include_dirs=[numpy.get_include()],
            libraries=libraries,
        )

    # Some libs need cblas; a fortran-compiled BLAS is not sufficient, so
    # fall back to the bundled cblas sources when detection fails or only
    # a non-ATLAS BLAS was found.
    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (('NO_ATLAS_INFO', 1) in blas_info.get(
            'define_macros', [])):
        config.add_library('cblas', sources=[join('src', 'cblas', '*.c')])
        warnings.warn(BlasNotFoundError.__doc__)

    # These packages depend on cblas, so they must be registered after it.
    config.add_subpackage('linear_model')
    config.add_subpackage('utils')

    # Test directory for the top-level package.
    config.add_subpackage('tests')

    return config
Beispiel #45
0
def _has_blas_lib(libname):
    """Return True when *libname* is among the BLAS libraries that
    numpy.distutils detected for the optimized BLAS configuration."""
    from numpy.distutils.system_info import get_info
    detected_libs = get_info('blas_opt').get('libraries', [])
    return libname in detected_libs
Beispiel #46
0
    """
    def build_extension(self, ext):
        # Intentionally a no-op: this override skips compiling the
        # extension. NOTE(review): the enclosing class header is outside
        # this view — presumably a build_ext subclass used for
        # source-only/dummy builds; confirm.
        pass


# Select source suffix based on Cython availability; when present, Cython
# regenerates the C sources (cython/cmdclass/CythonCommand are defined
# elsewhere in this file).
if cython:
    # we have cython and generate c codes directly
    suffix = ".pyx"
    cmdclass["cython"] = CythonCommand
else:
    suffix = ".c"

# Retrieve the compiler information
from numpy.distutils.system_info import get_info
# use flags defined in numpy
all_info = get_info('ALL')

# Define compilation flags
extra_compile_args = ""
extra_link_args = extra_compile_args

# in numpy>=1.16.0, silence build warnings about deprecated API usage
macros.append(("NPY_NO_DEPRECATED_API", "0"))
# Do not expose multiple platform Cython code.
# We do not need it
#  https://cython.readthedocs.io/en/latest/src/userguide/source_files_and_compilation.html#integrating-multiple-modules
macros.append(("CYTHON_NO_PYINIT_EXPORT", "1"))


class EnsureSource_sdist(sdist):
    """Ensure Cython has runned on all pyx files (i.e. we need c sources)."""
Beispiel #47
0

def find_version(*paths):
    """Extract the ``__version__`` string from the file at *paths*.

    *paths* components are joined relative to this file's directory.
    Raises RuntimeError when no version assignment is found.
    """
    fname = os.path.join(os.path.dirname(__file__), *paths)
    # Read as UTF-8 explicitly: the platform default encoding may differ
    # and misread non-ASCII characters in the source file. This matches
    # the other find_version helper in this file.
    with open(fname, encoding='utf-8') as fp:
        code = fp.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", code, re.M)
    if match:
        return match.group(1)
    raise RuntimeError("Unable to find version string.")


# Fortran extension modules linked against libgfortran.
# NOTE(review): 'gfortran' is not a standard numpy.distutils section name,
# so get_info('gfortran') may return {} and .get('library_dirs') None —
# confirm this resolves on the intended build platforms.
ext_cpwt = Extension(name='plateflex.cpwt',
                     sources=['src/cpwt/cpwt.f90', 'src/cpwt/cpwt_sub.f90'],
                     libraries=['gfortran'],
                     library_dirs=get_info('gfortran').get('library_dirs'))
ext_flex = Extension(name='plateflex.flex',
                     sources=['src/flex/flex.f90'],
                     libraries=['gfortran'],
                     library_dirs=get_info('gfortran').get('library_dirs'))

setup(
    name='plateflex',
    version=find_version('plateflex', '__init__.py'),
    description='Python package for estimating lithospheric elastic thickness',
    author='Pascal Audet',
    maintainer='Pascal Audet',
    author_email='*****@*****.**',
    url='https://github.com/paudetseis/PlateFlex',
    classifiers=[
        'Development Status :: 3 - Alpha',
Beispiel #48
0
from warnings import warn

from numpy.distutils.system_info import get_info

# should synthesizeNTF run the optimization routine?
optimize_NTF = True

# how many iterations should be allowed in NTF synthesis?
# see synthesizeNTF() for more
itn_limit = 500

# debug
_debug = False

# get blas information to compile the cython extensions; an empty dict
# means numpy found no BLAS on this system.
blas_info = get_info("blas")
if len(blas_info) == 0 and _debug:
    warn("Numpy did not detect the BLAS library in the system")
# Let's make an educated guess for the cblas.h location on Unix-likes.
if 'linux' in sys.platform or 'darwin' in sys.platform:
    guessed_include = '/usr/include'
else:
    guessed_include = None
# wrap it up: numpy or user-set environment var or a lucky guess on our side is
# needed to get the cblas.h header path. If not found, simulateDSM() will use
# a CPython implementation (slower).
setup_args = {
    "script_args": (["--compiler=mingw32"] if sys.platform == 'win32' else [])
}
# np is imported elsewhere in this file.
lib_include = [np.get_include()]
if "include_dirs" not in blas_info and "BLAS_H" not in os.environ and \
Beispiel #49
0
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for ``numpy.linalg``.

    Uses the system LAPACK when detected; otherwise falls back to the
    bundled f2c-translated lapack_lite sources. Honors the
    NPY_USE_BLAS_ILP64 environment variable for 64-bit integer BLAS.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, system_info
    config = Configuration('linalg', parent_package, top_path)

    config.add_subpackage('tests')

    # Configure lapack_lite

    src_dir = 'lapack_lite'
    lapack_lite_src = [
        os.path.join(src_dir, 'python_xerbla.c'),
        os.path.join(src_dir, 'f2c_z_lapack.c'),
        os.path.join(src_dir, 'f2c_c_lapack.c'),
        os.path.join(src_dir, 'f2c_d_lapack.c'),
        os.path.join(src_dir, 'f2c_s_lapack.c'),
        os.path.join(src_dir, 'f2c_lapack.c'),
        os.path.join(src_dir, 'f2c_blas.c'),
        os.path.join(src_dir, 'f2c_config.c'),
        os.path.join(src_dir, 'f2c.c'),
    ]
    all_sources = config.paths(lapack_lite_src)

    # ILP64 (64-bit integer) LAPACK is mandatory when requested (action 2
    # raises); otherwise a missing optimized LAPACK yields {} and triggers
    # the lapack_lite fallback below.
    if os.environ.get('NPY_USE_BLAS_ILP64', "0") != "0":
        lapack_info = get_info('lapack_ilp64_opt', 2)
    else:
        lapack_info = get_info('lapack_opt', 0)  # and {}

    use_lapack_lite = not lapack_info

    if use_lapack_lite:
        # This makes numpy.distutils write the fact that lapack_lite
        # is being used to numpy.__config__
        class numpy_linalg_lapack_lite(system_info):
            def calc_info(self):
                info = {'language': 'c'}
                if sys.maxsize > 2**32:
                    # Build lapack-lite in 64-bit integer mode.
                    # The suffix is arbitrary (lapack_lite symbols follow it),
                    # but use the "64_" convention here.
                    info['define_macros'] = [('HAVE_BLAS_ILP64', None),
                                             ('BLAS_SYMBOL_SUFFIX', '64_')]
                self.set_info(**info)

        lapack_info = numpy_linalg_lapack_lite().get_info(2)

    def get_lapack_lite_sources(ext, build_dir):
        # Source-callback: choose between the full lapack_lite sources and
        # just python_xerbla.c depending on whether a system LAPACK is used.
        if use_lapack_lite:
            print("### Warning:  Using unoptimized lapack ###")
            return all_sources
        else:
            if sys.platform == 'win32':
                print("### Warning:  python_xerbla.c is disabled ###")
                return []
            return [all_sources[0]]

    config.add_extension(
        'lapack_lite',
        sources=['lapack_litemodule.c', get_lapack_lite_sources],
        depends=['lapack_lite/f2c.h'],
        extra_info=lapack_info,
    )

    # umath_linalg module
    config.add_extension(
        '_umath_linalg',
        sources=['umath_linalg.c.src', get_lapack_lite_sources],
        depends=['lapack_lite/f2c.h'],
        extra_info=lapack_info,
        libraries=['npymath'],
    )
    config.add_data_files('*.pyi')
    return config
Beispiel #50
0
def get_extensions():
    """Build the list of setuptools Extensions for the ``mkl`` package.

    MKL build parameters come from ``numpy.distutils.system_info`` when it
    is importable, otherwise from the ``MKLROOT`` environment variable
    (a missing variable raises ``KeyError``).

    Returns:
        list of ``setuptools.extension.Extension`` (cythonized when Cython
        is available).

    Raises:
        ValueError: if Cython is unavailable and no pre-generated
            ``_mkl_service.c`` exists.
    """
    try:
        from numpy.distutils.system_info import get_info
        mkl_info = get_info('mkl')
    except ImportError:
        # numpy.distutils not importable; fall back to MKLROOT.
        mkl_root = os.environ['MKLROOT']
        mkl_info = {
            'include_dirs': [join(mkl_root, 'include')],
            'library_dirs': [join(mkl_root, 'lib'), join(mkl_root, 'lib', 'intel64')],
            'libraries': ['mkl_rt']
        }

    mkl_include_dirs = mkl_info.get('include_dirs', [])
    mkl_library_dirs = mkl_info.get('library_dirs', [])
    mkl_libraries = mkl_info.get('libraries', ['mkl_rt'])

    defs = []
    # Single-dynamic-library (mkl_rt) interface needs a compile-time marker.
    # (Idiom fix: generator expression instead of a throwaway list.)
    if any('mkl_rt' in li for li in mkl_libraries):
        #libs += ['dl'] - by default on Linux
        defs += [('USING_MKL_RT', None)]

    pdir = 'mkl'
    # Prefer cythonizing the .pyx; without Cython a pre-generated .c file
    # must already exist.
    try:
        from Cython.Build import cythonize
        sources = [join(pdir, '_mkl_service.pyx')]
        have_cython = True
    except ImportError as e:
        have_cython = False
        sources = [join(pdir, '_mkl_service.c')]
        if not exists(sources[0]):
            raise ValueError(str(e) + '. ' +
                             'Cython is required to build the initial .c file.')

    extensions = []
    extensions.append(
        setuptools.extension.Extension(
            'mkl._mklinit',
            sources=['mkl/_mklinitmodule.c'],
            define_macros=defs,
            include_dirs=mkl_include_dirs,
            libraries=mkl_libraries,
            library_dirs=mkl_library_dirs,
            extra_compile_args=[
                '-DNDEBUG'
                # '-g', '-O2', '-Wall',
            ]
        )
    )

    extensions.append(
        setuptools.extension.Extension(
            'mkl._py_mkl_service',
            sources=sources,
            include_dirs=mkl_include_dirs,
            library_dirs=mkl_library_dirs,
            libraries=mkl_libraries,
            extra_compile_args=[
                '-DNDEBUG'
                # '-g', '-O2', '-Wall',
            ]
        )
    )

    if have_cython:
        # BUG FIX: the original passed join(__file__, pdir), i.e. a path
        # *under the setup file itself* ("setup.py/mkl"), which cannot
        # exist; the intended include path is the package directory next
        # to this file.
        extensions = cythonize(
            extensions,
            include_path=[join(os.path.dirname(__file__), pdir)])

    return extensions
Beispiel #51
0
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``mkl_random`` package.

    Locates MKL (preferring the ``MKLROOT`` environment variable over
    ``numpy.distutils.system_info``), builds the C++ ``mkl_dists`` helper
    library and the ``mklrand`` extension, and cythonizes the extension
    sources when Cython is available.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('mkl_random', parent_package, top_path)
    # An explicit MKLROOT wins over whatever numpy.distutils can detect.
    mkl_root = os.getenv('MKLROOT', None)
    if mkl_root:
        mkl_info = {
            'include_dirs': [join(mkl_root, 'include')],
            'library_dirs':
            [join(mkl_root, 'lib'),
             join(mkl_root, 'lib', 'intel64')],
            'libraries': ['mkl_rt']
        }
    else:
        mkl_info = get_info('mkl')

    mkl_include_dirs = mkl_info.get('include_dirs', [])
    mkl_library_dirs = mkl_info.get('library_dirs', [])
    libs = mkl_info.get('libraries', ['mkl_rt'])
    if sys.platform == 'win32':
        # NOTE(review): presumably required by Windows system calls in the
        # C/C++ sources -- confirm against the sources.
        libs.append('Advapi32')

    # Compiler-option prefix: '/Q' for Windows/cygwin toolchains, '-' else.
    Q = '/Q' if sys.platform.startswith(
        'win') or sys.platform == 'cygwin' else '-'

    pdir = 'mkl_random'
    wdir = join(pdir, 'src')

    # The distributions helper is C++11 code.
    eca = [Q + 'std=c++11']
    if sys.platform == "linux":
        eca.extend(["-Wno-unused-but-set-variable", "-Wno-unused-function"])

    config.add_library(
        'mkl_dists',
        sources=join(wdir, 'mkl_distributions.cpp'),
        libraries=libs,
        include_dirs=[wdir, pdir,
                      get_numpy_include(),
                      get_python_include()],
        extra_compiler_args=eca,
        depends=[
            join(wdir, '*.h'),
        ],
        language='c++',
    )

    # Cythonize mklrand.pyx when possible; otherwise require the
    # pre-generated mklrand.c to be present.
    try:
        from Cython.Build import cythonize
        sources = [join(pdir, 'mklrand.pyx')]
        have_cython = True
    except ImportError as e:
        have_cython = False
        sources = [join(pdir, 'mklrand.c')]
        if not exists(sources[0]):
            raise ValueError(
                str(e) + '. ' +
                'Cython is required to build the initial .c file.')

    # enable unix large file support on 32 bit systems
    # (64 bit off_t, lseek -> lseek64 etc.)
    defs = [('_FILE_OFFSET_BITS', '64'), ('_LARGEFILE_SOURCE', '1'),
            ('_LARGEFILE64_SOURCE', '1')]
    if needs_mingw_ftime_workaround():
        defs.append(("NEED_MINGW_TIME_WORKAROUND", None))

    # The extension links the helper library built above.
    sources = sources + [join(wdir, x) for x in ['randomkit.c']]
    libs = libs + ['mkl_dists']
    config.add_extension(
        name='mklrand',
        sources=sources,
        libraries=libs,
        include_dirs=[wdir, pdir] + mkl_include_dirs,
        library_dirs=mkl_library_dirs,
        define_macros=defs,
    )

    # Ship the headers and the test suite with the package.
    config.add_data_files(('.', join('src', 'randomkit.h')))
    config.add_data_files(('.', join('src', 'mkl_distributions.h')))
    config.add_data_dir('tests')

    if have_cython:
        config.ext_modules = cythonize(config.ext_modules,
                                       include_path=[pdir, wdir])

    return config
Beispiel #52
0
def configuration(parent_package='',top_path=None):
    """numpy.distutils configuration for the ``optimize`` subpackage.

    Registers the Fortran/C helper libraries (minpack, rootfind) and the
    optimizer extension modules, merging the no-deprecated-API macros into
    the LAPACK build info where needed.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info

    config = Configuration('optimize', parent_package, top_path)

    # MINPACK library + its Python wrapper.
    minpack_src = [join('minpack', '*f')]
    config.add_library('minpack', sources=minpack_src)
    config.add_extension('_minpack',
                         sources=['_minpackmodule.c'],
                         libraries=['minpack'],
                         depends=['minpack.h', '__minpack.h'] + minpack_src,
                         **numpy_nodepr_api)

    # Root-finding (Zeros) library + wrapper.
    rootfind_src = [join('Zeros', '*.c')]
    rootfind_hdr = [join('Zeros', 'zeros.h')]
    config.add_library('rootfind',
                       sources=rootfind_src,
                       headers=rootfind_hdr,
                       **numpy_nodepr_api)
    config.add_extension('_zeros',
                         sources=['zeros.c'],
                         libraries=['rootfind'],
                         depends=rootfind_src + rootfind_hdr,
                         **numpy_nodepr_api)

    # Merge the no-deprecated-API macros into the LAPACK info so the
    # Fortran extension below gets both sets of defines.
    lapack = get_info('lapack_opt')
    if 'define_macros' in numpy_nodepr_api:
        nodepr_macros = numpy_nodepr_api['define_macros']
        existing = lapack.get('define_macros')
        if existing is not None:
            lapack['define_macros'] = existing + nodepr_macros
        else:
            lapack['define_macros'] = nodepr_macros

    lbfgsb_files = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
    config.add_extension('_lbfgsb',
                         sources=[join('lbfgsb', f) for f in lbfgsb_files],
                         **lapack)

    tnc_files = ['moduleTNC.c', 'tnc.c']
    config.add_extension('moduleTNC',
                         sources=[join('tnc', f) for f in tnc_files],
                         depends=[join('tnc', 'tnc.h')],
                         **numpy_nodepr_api)

    cobyla_files = ['cobyla.pyf', 'cobyla2.f', 'trstlp.f']
    config.add_extension('_cobyla',
                         sources=[join('cobyla', f) for f in cobyla_files],
                         **numpy_nodepr_api)

    minpack2_files = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
    config.add_extension('minpack2',
                         sources=[join('minpack2', f) for f in minpack2_files],
                         **numpy_nodepr_api)

    slsqp_files = ['slsqp.pyf', 'slsqp_optmz.f']
    config.add_extension('_slsqp',
                         sources=[join('slsqp', f) for f in slsqp_files],
                         **numpy_nodepr_api)

    nnls_files = ['nnls.f', 'nnls.pyf']
    config.add_extension('_nnls',
                         sources=[join('nnls', f) for f in nnls_files],
                         **numpy_nodepr_api)

    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')
    return config
Beispiel #53
0
            n, v = opt, True
        n = n[2:]  # remove --
        default_options['install'][n] = v

# Find Fortran source code files
# find_wrap_sources (defined elsewhere in this script) inspects the QUIP
# tree/makefile and returns the source directories plus the lists of files,
# types, libraries and targets to wrap.
source_dirs, wrap_sources, wrap_types, quip_libraries, quip_targets = find_wrap_sources(
    makefile, quip_root)
include_dirs.extend(source_dirs)
# NOTE(review): QUIP libraries are prepended to the existing list --
# presumably for link ordering; confirm.
libraries = quip_libraries + libraries

# Add build.${QUIP_ARCH} to include and library paths
include_dirs.append(os.path.join(quip_root, 'build/%s' % quip_arch))
library_dirs.append(os.path.join(quip_root, 'build/%s' % quip_arch))

# arraydata extension module
# f2py publishes its own C sources and headers via get_info('f2py'); the
# arraydata module compiles against them.
f2py_info = get_info('f2py')
arraydata_ext = Extension(
    name='quippy.arraydata',
    sources=['arraydatamodule.c'] + f2py_info['sources'],
    include_dirs=f2py_info['include_dirs'] + include_dirs,
    # NOTE(review): sizeof_fortran_t is computed elsewhere in this script;
    # it appears to describe the size of a Fortran derived-type handle for
    # the C code -- confirm.
    define_macros=[('SIZEOF_FORTRAN_T', sizeof_fortran_t)],
    extra_link_args=extra_link_args)

# _quippy extension module
quippy_ext = Extension(
    name='quippy._quippy',
    sources=[
        F90WrapperBuilder(
            'quippy',
            wrap_sources=wrap_sources,
            cpp=cpp,
Beispiel #54
0
    BLAS = BLAS[0]
    argv.remove(BLAS)
    BLAS = BLAS.split('=')[1]
    assert BLAS in ['openblas', 'mkl', 'atlas', 'blas']
    libraries.append(BLAS)
    blas_inc_dirs = os.environ.get('BLAS_INCLUDE_DIRS')
    compile_args += [f'BLAS_INCLUDE_DIRS={blas_inc_dirs}']
    blas_lib_dirs = os.environ.get('BLAS_LIBRARY_DIRS')
    if blas_lib_dirs is not None:
        extra_link_args += [f'-Wl,-rpath,{blas_lib_dirs}']
else:
    # find the default BLAS library
    import numpy.distutils.system_info as sysinfo
    # Search blas in this order
    for blas in ['openblas', 'atlas', 'mkl', 'blas']:
        if 'libraries' in sysinfo.get_info(blas):
            BLAS = blas
            libraries += sysinfo.get_info(blas)['libraries']
            break
    else:
        # BLAS not found
        raise ImportError(' \
\nBLAS not found from numpy.distutils.system_info.get_info. \
\nPlease specify BLAS with: python setup.py install --blas=openblas" \
\nfor more information, please visit https://github.com/StanfordVL/MinkowskiEngine/wiki/Installation'
                          )

print(f'\nUsing BLAS={BLAS}')

compile_args += ['BLAS=' + BLAS]
Beispiel #55
0
# This script should run without errors whenever we update the
# kaggle/python container. It checks that all our most popular packages can
# be loaded and used without errors.

import numpy as np
print("Numpy imported ok")
print("Your lucky number is: " + str(np.random.randint(100)))

# Numpy must be linked to the MKL. (Occasionally, a third-party package will muck up the installation
# and numpy will be reinstalled with an OpenBLAS backing.)
from numpy.distutils.system_info import get_info
# This will throw an exception if the MKL is not linked correctly.
get_info("blas_mkl")

import pandas as pd
print("Pandas imported ok")

from sklearn import datasets
print("sklearn imported ok")
iris = datasets.load_iris()
X, y = iris.data, iris.target

# Smoke-test a classifier fit on the iris data.
from sklearn.ensemble import RandomForestClassifier
rf1 = RandomForestClassifier()
rf1.fit(X, y)
print("sklearn RandomForestClassifier: ok")

# NOTE(review): datasets.load_boston was removed in scikit-learn 1.2, so
# this will fail on newer sklearn -- confirm the container's pinned version.
from sklearn.linear_model import LinearRegression
boston = datasets.load_boston()
X, y = boston.data, boston.target
lr1 = LinearRegression()
Beispiel #56
0
def run_compile():
    """
    Do it all in one call!

    Parses f2py's command line (``sys.argv``) in place, splitting it into
    flag groups (system-info link flags, f2py flags, fortran compiler
    flags, setup flags), builds a numpy.distutils ``Extension`` from the
    remaining source arguments, and runs ``setup()`` on it -- using a
    temporary build directory unless ``--build-dir`` was given.
    """
    import tempfile

    # '-c' marks "compile" mode; drop it so the rest parses as setup args.
    i = sys.argv.index('-c')
    del sys.argv[i]

    # Honour a user-supplied --build-dir; otherwise build in a temp dir
    # that is removed at the end.
    remove_build_dir = 0
    try:
        i = sys.argv.index('--build-dir')
    except ValueError:
        i = None
    if i is not None:
        build_dir = sys.argv[i + 1]
        del sys.argv[i + 1]
        del sys.argv[i]
    else:
        remove_build_dir = 1
        build_dir = tempfile.mkdtemp()

    # --link-<resource> flags: the resource names (prefix stripped) are
    # resolved later via system_info.get_info().
    _reg1 = re.compile(r'--link-')
    sysinfo_flags = [_m for _m in sys.argv[1:] if _reg1.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in sysinfo_flags]
    if sysinfo_flags:
        sysinfo_flags = [f[7:] for f in sysinfo_flags]

    # Flags consumed by f2py itself.
    _reg2 = re.compile(
        r'--((no-|)(wrap-functions|lower)|debug-capi|quiet)|-include')
    f2py_flags = [_m for _m in sys.argv[1:] if _reg2.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in f2py_flags]
    # 'only:'/'skip:' sections run up to a ':' terminator; collect them
    # verbatim, appending a closing ':' if the user omitted it.
    f2py_flags2 = []
    fl = 0
    for a in sys.argv[1:]:
        if a in ['only:', 'skip:']:
            fl = 1
        elif a == ':':
            fl = 0
        if fl or a == ':':
            f2py_flags2.append(a)
    if f2py_flags2 and f2py_flags2[-1] != ':':
        f2py_flags2.append(':')
    f2py_flags.extend(f2py_flags2)

    sys.argv = [_m for _m in sys.argv if _m not in f2py_flags2]
    # Fortran library / compiler-selection flags (forwarded to build_ext).
    _reg3 = re.compile(r'--((f(90)?compiler(-exec|)|compiler)=|help-compiler)')
    flib_flags = [_m for _m in sys.argv[1:] if _reg3.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in flib_flags]
    # Fortran compiler option flags (forwarded to config_fc).
    _reg4 = re.compile(
        r'--((f(77|90)(flags|exec)|opt|arch)=|(debug|noopt|noarch|help-fcompiler))'
    )
    fc_flags = [_m for _m in sys.argv[1:] if _reg4.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in fc_flags]

    # Validate --fcompiler=<vendor> against the known fcompiler classes.
    # NOTE(review): vmap is empty ('XXX'), so nv always ends up equal to
    # ov, and del_list is never populated -- effectively dead code, kept
    # as-is.
    del_list = []
    for s in flib_flags:
        v = '--fcompiler='
        if s[:len(v)] == v:
            from numpy.distutils import fcompiler
            fcompiler.load_all_fcompiler_classes()
            allowed_keys = list(fcompiler.fcompiler_class.keys())
            nv = ov = s[len(v):].lower()
            if ov not in allowed_keys:
                vmap = {}  # XXX
                try:
                    nv = vmap[ov]
                except KeyError:
                    if ov not in vmap.values():
                        print('Unknown vendor: "%s"' % (s[len(v):]))
                nv = ov
            i = flib_flags.index(s)
            flib_flags[i] = '--fcompiler=' + nv
            continue
    for s in del_list:
        i = flib_flags.index(s)
        del flib_flags[i]
    assert len(flib_flags) <= 2, repr(flib_flags)

    # Flags passed straight through to distutils setup().
    _reg5 = re.compile(r'--(verbose)')
    setup_flags = [_m for _m in sys.argv[1:] if _reg5.match(_m)]
    sys.argv = [_m for _m in sys.argv if _m not in setup_flags]

    if '--quiet' in f2py_flags:
        setup_flags.append('--quiet')

    # What remains in sys.argv is sources plus -I/-L/-l/-U/-D options.
    modulename = 'untitled'
    sources = sys.argv[1:]

    # Two-argument f2py options (flag + value) travel with f2py_flags.
    for optname in ['--include_paths', '--include-paths', '--f2cmap']:
        if optname in sys.argv:
            i = sys.argv.index(optname)
            f2py_flags.extend(sys.argv[i:i + 2])
            del sys.argv[i + 1], sys.argv[i]
            sources = sys.argv[1:]

    # Module name: explicit -m wins; otherwise derive it from the first
    # .pyf file that declares one.
    if '-m' in sys.argv:
        i = sys.argv.index('-m')
        modulename = sys.argv[i + 1]
        del sys.argv[i + 1], sys.argv[i]
        sources = sys.argv[1:]
    else:
        from numpy.distutils.command.build_src import get_f2py_modulename
        pyf_files, sources = filter_files('', '[.]pyf([.]src|)', sources)
        sources = pyf_files + sources
        for f in pyf_files:
            modulename = get_f2py_modulename(f)
            if modulename:
                break

    # Split the remaining arguments into their Extension fields.
    extra_objects, sources = filter_files('', '[.](o|a|so|dylib)', sources)
    include_dirs, sources = filter_files('-I', '', sources, remove_prefix=1)
    library_dirs, sources = filter_files('-L', '', sources, remove_prefix=1)
    libraries, sources = filter_files('-l', '', sources, remove_prefix=1)
    undef_macros, sources = filter_files('-U', '', sources, remove_prefix=1)
    define_macros, sources = filter_files('-D', '', sources, remove_prefix=1)
    # Normalize -DNAME / -DNAME=VALUE into (name, value) tuples.
    for i in range(len(define_macros)):
        name_value = define_macros[i].split('=', 1)
        if len(name_value) == 1:
            name_value.append(None)
        if len(name_value) == 2:
            define_macros[i] = tuple(name_value)
        else:
            print('Invalid use of -D:', name_value)

    from numpy.distutils.system_info import get_info

    # NOTE(review): num_info is always empty here, so this block is dead
    # code -- apparently a leftover from old Numeric support.
    num_info = {}
    if num_info:
        include_dirs.extend(num_info.get('include_dirs', []))

    from numpy.distutils.core import setup, Extension
    ext_args = {
        'name': modulename,
        'sources': sources,
        'include_dirs': include_dirs,
        'library_dirs': library_dirs,
        'libraries': libraries,
        'define_macros': define_macros,
        'undef_macros': undef_macros,
        'extra_objects': extra_objects,
        'f2py_options': f2py_flags,
    }

    # Resolve --link-<resource> flags into extension build info.
    if sysinfo_flags:
        from numpy.distutils.misc_util import dict_append
        for n in sysinfo_flags:
            i = get_info(n)
            if not i:
                outmess('No %s resources found in system'
                        ' (try `f2py --help-link`)\n' % (repr(n)))
            dict_append(ext_args, **i)

    # Rebuild argv as a distutils 'build' command and hand off to setup().
    ext = Extension(**ext_args)
    sys.argv = [sys.argv[0]] + setup_flags
    sys.argv.extend([
        'build',
        '--build-temp',
        build_dir,
        '--build-base',
        build_dir,
        '--build-platlib',
        '.',
        # disable CCompilerOpt
        '--disable-optimization'
    ])
    if fc_flags:
        sys.argv.extend(['config_fc'] + fc_flags)
    if flib_flags:
        sys.argv.extend(['build_ext'] + flib_flags)

    setup(ext_modules=[ext])

    # Clean up the temporary build directory if we created one.
    if remove_build_dir and os.path.exists(build_dir):
        import shutil
        outmess('Removing build directory %s\n' % (build_dir))
        shutil.rmtree(build_dir)
Beispiel #57
0
def build_ext(config):
    # ==============================
    # = Compile Fortran extensions =
    # ==============================
    """Register PyMC's Fortran and Pyrex extensions on *config* and return
    ``config.todict()`` with the 'packages' key removed.

    NOTE(review): relies on a module-level ``dist`` variable (apparently
    the distutils command name) and on relative paths 'blas/BLAS' and
    'lapack/double' existing under the current directory -- confirm.
    """

    from numpy.distutils.system_info import get_info

    # If optimized lapack/ BLAS libraries are present, compile distributions that involve linear algebra against those.
    # Otherwise compile blas and lapack from netlib sources.
    lapack_info = get_info('lapack_opt', 1)
    f_sources = [
        'pymc/flib.f', 'pymc/histogram.f', 'pymc/flib_blas.f',
        'pymc/blas_wrap.f', 'pymc/math.f', 'pymc/gibbsit.f', 'cephes/i0.c',
        'cephes/c2f.c', 'cephes/chbevl.c'
    ]
    if lapack_info:
        config.add_extension(name='flib',
                             sources=f_sources,
                             extra_info=lapack_info,
                             f2py_options=['skip:ppnd7'])

    # When no optimized LAPACK was found (or for source/binary dists),
    # bundle the needed netlib BLAS/LAPACK routines into the extension.
    if not lapack_info or dist in ['bdist', 'sdist']:
        ##inc_dirs = ['blas/BLAS','lapack/double']
        print(
            'No optimized BLAS or Lapack libraries found, building from source. This may take a while...'
        )
        for fname in os.listdir('blas/BLAS'):
            # Make sure this is a Fortran file, and not one of those weird hidden files that
            # pop up sometimes in the tarballs
            if fname[-2:] == '.f' and fname[0].find('_') == -1:
                f_sources.append('blas/BLAS/' + fname)

        for fname in [
                'dpotrs', 'dpotrf', 'dpotf2', 'ilaenv', 'dlamch', 'ilaver',
                'ieeeck', 'iparmq'
        ]:
            f_sources.append('lapack/double/' + fname + '.f')
        config.add_extension(name='flib', sources=f_sources)

    # ============================
    # = Compile Pyrex extensions =
    # ============================

    config.add_extension(name='LazyFunction', sources=['pymc/LazyFunction.c'])
    config.add_extension(name='Container_values',
                         sources='pymc/Container_values.c')

    config_dict = config.todict()
    # NOTE(review): bare except silently ignores any failure; only a
    # missing 'packages' key is expected here -- consider except KeyError.
    try:
        config_dict.pop('packages')
    except:
        pass

    # ===========================================
    # = Compile GP package's Fortran extensions =
    # ===========================================

    # Compile linear algebra utilities
    if lapack_info:
        config.add_extension(
            name='gp.linalg_utils',
            sources=['pymc/gp/linalg_utils.f', 'pymc/blas_wrap.f'],
            extra_info=lapack_info)
        config.add_extension(name='gp.incomplete_chol',
                             sources=['pymc/gp/incomplete_chol.f'],
                             extra_info=lapack_info)

    # Same fallback as above, for the GP extensions.
    if not lapack_info or dist in ['bdist', 'sdist']:
        print(
            'No optimized BLAS or Lapack libraries found, building from source. This may take a while...'
        )
        f_sources = ['pymc/blas_wrap.f']
        for fname in os.listdir('blas/BLAS'):
            if fname[-2:] == '.f':
                f_sources.append('blas/BLAS/' + fname)

        for fname in [
                'dpotrs', 'dpotrf', 'dpotf2', 'ilaenv', 'dlamch', 'ilaver',
                'ieeeck', 'iparmq'
        ]:
            f_sources.append('lapack/double/' + fname + '.f')

        config.add_extension(name='gp.linalg_utils',
                             sources=['pymc/gp/linalg_utils.f'] + f_sources)
        config.add_extension(name='gp.incomplete_chol',
                             sources=['pymc/gp/incomplete_chol.f'] + f_sources)

    # Compile covariance functions
    config.add_extension(name='gp.cov_funs.isotropic_cov_funs',\
    sources=['pymc/gp/cov_funs/isotropic_cov_funs.f','blas/BLAS/dscal.f'],\
    extra_info=lapack_info)

    config.add_extension(name='gp.cov_funs.distances',
                         sources=['pymc/gp/cov_funs/distances.f'],
                         extra_info=lapack_info)

    return config_dict
Beispiel #58
0
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    config = Configuration('suitesparse', parent_package, top_path)

    # SuiteSparse_config
    ss_config_opts = {
        'name':
        'suitesparseconfig',
        'sources': [
            str((SS / 'SuiteSparse_config/SuiteSparse_config.c').relative_to(
                SS.parent))
        ],
        'include_dirs':
        [str((SS / 'SuiteSparse_config').relative_to(SS.parent))],
        'language':
        'c',
    }
    config.add_library(**ss_config_opts)
    config.add_extension(**ss_config_opts,
                         extra_info={
                             'sources': ['suitesparseconfig_impl.c'],
                             'libraries': ['suitesparseconfig'],
                         })

    # AMD
    if not (tmp / 'AMDI/Include').exists():
        (tmp / 'AMDI/Include').mkdir(exist_ok=True, parents=True)
        (tmp / 'AMDL/Include').mkdir(exist_ok=True, parents=True)
        shutil.copyfile(SS / 'AMD/Include/amd_internal.h',
                        tmp / 'AMDI/Include/amd_i_internal.h')
        shutil.copyfile(SS / 'AMD/Include/amd_internal.h',
                        tmp / 'AMDL/Include/amd_l_internal.h')
        _add_macros(f=(tmp / 'AMDI/Include/amd_i_internal.h'), macros=['DINT'])
        _add_macros(f=(tmp / 'AMDL/Include/amd_l_internal.h'),
                    macros=['DLONG'])
    if not (tmp / 'AMDI/Source').exists():
        shutil.copytree(SS / 'AMD/Source', tmp / 'AMDI/Source')
        shutil.copytree(SS / 'AMD/Source', tmp / 'AMDL/Source')
        for type_ in (('DINT', 'i', 'I'), ('DLONG', 'l', 'L')):
            for f in (tmp / f'AMD{type_[2]}/Source').glob('amd_*.c'):
                fnew = f.parent / f.name.replace('amd_', f'amd_{type_[1]}_')
                shutil.move(f, fnew)
                _add_macros(f=fnew, macros=[type_[0]])
                _redirect_headers(f=fnew,
                                  headers=[('amd_internal.h',
                                            f'amd_{type_[1]}_internal.h')])

    amd_opts = {
        'name':
        'amd',
        'sources': ([
            str(f.relative_to(SS.parent))
            for f in (tmp / 'AMDI/Source').glob('amd_*.c')
        ] + [
            str(f.relative_to(SS.parent))
            for f in (tmp / 'AMDL/Source').glob('amd_*.c')
        ]),
        'include_dirs': [
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'AMD/Include').relative_to(SS.parent)),
            str((tmp / 'AMDI/Include').relative_to(SS.parent)),
            str((tmp / 'AMDL/Include').relative_to(SS.parent)),
        ],
        'libraries': ['suitesparseconfig'],
        'language':
        'c',
    }
    config.add_library(**amd_opts)
    config.add_extension(**amd_opts,
                         extra_info={
                             'sources': ['amd_impl.c'],
                             'export_symbols': [
                                 'amd_order',
                                 'amd_l_order',
                                 'amd_2',
                                 'amd_l2',
                                 'amd_valid',
                                 'amd_l_valid',
                                 'amd_defaults',
                                 'amd_l_defaults',
                                 'amd_control',
                                 'amd_l_control',
                                 'amd_info',
                                 'amd_l_info',
                             ],
                             'libraries': ['amd'],
                         })

    # CAMD
    if not (tmp / 'CAMDI/Include').exists():
        (tmp / 'CAMDI/Include').mkdir(exist_ok=True, parents=True)
        (tmp / 'CAMDL/Include').mkdir(exist_ok=True, parents=True)
        shutil.copyfile(SS / 'CAMD/Include/camd_internal.h',
                        tmp / 'CAMDI/Include/camd_i_internal.h')
        shutil.copyfile(SS / 'CAMD/Include/camd_internal.h',
                        tmp / 'CAMDL/Include/camd_l_internal.h')
        _add_macros(f=(tmp / f'CAMDI/Include/camd_i_internal.h'),
                    macros=['DINT'])
        _add_macros(f=(tmp / f'CAMDL/Include/camd_l_internal.h'),
                    macros=['DLONG'])
    if not (tmp / 'CAMDI/Source').exists():
        shutil.copytree(SS / 'CAMD/Source', tmp / 'CAMDI/Source')
        shutil.copytree(SS / 'CAMD/Source', tmp / 'CAMDL/Source')
        for type_ in (('DINT', 'i', 'I'), ('DLONG', 'l', 'L')):
            for f in (tmp / f'CAMD{type_[2]}/Source').glob('camd_*.c'):
                fnew = f.parent / f.name.replace('camd_', f'camd_{type_[1]}_')
                shutil.move(f, fnew)
                _add_macros(f=fnew, macros=[type_[0]])
                _redirect_headers(f=fnew,
                                  headers=[('camd_internal.h',
                                            f'camd_{type_[1]}_internal.h')])

    camd_opts = {
        'name':
        'camd',
        'sources': ([
            str(f.relative_to(SS.parent))
            for f in (tmp / 'CAMDI/Source').glob('camd_*.c')
        ] + [
            str(f.relative_to(SS.parent))
            for f in (tmp / 'CAMDL/Source').glob('camd_*.c')
        ]),
        'include_dirs': [
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'CAMD/Include').relative_to(SS.parent)),
            str((tmp / 'CAMDI/Include').relative_to(SS.parent)),
            str((tmp / 'CAMDL/Include').relative_to(SS.parent)),
        ],
        'libraries': ['suitesparseconfig'],
        'language':
        'c',
    }

    config.add_library(**camd_opts)
    config.add_extension(**camd_opts,
                         extra_info={
                             'sources': ['camd_impl.c'],
                             'export_symbols': [
                                 'camd_order',
                                 'camd_l_order',
                                 'camd_2',
                                 'camd_l2',
                                 'camd_valid',
                                 'camd_l_valid',
                                 'camd_cvalid',
                                 'camd_l_cvalid',
                                 'camd_defaults',
                                 'camd_l_defaults',
                                 'camd_control',
                                 'camd_l_control',
                                 'camd_info',
                                 'camd_l_info',
                             ],
                             'libraries': ['camd'],
                         })

    # COLAMD
    if not (tmp / 'COLAMDI/Source').exists():
        shutil.copytree(SS / 'COLAMD/Source', tmp / 'COLAMDI/Source')
        shutil.copytree(SS / 'COLAMD/Source', tmp / 'COLAMDL/Source')
        for type_ in (('', '', 'I'), ('DLONG', '_l', 'L')):
            f = (tmp / f'COLAMD{type_[2]}/Source/colamd.c')
            fnew = f.parent / f'colamd{type_[1]}.c'
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=[type_[0]])

    colamd_opts = {
        'name':
        'colamd',
        'sources': [
            str((tmp / 'COLAMDI/Source/colamd.c').relative_to(SS.parent)),
            str((tmp / 'COLAMDL/Source/colamd_l.c').relative_to(SS.parent)),
        ],
        'include_dirs': [
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'COLAMD/Include').relative_to(SS.parent)),
        ],
        'libraries': ['suitesparseconfig'],
        'language':
        'c',
    }

    config.add_library(**colamd_opts)
    config.add_extension(**colamd_opts,
                         extra_info={
                             'sources': ['colamd_impl.c'],
                             'libraries': ['colamd'],
                             'export_symbols': [
                                 'colamd_recommended',
                                 'colamd_l_recommended',
                                 'colamd_set_defaults',
                                 'colamd_l_set_defaults',
                                 'colamd',
                                 'colamd_l',
                                 'symamd',
                                 'symamd_l',
                                 'colamd_report',
                                 'colamd_l_report',
                                 'symamd_report',
                                 'symamd_l_report',
                             ],
                         })

    # CCOLAMD
    if not (tmp / 'CCOLAMDI/Source').exists():
        shutil.copytree(SS / 'CCOLAMD/Source', tmp / 'CCOLAMDI/Source')
        shutil.copytree(SS / 'CCOLAMD/Source', tmp / 'CCOLAMDL/Source')
        for type_ in (('', '', 'I'), ('DLONG', '_l', 'L')):
            f = (tmp / f'CCOLAMD{type_[2]}/Source/ccolamd.c')
            fnew = f.parent / f'ccolamd{type_[1]}.c'
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=[type_[0]])

    ccolamd_opts = {
        'name':
        'ccolamd',
        'sources': [
            str((tmp / 'CCOLAMDI/Source/ccolamd.c').relative_to(SS.parent)),
            str((tmp / 'CCOLAMDL/Source/ccolamd_l.c').relative_to(SS.parent)),
        ],
        'include_dirs': [
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'CCOLAMD/Include').relative_to(SS.parent)),
        ],
        'libraries': ['suitesparseconfig'],
        'language':
        'c',
    }

    config.add_library(**ccolamd_opts)
    config.add_extension(**ccolamd_opts,
                         extra_info={
                             'sources': ['ccolamd_impl.c'],
                             'libraries': ['ccolamd'],
                             'export_symbols': [
                                 'ccolamd_recommended',
                                 'ccolamd_l_recommended',
                                 'ccolamd_set_defaults',
                                 'ccolamd_l_set_defaults',
                                 'ccolamd',
                                 'ccolamd_l',
                                 'csymamd',
                                 'csymamd_l',
                                 'ccolamd_report',
                                 'ccolamd_l_report',
                                 'csymamd_report',
                                 'csymamd_l_report',
                                 'ccolamd2',
                                 'ccolamd2_l',
                                 'ccolamd_apply_order',
                                 'ccolamd_l_apply_order',
                                 'ccolamd_fsize',
                                 'ccolamd_l_fsize',
                                 'ccolamd_postorder',
                                 'ccolamd_l_postorder',
                                 'ccolamd_post_tree',
                                 'ccolamd_l_post_tree',
                             ],
                         })

    # CHOLMOD/Check module
    # Both the plain-int sources (from SS) and the generated SuiteSparse_long
    # sources (from tmp/CHOLMODL) are compiled into the same cholmod library.
    cholmod_sources = [
        str((SS / 'CHOLMOD/Check/cholmod_check.c').relative_to(SS.parent)),
        str((SS / 'CHOLMOD/Check/cholmod_read.c').relative_to(SS.parent)),
        str((SS / 'CHOLMOD/Check/cholmod_write.c').relative_to(SS.parent)),
        str((tmp / 'CHOLMODL/Check/cholmod_l_check.c').relative_to(SS.parent)),
        str((tmp / 'CHOLMODL/Check/cholmod_l_read.c').relative_to(SS.parent)),
        str((tmp / 'CHOLMODL/Check/cholmod_l_write.c').relative_to(SS.parent))
    ]
    cholmod_includes = [
        str((SS / 'AMD/Include').relative_to(SS.parent)),
        str((SS / 'AMD/Source').relative_to(SS.parent)),
        str((SS / 'COLAMD/Include').relative_to(SS.parent)),
        str((SS / 'CHOLMOD/Include').relative_to(SS.parent)),
        str((tmp / 'CHOLMODL/Include').relative_to(SS.parent))
    ]

    # (original header name, long-variant header name) pairs used to rewrite
    # the #includes inside every generated SuiteSparse_long source file.
    cholmod_l_hdrs = [(hdr.name, hdr.name.replace('cholmod', 'cholmod_l'))
                      for hdr in (SS / 'CHOLMOD/Include').glob('*.h')]
    # Create the DLONG header tree once; skipped if a previous run built it.
    if not (tmp / 'CHOLMODL/Include').exists():
        shutil.copytree(SS / 'CHOLMOD/Include', tmp / 'CHOLMODL/Include')
        for f in (tmp / 'CHOLMODL/Include').glob('*.h'):
            fnew = f.parent / f.name.replace('cholmod', 'cholmod_l')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(f=fnew, headers=cholmod_l_hdrs)

    # Same copy/rename/DLONG treatment for the Check sources themselves.
    if not (tmp / 'CHOLMODL/Check').exists():
        shutil.copytree(SS / 'CHOLMOD/Check', tmp / 'CHOLMODL/Check')
        for f in (tmp / 'CHOLMODL/Check').glob('cholmod_*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(
                f=fnew,
                headers=[('cholmod_internal.h', 'cholmod_l_internal.h'),
                         ('cholmod_check.h', 'cholmod_l_check.h'),
                         ('cholmod_config.h', 'cholmod_l_config.h'),
                         ('cholmod_matrixops.h', 'cholmod_l_matrixops.h')])

    # CHOLMOD/Core module
    # NOTE(review): these globs over tmp/CHOLMODL/* run BEFORE the copytree
    # just below, so on a pristine tmp directory the long-variant sources are
    # collected as an empty list -- confirm tmp is pre-populated (or that a
    # prior run is assumed) before relying on this ordering.
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/Core').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/Core').glob('cholmod_*.c')
    ]
    if not (tmp / 'CHOLMODL/Core').exists():
        shutil.copytree(SS / 'CHOLMOD/Core', tmp / 'CHOLMODL/Core')
        for f in (tmp / 'CHOLMODL/Core').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            # Core also #includes template sources (t_*.c), so redirect those
            # to their long-variant names in addition to the headers.
            _redirect_headers(
                f=fnew,
                headers=cholmod_l_hdrs +
                [('t_cholmod_change_factor.c', 't_cholmod_l_change_factor.c'),
                 ('t_cholmod_dense.c', 't_cholmod_l_dense.c'),
                 ('t_cholmod_transpose.c', 't_cholmod_l_transpose.c'),
                 ('t_cholmod_triplet.c', 't_cholmod_l_triplet.c')])

    # CHOLMOD/Cholesky module
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/Cholesky').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/Cholesky').glob('cholmod_*.c')
    ]
    if not (tmp / 'CHOLMODL/Cholesky').exists():
        shutil.copytree(SS / 'CHOLMOD/Cholesky', tmp / 'CHOLMODL/Cholesky')
        for f in (tmp / 'CHOLMODL/Cholesky').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(
                f=fnew,
                headers=cholmod_l_hdrs +
                [('t_cholmod_lsolve.c', 't_cholmod_l_lsolve.c'),
                 ('t_cholmod_ltsolve.c', 't_cholmod_l_ltsolve.c'),
                 ('t_cholmod_rowfac.c', 't_cholmod_l_rowfac.c'),
                 ('t_cholmod_solve.c', 't_cholmod_l_solve.c'),
                 ('t_cholmod_dense.c', 't_cholmod_l_dense.c')])

    # CHOLMOD/Partition module
    cholmod_includes += [
        str((SS / 'metis-5.1.0/include').relative_to(SS.parent)),
        str((SS / 'CAMD/Include').relative_to(SS.parent)),
        str((SS / 'CCOLAMD/Include').relative_to(SS.parent)),
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/Partition').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/Partition').glob('cholmod_l_*.c')
    ]
    # BUG FIX: the target directory was misspelled 'CHOLMODL/Parititon' in
    # the three lines below, so the long-variant files were created in a
    # directory the sources glob above never reads -- the DLONG Partition
    # sources were silently dropped from the build.  Spelled consistently
    # now, matching the other CHOLMOD modules.
    if not (tmp / 'CHOLMODL/Partition').exists():
        shutil.copytree(SS / 'CHOLMOD/Partition', tmp / 'CHOLMODL/Partition')
        for f in (tmp / 'CHOLMODL/Partition').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(f=fnew, headers=cholmod_l_hdrs)

    # CHOLMOD/MatrixOps module
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/MatrixOps').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/MatrixOps').glob('cholmod_l_*.c')
    ]
    if not (tmp / 'CHOLMODL/MatrixOps').exists():
        shutil.copytree(SS / 'CHOLMOD/MatrixOps', tmp / 'CHOLMODL/MatrixOps')
        for f in (tmp / 'CHOLMODL/MatrixOps').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(f=fnew,
                              headers=cholmod_l_hdrs +
                              [('t_cholmod_sdmult.c', 't_cholmod_l_sdmult.c')])

    # CHOLMOD/Modify module
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/Modify').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/Modify').glob('cholmod_l_*.c')
    ]
    if not (tmp / 'CHOLMODL/Modify').exists():
        shutil.copytree(SS / 'CHOLMOD/Modify', tmp / 'CHOLMODL/Modify')
        for f in (tmp / 'CHOLMODL/Modify').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(
                f=fnew,
                headers=cholmod_l_hdrs +
                [('t_cholmod_updown.c', 't_cholmod_l_updown.c'),
                 ('t_cholmod_updown_numkr.c', 't_cholmod_l_updown_numkr.c')])

    # CHOLMOD/Supernodal module
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (SS / 'CHOLMOD/Supernodal').glob('cholmod_*.c')
    ]
    cholmod_sources += [
        str(f.relative_to(SS.parent))
        for f in (tmp / 'CHOLMODL/Supernodal').glob('cholmod_l_*.c')
    ]
    if not (tmp / 'CHOLMODL/Supernodal').exists():
        shutil.copytree(SS / 'CHOLMOD/Supernodal', tmp / 'CHOLMODL/Supernodal')
        for f in (tmp / 'CHOLMODL/Supernodal').glob('*.c'):
            fnew = f.parent / f.name.replace('cholmod_', 'cholmod_l_')
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=['DLONG'])
            _redirect_headers(
                f=fnew,
                headers=cholmod_l_hdrs +
                [('t_cholmod_super_numeric.c', 't_cholmod_l_super_numeric.c'),
                 ('t_cholmod_super_solve.c', 't_cholmod_l_super_solve.c')])

    # CHOLMOD
    #lapack_info = get_info('lapack')
    cholmod_opts = {
        'name':
        'cholmod',
        'sources':
        cholmod_sources,
        'include_dirs':
        [str((SS / 'SuiteSparse_config').relative_to(SS.parent))] +
        cholmod_includes,
        'libraries': ['amd', 'colamd', 'suitesparseconfig'],
        'language':
        'c',
    }
    # NPARTITION compiles out the METIS-based Partition code paths.
    config.add_library(**cholmod_opts, macros=[('NPARTITION', None)])
    config.add_extension(**cholmod_opts,
                         extra_info={
                             'sources': ['cholmod_impl.c'],
                             'libraries': ['cholmod', 'openblas'],
                             'define_macros': [('NPARTITION', None)],
                             'export_symbols': [],
                         })

    # NOTE(review): this early return makes everything below it in this
    # function (the SPQR library and the entire UMFPACK build) unreachable
    # dead code -- confirm whether that is intentional.
    return config

    # SPQR
    # NOTE(review): unreachable -- the function returns above.  Also, the
    # sources are C++ ('*.cpp') but language is declared 'c'; verify before
    # re-enabling this section.
    config.add_library(
        'spqr',
        sources=[
            str(f.relative_to(SS.parent))
            for f in (SS / 'SPQR/Source').glob('*.cpp')
        ],
        include_dirs=[
            str((SS / 'SPQR/Include').relative_to(SS.parent)),
            str((SS / 'CHOLMOD/Include').relative_to(SS.parent)),
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
        ],
        libraries=['amd', 'colamd', 'cholmod', 'suitesparseconfig'],
        language='c')

    # One copy of the UMFPACK sources per integer/value type variant.
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKI/Source')
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKL/Source')
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKDI/Source')
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKDL/Source')
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKZI/Source')
    shutil.copytree(SS / 'UMFPACK/Source', tmp / 'UMFPACKZL/Source')
    # Per-type header rename tables, keyed by the type suffix.
    umf_hdrs = {}
    for type_, macros in [('i', ['DINT']), ('l', ['DLONG']), ('di', ['DINT']),
                          ('dl', ['DLONG']), ('zi', ['ZINT']),
                          ('zl', ['ZLONG'])]:
        umf_hdrs[type_] = (
            [(f.name, f.name.replace('umf_', f'umf_{type_}_'))
             for f in (SS / 'UMFPACK/Source').glob('umf_*.h')] +
            [(f.name, f.name.replace('umfpack_', f'umfpack_{type_}_'))
             for f in (SS / 'UMFPACK/Source').glob('umfpack_*.h')] +
            [('amd_internal.h',
              f'amd_{type_[-1] if type_ != "zl" else type_}_internal.h')])
        for f in (tmp / f'UMFPACK{type_.upper()}/Source/').glob('umf_*.h'):
            fnew = f.parent / f.name.replace('umf_', f'umf_{type_}_')
            # NOTE(review): every other rename in this file moves to `fnew`
            # directly, but this one moves to fnew.relative_to(SS.parent), a
            # path resolved against the current working directory -- verify.
            shutil.move(f, fnew.relative_to(SS.parent))
            _add_macros(f=fnew, macros=macros)
            _redirect_headers(f=fnew, headers=umf_hdrs[type_])

    # non-user-callable umf_*.[ch] files, int/SuiteSparse_long versions only
    # (no real/complex):
    UMFINT = [
        'umf_analyze', 'umf_apply_order', 'umf_colamd', 'umf_free',
        'umf_fsize', 'umf_is_permutation', 'umf_malloc', 'umf_realloc',
        'umf_report_perm', 'umf_singletons', 'umf_cholmod'
    ]
    # Accumulates every generated .c file (as a path relative to SS.parent)
    # that goes into the 'umfpack' library below.
    umfpack_sources = []
    for type_, macro in [('i', 'DINT'), ('l', 'DLONG')]:
        for f0 in UMFINT:
            f = tmp / f'UMFPACK{type_.upper()}/Source/{f0}.c'
            fnew = f.parent / f.name.replace('umf_', f'umf_{type_}_')
            umfpack_sources.append(str(fnew.relative_to(SS.parent)))
            shutil.move(f, fnew)
            _add_macros(f=fnew, macros=[macro])
            _redirect_headers(f=fnew, headers=umf_hdrs[type_])

    # non-user-callable, created from umf_ltsolve.c, umf_utsolve.c,
    # umf_triplet.c, and umf_assemble.c , with int/SuiteSparse_long
    # and real/complex versions:
    UMF_CREATED = [
        'umf_lhsolve', 'umf_uhsolve', 'umf_triplet_map_nox',
        'umf_triplet_nomap_x', 'umf_triplet_nomap_nox', 'umf_triplet_map_x',
        'umf_assemble_fixq', 'umf_store_lu_drop'
    ]

    # non-user-callable umf_*.[ch] files:
    UMFCH = [
        'umf_assemble', 'umf_blas3_update', 'umf_build_tuples',
        'umf_create_element', 'umf_dump', 'umf_extend_front',
        'umf_garbage_collection', 'umf_get_memory', 'umf_init_front',
        'umf_kernel', 'umf_kernel_init', 'umf_kernel_wrapup',
        'umf_local_search', 'umf_lsolve', 'umf_ltsolve',
        'umf_mem_alloc_element', 'umf_mem_alloc_head_block',
        'umf_mem_alloc_tail_block', 'umf_mem_free_tail_block',
        'umf_mem_init_memoryspace', 'umf_report_vector', 'umf_row_search',
        'umf_scale_column', 'umf_set_stats', 'umf_solve', 'umf_symbolic_usage',
        'umf_transpose', 'umf_tuple_lengths', 'umf_usolve', 'umf_utsolve',
        'umf_valid_numeric', 'umf_valid_symbolic', 'umf_grow_front',
        'umf_start_front', 'umf_store_lu', 'umf_scale'
    ]

    # non-user-callable, int/SuiteSparse_long and real/complex versions:
    UMF = UMF_CREATED + UMFCH

    # user-callable umfpack_*.[ch] files (int/SuiteSparse_long and real/complex):
    UMFPACK = [
        'umfpack_col_to_triplet', 'umfpack_defaults', 'umfpack_free_numeric',
        'umfpack_free_symbolic', 'umfpack_get_numeric', 'umfpack_get_lunz',
        'umfpack_get_symbolic', 'umfpack_get_determinant', 'umfpack_numeric',
        'umfpack_qsymbolic', 'umfpack_report_control', 'umfpack_report_info',
        'umfpack_report_matrix', 'umfpack_report_numeric',
        'umfpack_report_perm', 'umfpack_report_status',
        'umfpack_report_symbolic', 'umfpack_report_triplet',
        'umfpack_report_vector', 'umfpack_solve', 'umfpack_symbolic',
        'umfpack_transpose', 'umfpack_triplet_to_col', 'umfpack_scale',
        'umfpack_load_numeric', 'umfpack_save_numeric',
        'umfpack_load_symbolic', 'umfpack_save_symbolic'
    ]

    # user-callable, created from umfpack_solve.c (umfpack_wsolve.h exists, though):
    # with int/SuiteSparse_long and real/complex versions:
    UMFPACKW = ['umfpack_wsolve']

    UMFUSER = UMFPACKW + UMFPACK

    # Extra preprocessor macros required by specific generated targets,
    # keyed by a regex matched against the generated file name.
    _special_macros = {
        r'umf_[dz][il]_\whsolve': ['CONJUGATE_SOLVE'],
        r'umf_[dz][il]_triplet_map_x': ['DO_MAP', 'DO_VALUES'],
        r'umf_[dz][il]_triplet_map_nox': ['DO_MAP'],
        r'umf_[dz][il]_triplet_nomap_x': ['DO_VALUES'],
        r'umf_[dz][il]_assemble_fixq': ['FIXQ'],
        r'umf_[dz][il]_store_lu_drop': ['DROP'],
        r'umfpack_[dz][il]_wsolve': ['WSOLVE'],
    }

    # Generate the per-type (di/dl/zi/zl) real/complex sources: each target
    # name is mapped back to the template .c file it is compiled from, then
    # renamed, given its type macros, and its #includes redirected.
    do_copy = False
    for type_, macro in [('di', ['DINT']), ('dl', ['DLONG']), ('zi', ['ZINT']),
                         ('zl', ['ZLONG'])]:
        for f0 in UMF + UMFUSER:  # TODO: UMFUSER targets not building!
            f = tmp / f'UMFPACK{type_.upper()}/Source/{f0}.c'
            if f0.startswith('umf_'):
                fnew = f.parent / f.name.replace('umf_', f'umf_{type_}_')
            else:
                fnew = f.parent / f.name.replace('umfpack_',
                                                 f'umfpack_{type_}_')
            # convert targets to correct source files names:
            if 'hsolve' in fnew.name:
                # NOTE(review): this branch leaves do_copy False, so the
                # umf_[lu]tsolve.c template is MOVED away before the
                # umf_[lu]tsolve targets themselves are processed later in
                # UMFCH -- should this be a copy? TODO confirm.
                f = f.parent / f.name.replace('hsolve', 'tsolve')
            elif 'umf_triplet' in f0:
                f = f.parent / 'umf_triplet.c'
                do_copy = True
            elif 'assemble' in fnew.name:
                f = f.parent / 'umf_assemble.c'
                do_copy = True
            elif 'store_lu' in fnew.name:
                f = f.parent / 'umf_store_lu.c'
                do_copy = True
            elif 'wsolve' in f0:
                # BUG FIX: was `elif 'wsolve':`, a non-empty string literal
                # that is always truthy, so EVERY target not caught by an
                # earlier branch was wrongly generated from umfpack_solve.c.
                f = f.parent / 'umfpack_solve.c'
                do_copy = True
            umfpack_sources.append(str(fnew.relative_to(SS.parent)))
            if not do_copy:
                shutil.move(f, fnew)
            else:
                shutil.copyfile(f, fnew)
                do_copy = False
            # Do any extra macros apply to this file?
            extra_macros = []
            for regex in _special_macros:
                match = re.search(regex, fnew.name)
                if match:
                    extra_macros += _special_macros[regex]
                    # BUG FIX: the break was unconditionally outside this
                    # `if`, so only the first pattern in _special_macros was
                    # ever tested and DO_MAP/DO_VALUES/FIXQ/DROP/WSOLVE were
                    # never applied.  Stop only once a pattern has matched.
                    break
            _add_macros(f=fnew, macros=macro + extra_macros)
            _redirect_headers(f=fnew, headers=umf_hdrs[type_])

    # user-callable, only one version for int/SuiteSparse_long,
    # real/complex, *.[ch] files:
    GENERIC = ['umfpack_timer', 'umfpack_tictoc']
    for f0 in GENERIC:
        f = SS / f'UMFPACK/Source/{f0}.c'
        umfpack_sources.append(str(f.relative_to(SS.parent)))

    # UMFPACK
    # All generated per-type sources plus the generic ones go into a single
    # static library; the per-type Source dirs are on the include path so
    # the renamed headers resolve.
    config.add_library(
        'umfpack',
        sources=umfpack_sources,
        include_dirs=[
            str((tmp / 'UMFPACKI/Source').relative_to(SS.parent)),
            str((tmp / 'UMFPACKL/Source').relative_to(SS.parent)),
            str((tmp / 'UMFPACKDI/Source').relative_to(SS.parent)),
            str((tmp / 'UMFPACKDL/Source').relative_to(SS.parent)),
            str((tmp / 'UMFPACKZI/Source').relative_to(SS.parent)),
            str((tmp / 'UMFPACKZL/Source').relative_to(SS.parent)),
            str((tmp / 'AMDI/Include').relative_to(SS.parent)),
            str((tmp / 'AMDL/Include').relative_to(SS.parent)),
            str((tmp / 'AMDZL/Include').relative_to(SS.parent)),
            str((SS / 'UMFPACK/Include').relative_to(SS.parent)),
            str((SS / 'UMFPACK/Source').relative_to(SS.parent)),
            str((SS / 'AMD/Include').relative_to(SS.parent)),
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'CHOLMOD/Include').relative_to(SS.parent)),
        ],
        libraries=['amd', 'cholmod', 'suitesparseconfig'],
        language='c')

    # UMFPACK test
    lapack_opt = get_info('lapack')
    config.add_extension(
        'umfpack_demo',
        sources=['umfpack_demo.c'],
        include_dirs=[
            str((SS / 'SuiteSparse_config').relative_to(SS.parent)),
            str((SS / 'AMD/Include').relative_to(SS.parent)),
            str((SS / 'UMFPACK/Include').relative_to(SS.parent)),
        ],
        libraries=['umfpack'],
        language='c',
        extra_info=lapack_opt,
    )

    return config
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the scipy.linalg subpackage.

    Wires up the Fortran/C extensions (_fblas, _flapack, _flinalg,
    _interpolative, _solve_toeplitz, cython_blas/lapack, _decomp_update)
    against whatever LAPACK/BLAS get_info() discovers.
    """
    from distutils.sysconfig import get_python_inc
    from numpy.distutils.system_info import get_info, NotFoundError, numpy_info
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from scipy._build_utils import (get_sgemv_fix, get_g77_abi_wrappers,
                                    split_fortran_files)

    config = Configuration('linalg', parent_package, top_path)

    lapack = get_info('lapack_opt')
    if not lapack:
        raise NotFoundError('no lapack/blas resources found')

    # If LAPACK is provided by ATLAS, its version string is embedded in the
    # define_macros; take the first ATLAS_INFO entry, stripped of quoting.
    atlas_version = None
    for key, value in lapack.get('define_macros', []):
        if key == 'ATLAS_INFO':
            atlas_version = value[3:-3]
            break
    if atlas_version:
        print('ATLAS version: %s' % atlas_version)

    # fblas:
    fblas_sources = ['fblas.pyf.src']
    fblas_sources.extend(get_g77_abi_wrappers(lapack))
    fblas_sources.extend(get_sgemv_fix(lapack))
    config.add_extension('_fblas',
                         sources=fblas_sources,
                         depends=['fblas_l?.pyf.src'],
                         extra_info=lapack)

    # flapack: includes the deprecated *gegv.f routines kept for
    # backwards compatibility.
    dep_pfx = join('src', 'lapack_deprecations')
    flapack_sources = (['flapack.pyf.src']
                       + get_g77_abi_wrappers(lapack)
                       + [join(dep_pfx, c + 'gegv.f') for c in 'cdsz'])
    config.add_extension('_flapack',
                         sources=flapack_sources,
                         depends=['flapack_user.pyf.src'],
                         extra_info=lapack)

    # cblas/clapack only exist when ATLAS supplies the C interfaces.
    if atlas_version is not None:
        config.add_extension('_cblas',
                             sources=['cblas.pyf.src'],
                             depends=['cblas.pyf.src', 'cblas_l1.pyf.src'],
                             extra_info=lapack)
        config.add_extension('_clapack',
                             sources=['clapack.pyf.src'],
                             depends=['clapack.pyf.src'],
                             extra_info=lapack)

    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'), join('src', 'lu.f')],
                         extra_info=lapack)

    # _interpolative: the id_dist Fortran files contain multiple routines
    # per file; split out the ones below into separate compilation units.
    routines_to_split = [
        'dfftb1', 'dfftf1', 'dffti1', 'dsint1', 'dzfft1',
        'id_srand', 'idd_copyints', 'idd_id2svd0', 'idd_pairsamps',
        'idd_permute', 'idd_permuter', 'idd_random_transf0',
        'idd_random_transf0_inv', 'idd_random_transf_init0',
        'idd_subselect', 'iddp_asvd0', 'iddp_rsvd0', 'iddr_asvd0',
        'iddr_rsvd0', 'idz_estrank0', 'idz_id2svd0', 'idz_permute',
        'idz_permuter', 'idz_random_transf0_inv',
        'idz_random_transf_init0', 'idz_random_transf_init00',
        'idz_realcomp', 'idz_realcomplex', 'idz_reco', 'idz_subselect',
        'idzp_aid0', 'idzp_aid1', 'idzp_asvd0', 'idzp_rsvd0',
        'idzr_asvd0', 'idzr_reco', 'idzr_rsvd0',
        'zfftb1', 'zfftf1', 'zffti1',
    ]
    print('Splitting linalg.interpolative Fortran source files')
    dirname = os.path.split(os.path.abspath(__file__))[0]
    split_names = split_fortran_files(join(dirname, 'src', 'id_dist', 'src'),
                                      routines_to_split)
    interp_sources = [join('src', 'id_dist', 'src', f) for f in split_names]
    config.add_extension('_interpolative',
                         interp_sources + ["interpolative.pyf"],
                         extra_info=lapack)

    # _solve_toeplitz:
    config.add_extension('_solve_toeplitz',
                         sources=['_solve_toeplitz.c'],
                         include_dirs=[get_numpy_include_dirs()])

    config.add_data_dir('tests')

    # Cython BLAS/LAPACK declaration files, shipped for downstream cimports.
    config.add_data_files('cython_blas.pxd')
    config.add_data_files('cython_lapack.pxd')

    wrapper_sources = (['_blas_subroutine_wrappers.f',
                        '_lapack_subroutine_wrappers.f']
                       + get_g77_abi_wrappers(lapack)
                       + get_sgemv_fix(lapack))
    wrapper_includes = numpy_info().get_include_dirs() + [get_python_inc()]
    config.add_library('fwrappers', sources=wrapper_sources,
                       include_dirs=wrapper_includes)

    config.add_extension('cython_blas',
                         sources=['cython_blas.c'],
                         depends=['cython_blas.pyx', 'cython_blas.pxd',
                                  'fortran_defs.h', '_blas_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack)

    config.add_extension('cython_lapack',
                         sources=['cython_lapack.c'],
                         depends=['cython_lapack.pyx', 'cython_lapack.pxd',
                                  'fortran_defs.h', '_lapack_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack)

    config.add_extension('_decomp_update', sources=['_decomp_update.c'])

    # Ship licence/documentation files alongside the package.
    config.add_data_files('src/id_dist/doc/doc.tex')
    config.add_data_files('src/lapack_deprecations/LICENSE')

    return config
Beispiel #60
0
def configuration(parent_package='', top_path=None):
    # Legacy (Python 2-era) numpy.core build configuration.
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    # Dynamically load the generate_umath code generator via the (long
    # deprecated) imp API; 'U' is universal-newlines mode.
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        """Generate config.h for the extension, probing the C environment.

        Regenerates the header only when this setup file is newer than the
        existing target; otherwise re-reads MATHLIB from the cached header.
        Extends ext.libraries with the detected math libraries either way.
        """
        target = join(build_dir, header_dir, 'config.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            tc = generate_testcode(target)
            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)
            result = config_cmd.try_run(tc,
                                        include_dirs=[python_include],
                                        library_dirs=default_lib_dirs)
            if not result:
                raise SystemError,"Failed to test configuration. "\
                      "See previous error messages for more information."

            moredefs = []
            #
            # Find a working math library: try MATHLIB from the environment
            # first, then no library, then -lm, then -lcpml.
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[], ['m'], ['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0, mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc, libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform == 'win32' or os.name == 'nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (
                    a, os.name, sys.platform)
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod',
                                         decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            # Append the collected defines to the generated header.
            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Keep those for backward compatibility for now
            target_f.write("""
#ifdef HAVE_EXPL
#define HAVE_LONGDOUBLE_FUNCS
#endif

#ifdef HAVE_EXPF
#define HAVE_FLOAT_FUNCS
#endif
""")
            target_f.close()
            print 'File:', target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        else:
            # Header is up to date: recover MATHLIB from the cached file.
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # NOTE(review): in the regeneration branch above, mathlibs was
        # already extended onto ext.libraries -- this second extend appears
        # to add the same entries twice on that path; confirm.
        ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir, header_dir, 'numpyconfig.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)

            # NOTE(review): bare attribute access -- a no-op expression,
            # presumably a leftover; the value is used in try_run below.
            config.numpy_include_dirs
            result = config_cmd.try_run(testcode,
                                include_dirs = [python_include] + \
                                                       config.numpy_include_dirs,
                                        library_dirs = default_lib_dirs)

            if not result:
                raise SystemError,"Failed to generate numpy configuration. "\
                      "See previous error messages for more information."

            moredefs = []

            # Check wether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))
            else:
                moredefs.append(('NPY_USE_C99_FORMATS', 0))

            # Add moredefs to header
            target_f = open(target, 'a')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print 'File: %s' % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        """Return a build-time source generator that executes the codegen
        script *module_name* from the codegen directory and registers the
        generated header/doc files as data files."""
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            # Make the codegen directory importable just long enough to
            # load and run the generation script.
            sys.path.insert(0, codegen_dir)
            try:
                mod = __import__(module_name)
                log.info('executing %s', script)
                out_dir = os.path.join(build_dir, header_dir)
                h_file, c_file, doc_file = mod.generate_api(out_dir)
            finally:
                # Always undo the sys.path manipulation, even on failure.
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file, )

        return generate_api

    # Build-time source generators for the array C-API and ufunc C-API
    # headers; each runs the matching script in the codegen directory.
    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext, build_dir):
        """Generate __umath_generated.c in the build tree.

        The target is only rewritten when the generate_umath script is
        newer than the existing file, to avoid spurious rebuilds.
        Returns an empty list: the generated file is a #include'd source,
        not a compilation unit of its own.
        """
        target = join(build_dir, header_dir, '__umath_generated.c')
        # Renamed from 'dir' to avoid shadowing the builtin.
        target_dir = os.path.dirname(target)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            try:
                # try/finally guarantees the handle is closed even if
                # code generation raises.
                f.write(
                    generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            finally:
                f.close()
        return []

    # Install the public headers and expose the C source directory to
    # the compiler.
    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    # Files the core extensions below depend on; touching any of these
    # triggers a rebuild.
    deps = [
        join('src', 'arrayobject.c'),
        join('src', 'arraymethods.c'),
        join('src', 'scalartypes.inc.src'),
        join('src', 'arraytypes.inc.src'),
        join('src', '_signbit.c'),
        join('src', 'ucsnarrow.c'),
        join('include', 'numpy', '*object.h'), 'include/numpy/fenv/fenv.c',
        'include/numpy/fenv/fenv.h',
        join(codegen_dir, 'genapi.py'),
        join(codegen_dir, '*.txt')
    ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    # The core ndarray extension.  Callables in `sources` are build-time
    # generators (numpy.distutils convention): they run during the build
    # and return the file names they produced.
    config.add_extension(
        'multiarray',
        sources=[
            join('src', 'multiarraymodule.c'), generate_config_h,
            generate_numpyconfig_h, generate_numpy_api,
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
            join(codegen_dir, 'generate_numpy_api.py'),
            join('*.py')
        ],
        depends=deps,
    )

    # The ufunc machinery.  The .src files are templated sources expanded
    # by numpy.distutils; the callables are build-time generators.
    config.add_extension(
        'umath',
        sources=[
            generate_config_h,
            generate_numpyconfig_h,
            join('src', 'umathmodule.c.src'),
            generate_umath_c,
            generate_ufunc_api,
            join('src', 'scalartypes.inc.src'),
            join('src', 'arraytypes.inc.src'),
            join('src', 'umath_funcs_c99.inc.src'),
            join('src', 'umath_funcs.inc.src'),
            join('src', 'umath_loops.inc.src'),
        ],
        depends=[
            join('src', 'umath_ufunc_object.inc'),
            generate_umath_py,
            join(codegen_dir, 'generate_ufunc_api.py'),
        ] + deps,
    )

    # Sorting routines for ndarrays (templated .src source).
    config.add_extension(
        '_sort',
        sources=[
            join('src', '_sortmodule.c.src'),
            generate_config_h,
            generate_numpyconfig_h,
            generate_numpy_api,
        ],
    )

    # Fast arithmetic on array scalars.
    config.add_extension(
        'scalarmath',
        sources=[
            join('src', 'scalarmathmodule.c.src'), generate_config_h,
            generate_numpyconfig_h, generate_numpy_api, generate_ufunc_api
        ],
    )

    # Configure blasdot: probe for an optimized BLAS (second arg 0 means
    # "do not raise if not found" — _dotblas is simply skipped then).
    blas_info = get_info('blas_opt', 0)

    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        """Return the _dotblas source list, or None to skip the build."""
        if not blas_info:
            return None  # no extension module will be built
        macros = blas_info.get('define_macros', [])
        if ('NO_ATLAS_INFO', 1) in macros:
            # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return None
        return ext.depends[:1]

    # _dotblas accelerates numpy.dot with ATLAS; get_dotblas_sources
    # returns None at build time when no suitable BLAS was found, so the
    # extension is silently skipped in that case.
    config.add_extension('_dotblas',
                         sources=[get_dotblas_sources],
                         depends=[
                             join('blasdot', '_dotblas.c'),
                             join('blasdot', 'cblas.h'),
                         ],
                         include_dirs=['blasdot'],
                         extra_info=blas_info)

    #    config.add_extension('umath_tests',
    #                         sources = [join('src','umath_tests.c.src'),
    #                                    ],
    #                         depends = [join('blasdot','cblas.h'),] + deps,
    #                         include_dirs = ['blasdot'],
    #                         extra_info = blas_info
    #                         )

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    # Record the SVN revision in a generated version module.
    config.make_svn_version_py()

    return config