Example #1
File: setup.py Project: minrk/distarray
def find_ext_modules():
    # Note: Extension, mpi4py, numpy and fftw_dir are expected to be
    # defined at module level in this setup.py.

    maps = Extension(
        name='distarray.core.maps_fast',
        sources=['distarray/core/maps_fast.c']
    )
    # This extension shows how to call mpi4py's C layer using Cython
    mpi_test = Extension(
        name='distarray.mpi.tests.helloworld',
        sources=['distarray/mpi/tests/helloworld.c'],
        include_dirs = [mpi4py.get_include()]
    )
    
    allext = [maps, mpi_test]
    if fftw_dir is not None:
        py_fftw = Extension(
            name = 'distarray.fft.py_fftw',
            library_dirs = [fftw_dir+"/lib"],
            include_dirs = [
                fftw_dir+"/include", 
                mpi4py.get_include(),
                numpy.get_include()],
            libraries = ['fftw3_mpi', 'fftw3', 'fftw3f_mpi','fftw3f'],
            sources = ['distarray/fft/py_fftw.c'],
        )
        allext.append(py_fftw)
        print "FFTW found, including distarray.fft"
    
    return allext
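The list returned above is presumably what the top-level setup() call consumes; a minimal sketch of that wiring (the setup() metadata below is an assumption, not taken from the original setup.py):

from distutils.core import setup

# Sketch only: hand the discovered extensions to setup().
setup(
    name='distarray',
    packages=['distarray'],
    ext_modules=find_ext_modules(),
)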
Example #2
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().
        
        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy

        settings = COMPILER_SETTINGS.copy()
        settings['include_dirs'] += [numpy.get_include()]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]

        # Ensure a custom location appears first, so we don't get a copy of
        # HDF5 from some default location in COMPILER_SETTINGS
        if config.hdf5 is not None:
            settings['include_dirs'].insert(0, op.join(config.hdf5, 'include'))
            settings['library_dirs'].insert(0, op.join(config.hdf5, 'lib'))

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']

        def make_extension(module):
            sources = [localpath('h5py', module + '.pyx')] + EXTRA_SRC.get(
                module, [])
            return Extension('h5py.' + module, sources, **settings)

        return [make_extension(m) for m in MODULES]
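As the docstring says, the returned Extension list is meant for cythonize(); a minimal sketch of that step, assuming Cython is installed and config is the same build-configuration object used above:

from Cython.Build import cythonize

# Sketch only: turn the .pyx-based Extensions into compilable C Extensions.
ext_modules = cythonize(_make_extensions(config), language_level=3)
# setup(ext_modules=ext_modules, ...) would then be called as usual.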
Example #3
def make_extension(ext_name,
                   ext_libraries=(),
                   is_directory=False,
                   enableGPU=0):
    try:
        sb.check_call(["ld", "-ljdftx"], stderr=open("/dev/null"))
        if enableGPU:
            sb.check_call(["ld", "-ljdftx_gpu"])
        jdftxLibDir = ""
    except sb.CalledProcessError:
        jdftxLibDir = installJDFTx(isRoot, enableGPU)

    jdftxIncDirs = ["jdftx", ".", mpi4py.get_include()]

    if enableGPU:  # adds only the default directory for now
        jdftxIncDirs.append("/usr/local/cuda/include")

    ext_path = ext_name
    if is_directory:
        ext_path += ".__init__"
    return Extension(
        ext_name,
        [ext_path.replace(".", os.path.sep) + ".pyx"],
        include_dirs=jdftxIncDirs,
        language="c++",
        libraries=ext_libraries,
        library_dirs=[jdftxLibDir],
        runtime_library_dirs=[jdftxLibDir],
        extra_compile_args=['-std=c++0x', '-O3', '-DMPI_ENABLED'] +
        ['-DGPU_ENABLED'] * enableGPU,
        #depends=["jdftx/libjdftx.so"],
    )
Example #4
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().
        
        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy

        settings = COMPILER_SETTINGS.copy()
        settings['include_dirs'] += [numpy.get_include()]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]
            
        # Ensure a custom location appears first, so we don't get a copy of
        # HDF5 from some default location in COMPILER_SETTINGS
        if config.hdf5 is not None:
            settings['include_dirs'].insert(0, op.join(config.hdf5, 'include'))
            settings['library_dirs'].insert(0, op.join(config.hdf5, 'lib'))

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']
        
        def make_extension(module):
            sources = [localpath('h5py', module+'.pyx')] + EXTRA_SRC.get(module, [])
            return Extension('h5py.'+module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #5
def make_extension(ext_name, ext_libraries=(), is_directory=False, enableGPU=0):
    try:
        sb.check_call(["ld", "-ljdftx"], stderr=open("/dev/null"))
        if enableGPU:
            sb.check_call(["ld", "-ljdftx_gpu"])
        jdftxLibDir = ""
    except sb.CalledProcessError:
        jdftxLibDir = installJDFTx(isRoot, enableGPU)

    jdftxIncDirs = ["jdftx", ".", mpi4py.get_include()]

    if enableGPU: # adds only the default directory for now
        jdftxIncDirs.append("/usr/local/cuda/include")

    ext_path = ext_name
    if is_directory:
        ext_path += ".__init__"
    return Extension(
        ext_name,
        [ext_path.replace(".", os.path.sep) + ".pyx"],
        include_dirs=jdftxIncDirs,
        language="c++",
        libraries=ext_libraries,
        library_dirs=[jdftxLibDir],
        runtime_library_dirs=[jdftxLibDir],
        extra_compile_args=['-std=c++0x', '-O3', '-DMPI_ENABLED'] +
                            ['-DGPU_ENABLED'] * enableGPU,
        #depends=["jdftx/libjdftx.so"],
    )
Example #6
def gen_parmetis_ext():
    from distutils.sysconfig import get_config_var
    _ext_suffix = get_config_var('EXT_SUFFIX')
    if _ext_suffix is None:
        # py2
        from distutils.ccompiler import get_default_compiler, new_compiler
        _ext_suffix = new_compiler(get_default_compiler()).shared_lib_extension
    _src_root = _join('pymetis_mesh', 'src')
    _metis_src = _join(_src_root, 'metis')
    _srcs = [_join('pymetis_mesh', '_parwrapper.c')]
    _srcs += glob.glob(_join(_src_root, 'libparmetis', '*.c'))
    _inc_dirs = [
        '.',
        numpy.get_include(),
        mpi4py.get_include(),
        _join(_src_root, 'include'),
        _join(_src_root, 'libparmetis'),
        _join(_metis_src, 'GKlib'),
        _join(_metis_src, 'libmetis'),
        _join(_metis_src, 'include'),
    ]
    # The following is not portable, but MPI runs will most likely be on
    # Linux, and since the package is built for HPC with large PDE problems,
    # it is safe to assume Linux.
    return [
        Extension('pymetis_mesh._parwrapper',
                  _srcs,
                  include_dirs=_inc_dirs,
                  libraries=['mpi', ':_wrapper{}'.format(_ext_suffix)],
                  extra_link_args=['-Wl,-rpath=$ORIGIN/.'])
    ]
Example #7
File: setup.py Project: ljuillen/janus
def mpicc_show():
    """Use ``mpicc --show`` to retrieve the mpicc arguments.

    Works with both openmpi and mpich.
    Returns a dictionary that can be passed to Extension().
    """
    import mpi4py
    import subprocess
    mpicc = mpi4py.get_config()['mpicc']
    mpicc_show = subprocess.check_output([mpicc, '-show']).decode().strip()
    # Strip the first token from the command line, which is the compiler name
    mpicc_show = re.sub(r'\S+\s', '', mpicc_show, count=1)

    def my_filter(regex, iterable, group=0):
        matching = []
        non_matching = []
        for item in iterable:
            m = re.search(regex, item)
            if m is not None:
                matching.append(m.group(group))
            else:
                non_matching.append(item)
        return matching, non_matching

    cflags = split_quoted(mpicc_show)
    incdirs, cflags = my_filter('^-I(.*)', cflags, 1)
    libdirs, cflags = my_filter('^-L(.*)', cflags, 1)
    ldflags, cflags = my_filter('^-W?l.*', cflags)
    ldflags += cflags
    incdirs.append(mpi4py.get_include())

    return {'include_dirs': incdirs,
            'library_dirs': libdirs,
            'extra_compile_args': cflags,
            'extra_link_args': ldflags}
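Since the docstring states the returned dictionary can be passed to Extension(), a minimal usage sketch (the module and source names are placeholders, not from the janus project):

from distutils.extension import Extension

# Hypothetical extension built with the flags discovered by mpicc_show().
ext = Extension('mymodule', sources=['mymodule.pyx'], **mpicc_show())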
Example #8
    def run(self):
        import numpy
        import mpi4py

        # not sure if necessary
        from configparser import ConfigParser
        import os

        lammps_config = ConfigParser()
        # Give precedence to config file in user home config directory
        if os.path.isfile(os.path.expanduser('~/.config/lammps-site.cfg')):
            lammps_config.read(os.path.expanduser('~/.config/lammps-site.cfg'))
        else:
            lammps_config.read('lammps.cfg')

        def config_to_list(key1, key2):
            return [
                s.strip() for s in lammps_config.get(key1, key2).split(',')
            ]

        # Add mpi4py, numpy, and custom headers to include_dirs
        self.include_dirs.extend([numpy.get_include(), mpi4py.get_include()])
        self.include_dirs.extend(config_to_list('lammps',
                                                'lammps_include_dir'))
        self.include_dirs.extend(config_to_list('mpi', 'mpi_include_dir'))

        self.library_dirs.extend(config_to_list('lammps',
                                                'lammps_library_dir'))
        self.library_dirs.extend(config_to_list('mpi', 'mpi_library_dir'))

        self.libraries.extend(config_to_list('lammps', 'lammps_library'))
        self.libraries.extend(config_to_list('mpi', 'mpi_library'))

        # Call original build_ext command
        _build_ext.run(self)
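The run() method above reads an INI-style lammps.cfg (or ~/.config/lammps-site.cfg); a hypothetical file matching the keys it queries could look like this, with all paths and library names being placeholders:

[lammps]
lammps_include_dir = /usr/local/include/lammps
lammps_library_dir = /usr/local/lib
lammps_library = lammps

[mpi]
mpi_include_dir = /usr/include/mpi
mpi_library_dir = /usr/lib
mpi_library = mpi, mpi_cxx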
Example #9
def get_mpi_flags():
    """Returns mpi_inc_dirs, mpi_compile_args, mpi_link_args.
    """
    global HAVE_MPI
    mpi_inc_dirs = []
    mpi_compile_args = []
    mpi_link_args = []
    if not HAVE_MPI:
        return mpi_inc_dirs, mpi_compile_args, mpi_link_args
    try:
        mpic = 'mpic++'
        if compiler == 'intel':
            link_args = check_output([mpic, '-cc=icc', '-link_info'],
                                     universal_newlines=True).strip()
            link_args = link_args[3:]
            compile_args = check_output([mpic, '-cc=icc', '-compile_info'],
                                        universal_newlines=True).strip()
            compile_args = compile_args[3:]
        else:
            link_args = check_output([mpic, '--showme:link'],
                                     universal_newlines=True).strip()
            compile_args = check_output([mpic, '--showme:compile'],
                                        universal_newlines=True).strip()
    except:  # noqa: E722
        print('-' * 80)
        print("Unable to run mpic++ correctly, skipping parallel build")
        print('-' * 80)
        HAVE_MPI = False
    else:
        mpi_link_args.extend(link_args.split())
        mpi_compile_args.extend(compile_args.split())
        mpi_inc_dirs.append(mpi4py.get_include())

    return mpi_inc_dirs, mpi_compile_args, mpi_link_args
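A minimal sketch of how the three returned values are typically wired into an Extension (module and source names are placeholders):

from distutils.extension import Extension

mpi_inc_dirs, mpi_compile_args, mpi_link_args = get_mpi_flags()
ext = Extension('pkg.parallel', sources=['pkg/parallel.pyx'],
                include_dirs=mpi_inc_dirs,
                extra_compile_args=mpi_compile_args,
                extra_link_args=mpi_link_args)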
Example #10
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().

        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy
        import pkgconfig

        settings = COMPILER_SETTINGS.copy()

        # If a custom HDF5 location is specified, prevent pkg-config and
        # fallback locations from appearing in the settings
        if config.hdf5 is not None:
            settings['include_dirs'].insert(0, op.join(config.hdf5, 'include'))
            settings['library_dirs'].insert(0, op.join(config.hdf5, 'lib'))
        else:
            try:
                if pkgconfig.exists('hdf5'):
                    pkgcfg = pkgconfig.parse("hdf5")
                    settings['include_dirs'].extend(pkgcfg['include_dirs'])
                    settings['library_dirs'].extend(pkgcfg['library_dirs'])
                    settings['define_macros'].extend(pkgcfg['define_macros'])
            except EnvironmentError:
                if os.name != 'nt':
                    print(
                        "h5py requires pkg-config unless the HDF5 path is explicitly specified",
                        file=sys.stderr)
                    raise
            settings['include_dirs'].extend(FALLBACK_PATHS['include_dirs'])
            settings['library_dirs'].extend(FALLBACK_PATHS['library_dirs'])

        try:
            numpy_includes = numpy.get_include()
        except AttributeError:
            # if numpy is not installed get the headers from the .egg directory
            import numpy.core
            numpy_includes = os.path.join(os.path.dirname(numpy.core.__file__),
                                          'include')

        settings['include_dirs'] += [numpy_includes]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']

        def make_extension(module):
            sources = [localpath('h5py', module + '.pyx')] + EXTRA_SRC.get(
                module, [])
            return Extension('h5py.' + module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #11
def build(setup_kwargs):
    # * root dir
    root_dir = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))), "pyfk")
    # * MPI mode
    compile_time_env = {"PYFK_USE_MPI": "0"}
    PYFK_USE_MPI = os.getenv("PYFK_USE_MPI", "0")
    mpi_link_args = []
    mpi_include_dirs = [np.get_include()]
    if PYFK_USE_MPI == "1":
        os.environ["CC"] = "mpicc"
        compile_time_env["PYFK_USE_MPI"] = "1"
        mpi_link_args.append("-lmpi")
        try:
            import mpi4py
        except ImportError:
            raise Exception(
                "please install mpi4py first to enable the MPI mode!")
        mpi_include_dirs.append(mpi4py.get_include())

    # * cysignals
    def get_include_cysignals():
        import cysignals
        return os.path.join(os.path.dirname(cysignals.__file__), 'include')

    mpi_include_dirs.append(get_include_cysignals())

    # * only for debug purpose
    # ref to https://cython.readthedocs.io/en/latest/src/tutorial/profiling_tutorial.html#enabling-line-tracing
    CYTHON_TRACE = 0
    PYFK_USE_CYTHON_TRACE = os.getenv("PYFK_USE_CYTHON_TRACE", "0")
    if PYFK_USE_CYTHON_TRACE == "1":
        CYTHON_TRACE = 1

    # * extensions
    extensions = [
        Extension("pyfk.taup.taup", [os.path.join(root_dir, "taup/taup.pyx")],
                  include_dirs=[np.get_include()],
                  define_macros=[("CYTHON_TRACE", str(CYTHON_TRACE))],
                  language="c"),
        Extension("pyfk.gf.waveform_integration",
                  [os.path.join(root_dir, "gf/waveform_integration.pyx")],
                  include_dirs=mpi_include_dirs,
                  define_macros=[("CYTHON_TRACE", str(CYTHON_TRACE))],
                  language="c",
                  extra_link_args=mpi_link_args),
    ]
    # * update setup
    setup_kwargs.update(
        dict(ext_modules=cythonize(extensions,
                                   language_level=3,
                                   annotate=False,
                                   compile_time_env=compile_time_env),
             zip_safe=False))
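A minimal sketch of how a build frontend would call this hook; the empty dict stands in for the keyword arguments the backend (e.g. Poetry's build-script support) normally supplies:

setup_kwargs = {}        # populated by the build backend in practice
build(setup_kwargs)      # adds ext_modules (cythonized) and zip_safe
# The backend then effectively calls setup(**setup_kwargs).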
Example #12
 def dijitso_jit(jitable,
                 name,
                 params,
                 generate=None,
                 send=None,
                 receive=None,
                 wait=None):
     name = name.replace("dolfin", "multiphenics")
     params['build']['include_dirs'].append(multiphenics_root)
     params['build']['include_dirs'].append(mpi4py.get_include())
     return original_dijitso_jit(jitable, name, params, generate, send,
                                 receive, wait)
Example #13
 def dijitso_jit(jitable,
                 name,
                 params,
                 generate=None,
                 send=None,
                 receive=None,
                 wait=None):
     name = name.replace("dolfin", package_name)
     params["build"]["include_dirs"].append(mpi4py.get_include())
     params["build"]["include_dirs"].append(petsc4py.get_include())
     params["build"]["include_dirs"].extend(include_dirs)
     return original_dijitso_jit(jitable, name, params, generate, send,
                                 receive, wait)
Example #14
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().

        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy
        import pkgconfig

        settings = COMPILER_SETTINGS.copy()

        try:
            if pkgconfig.exists("hdf5"):
                pkgcfg = pkgconfig.parse("hdf5")
                settings["include_dirs"].extend(pkgcfg["include_dirs"])
                settings["library_dirs"].extend(pkgcfg["library_dirs"])
                settings["define_macros"].extend(pkgcfg["define_macros"])
        except EnvironmentError:
            pass

        try:
            numpy_includes = numpy.get_include()
        except AttributeError:
            # if numpy is not installed get the headers from the .egg directory
            import numpy.core

            numpy_includes = os.path.join(os.path.dirname(numpy.core.__file__), "include")

        settings["include_dirs"] += [numpy_includes]
        if config.mpi:
            import mpi4py

            settings["include_dirs"] += [mpi4py.get_include()]

        # Ensure a custom location appears first, so we don't get a copy of
        # HDF5 from some default location in COMPILER_SETTINGS
        if config.hdf5 is not None:
            settings["include_dirs"].insert(0, op.join(config.hdf5, "include"))
            settings["library_dirs"].insert(0, op.join(config.hdf5, "lib"))

        # TODO: should this only be done on UNIX?
        if os.name != "nt":
            settings["runtime_library_dirs"] = settings["library_dirs"]

        def make_extension(module):
            sources = [localpath("h5py", module + ".pyx")] + EXTRA_SRC.get(module, [])
            return Extension("h5py." + module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #15
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().
        
        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy
        import pkgconfig

        settings = COMPILER_SETTINGS.copy()

        try:
            if pkgconfig.exists('hdf5'):
                pkgcfg = pkgconfig.parse("hdf5")
                settings['include_dirs'].extend(pkgcfg['include_dirs'])
                settings['library_dirs'].extend(pkgcfg['library_dirs'])
                settings['define_macros'].extend(pkgcfg['define_macros'])
        except EnvironmentError:
            pass

        try:
            numpy_includes = numpy.get_include()
        except AttributeError:
            # if numpy is not installed get the headers from the .egg directory
            import numpy.core
            numpy_includes = os.path.join(os.path.dirname(numpy.core.__file__),
                                          'include')

        settings['include_dirs'] += [numpy_includes]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]

        # Ensure a custom location appears first, so we don't get a copy of
        # HDF5 from some default location in COMPILER_SETTINGS
        if config.hdf5 is not None:
            settings['include_dirs'].insert(0, op.join(config.hdf5, 'include'))
            settings['library_dirs'].insert(0, op.join(config.hdf5, 'lib'))

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']

        def make_extension(module):
            sources = [localpath('h5py', module + '.pyx')] + EXTRA_SRC.get(
                module, [])
            return Extension('h5py.' + module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #16
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().

        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy
        import pkgconfig

        settings = COMPILER_SETTINGS.copy()

        # If a custom HDF5 location is specified, prevent pkg-config and
        # fallback locations from appearing in the settings
        if config.hdf5 is not None:
            settings['include_dirs'].insert(0, op.join(config.hdf5, 'include'))
            settings['library_dirs'].insert(0, op.join(config.hdf5, 'lib'))
        else:
            try:
                if pkgconfig.exists('hdf5'):
                    pkgcfg = pkgconfig.parse("hdf5")
                    settings['include_dirs'].extend(pkgcfg['include_dirs'])
                    settings['library_dirs'].extend(pkgcfg['library_dirs'])
                    settings['define_macros'].extend(pkgcfg['define_macros'])
            except EnvironmentError:
                pass
            settings['include_dirs'].extend(FALLBACK_PATHS['include_dirs'])
            settings['library_dirs'].extend(FALLBACK_PATHS['library_dirs'])

        try:
            numpy_includes = numpy.get_include()
        except AttributeError:
            # if numpy is not installed get the headers from the .egg directory
            import numpy.core
            numpy_includes = os.path.join(os.path.dirname(numpy.core.__file__), 'include')

        settings['include_dirs'] += [numpy_includes]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']

        def make_extension(module):
            sources = [localpath('h5py', module+'.pyx')] + EXTRA_SRC.get(module, [])
            return Extension('h5py.'+module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #17
    def run(self):
        """Overridden method. Runs the build.
        Library directories and include directories are checked here, first.
        """
        # Check we can find the OSKAR library.
        directory = self.dir_contains('oskar.', self.library_dirs)
        if not directory:
            raise RuntimeError(
                "Could not find OSKAR library. "
                "Check that OSKAR has already been installed on this system, "
                "and set the library path to build_ext "
                "using -L or --library-dirs")
        self.rpath.append(directory)
        self.libraries.append('oskar')
        self.libraries.append('oskar_apps')
        self.libraries.append('oskar_binary')
        self.libraries.append('oskar_settings')
        if self.dir_contains('oskar_ms.', self.library_dirs):
            self.libraries.append('oskar_ms')

        # Check we can find the OSKAR headers.
        header = self.find_file(
            join('oskar', 'oskar_version.h'), self.include_dirs)
        if not header:
            raise RuntimeError(
                "Could not find oskar/oskar_version.h. "
                "Check that OSKAR has already been installed on this system, "
                "and set the include path to build_ext "
                "using -I or --include-dirs")
        self.include_dirs.insert(0, dirname(header))
        self.include_dirs.insert(0, get_include())

        # Optionally include mpi4py support
        if mpi4py:
            self.define = [('OSKAR_HAVE_MPI4PY', 1), ('OSKAR_HAVE_MPI', 1),
                           ('OMPI_SKIP_MPICXX', 1), ('MPICH_SKIP_MPICXX', 1)]
            self.include_dirs.insert(0, mpi4py.get_include())

        # Check the version of OSKAR is compatible.
        version = self.get_oskar_version(header)
        if not version.startswith(OSKAR_COMPATIBILITY_VERSION):
            raise RuntimeError(
                "The version of OSKAR found is not compatible with oskarpy. "
                "Found OSKAR %s, but require OSKAR %s." % (
                    version, OSKAR_COMPATIBILITY_VERSION)
            )
        build_ext.run(self)
Example #18
    def finalize_options(self):
        _build_ext.finalize_options(self)
        

        if not is_configured:
            configure_install(self)
            print_config()
        
        numpy_include = [numpy.get_include()]
        if has_mpi4py:
            mpi4py_include = [mpi4py.get_include()]
        else:
            mpi4py_include = []
        
        subs = ['HSS', 'HODLR', 'BLR', 'dense', 'misc', 'sparse', 'clustering']
        strumpackincsubdirs = [os.path.join(strumpackincdir, x) for x in subs]
        include_dirs=([strumpackincdir,] + strumpackincsubdirs + 
                      numpy_include + mpi4py_include)
        
        library_dirs = [strumpacklnkdir] + extlib_dirs

        include_dirs = [x for x in include_dirs if len(x) > 0]
        library_dirs = [x for x in library_dirs if len(x) > 0]
        libraries = ['strumpack', 'stdc++']

        sclpk = os.getenv("SCALAPACKLINK")
        if sclpk is not None:
            print("SCALAPAK flag is given:" + sclpk)
            for x in sclpk.split():
                if x.startswith('-L'): 
                    library_dirs.append(x[2:])
                elif x.startswith('-l'):
                    libraries.append(x[2:])
                else:
                    assert False, "unsupported option :" + x

        for x in self.extensions:
            x.include_dirs.extend(include_dirs)
            x.library_dirs.extend(library_dirs)
            x.libraries.extend(libraries)

        #os.environ['CC'] = mpicc_command
        os.environ['CC'] = mpicxx_command
        os.environ['CXX'] = mpicxx_command

        self.inplace = 0
Example #19
File: setup.py Project: ljuillen/janus
def mpicc_showme():
    """Use ``mpicc --showme`` to retrieve the mpicc arguments.

    Works with openmpi, not mpich.
    Returns a dictionary that can be passed to Extension().
    """

    import mpi4py
    from subprocess import check_output
    mpicc = mpi4py.get_config()['mpicc']

    def call_mpicc_showme(arg):
        out = check_output([mpicc, '--showme:'+arg])
        return out.decode('ascii').split()

    incdirs = call_mpicc_showme('incdirs')
    incdirs.append(mpi4py.get_include())

    return {'include_dirs': incdirs,
            'library_dirs': call_mpicc_showme('libdirs'),
            'extra_compile_args': call_mpicc_showme('compile'),
            'extra_link_args': call_mpicc_showme('link')}
Example #20
def generate_wrapper(self):
    '''
    Run SWIG to regenerate the wrapper sources.
    '''
    if dry_run or verbose:
        print("generating SWIG wrapper")
    def ifiles():
        ifiles = os.listdir()
        ifiles = [x for x in ifiles if x.endswith('.i')]
        ifiles = [x for x in ifiles if not x.startswith('#')]
        ifiles = [x for x in ifiles if not x.startswith('.')]                
        return ifiles

    def check_new(ifile):
        wfile = ifile[:-2]+'_wrap.cxx'
        if not os.path.exists(wfile):
            return True
        return os.path.getmtime(ifile) > os.path.getmtime(wfile)

    swig_command = find_command('swig')
    if swig_command is None:
        assert False, "SWIG is not installed"

    pwd = chdir(os.path.join(rootdir, 'src', 'STRUMPACK'))

    swigflag = '-Wall -c++ -python -fastproxy -olddefs -keyword'.split(' ')

    stflag = ['-I'+ strumpackincdir]

    if has_mpi4py:
        stflag.append('-I'+ mpi4py.get_include())

    for file in ifiles():
        if not check_new(file):
            continue
        command = [swig_command] + swigflag + stflag + [file]
        make_call(command)

    os.chdir(pwd)
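For a single interface file, the command assembled above amounts to a list of the following shape (the include path and .i filename are placeholders; make_call() is the project's subprocess helper):

import mpi4py

command = ['swig', '-Wall', '-c++', '-python', '-fastproxy', '-olddefs',
           '-keyword', '-I/path/to/strumpack/include',
           '-I' + mpi4py.get_include(), 'example_interface.i']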
Example #21
    def _make_extensions(config):
        """ Produce a list of Extension instances which can be passed to
        cythonize().

        This is the point at which custom directories, MPI options, etc.
        enter the build process.
        """
        import numpy

        settings = COMPILER_SETTINGS.copy()

        settings['include_dirs'][:0] = config.hdf5_includedirs
        settings['library_dirs'][:0] = config.hdf5_libdirs
        settings['define_macros'].extend(config.hdf5_define_macros)

        try:
            numpy_includes = numpy.get_include()
        except AttributeError:
            # if numpy is not installed get the headers from the .egg directory
            import numpy.core
            numpy_includes = os.path.join(os.path.dirname(numpy.core.__file__),
                                          'include')

        settings['include_dirs'] += [numpy_includes]
        if config.mpi:
            import mpi4py
            settings['include_dirs'] += [mpi4py.get_include()]

        # TODO: should this only be done on UNIX?
        if os.name != 'nt':
            settings['runtime_library_dirs'] = settings['library_dirs']

        def make_extension(module):
            sources = [localpath('h5py', module + '.pyx')] + EXTRA_SRC.get(
                module, [])
            settings['libraries'] += EXTRA_LIBRARIES.get(module, [])
            return Extension('h5py.' + module, sources, **settings)

        return [make_extension(m) for m in MODULES]
Example #22
import mpi4py
print( mpi4py.get_include() )
Example #23
inc_dirs, lib_dirs, libs = get_mpi_flags()

# Add funtofem-dev/lib as a runtime directory
runtime_lib_dirs = get_global_dir(['lib'])

# Relative paths for the include/library directories
rel_inc_dirs = ['src', 'include']
rel_lib_dirs = ['lib']
libs.extend(['transfer_schemes'])

# Convert from relative to absolute directories
inc_dirs.extend(get_global_dir(rel_inc_dirs))
lib_dirs.extend(get_global_dir(rel_lib_dirs))

# Add the numpy/mpi4py directories
inc_dirs.extend([numpy.get_include(), mpi4py.get_include()])

exts = []
for mod in ['TransferScheme']:
    exts.append(
        Ext('funtofem.%s' % (mod),
            sources=['funtofem/%s.pyx' % (mod)],
            include_dirs=inc_dirs,
            libraries=libs,
            library_dirs=lib_dirs,
            runtime_library_dirs=runtime_lib_dirs))

for e in exts:
    e.cython_directives = {"embedsignature": True, "binding": True}

setup(name='funtofem',
Example #24
          "pfespace", "pgridfunc",
          "plinearform", "pbilinearform", "pnonlinearform",
          "hypre", "restriction", "prestriction"]

if add_pumi != '':
    modules.append("pumi")
extra_compile_args = [cxx11flag, '-DSWIG_TYPE_TABLE=PyMFEM']

sources = {name: [name + "_wrap.cxx"] for name in modules}

proxy_names = {name: '_'+name for name in modules}

import numpy
numpyinc = numpy.get_include()
import mpi4py
mpi4pyinc = mpi4py.get_include()

libraries    = ['mfem', 'HYPRE', 'metis']
include_dirs = [mfembuilddir, mfemincdir, numpyinc, mpi4pyinc, hypreinc, metisinc]
#                mpichinc, hypreinc,]
library_dirs = [mfemlnkdir, hyprelib, metis5lib,]

if add_pumi != '':
    include_dirs.append(pumiinc)
    library_dirs.append(pumilib)

if add_strumpack:
    modules.append("strumpack")
    extra_compile_args.append('-std=c++11')
    sources["strumpack"] = ["strumpack_wrap.cxx"]
    proxy_names["strumpack"] = "_strumpack"
Example #25
File: setup.py Project: SMG2S/SMG2S
#!/usr/bin/env python

from distutils.core import setup, Extension
import mpi4py
from glob import glob

mpi_incdir = mpi4py.get_include()
print(mpi_incdir)

with open('README.txt') as file:
    long_description = file.read()

module = Extension(
    '_smg2s',
    sources=['smg2s/smg2s_wrap.cxx'],
    include_dirs=[mpi_incdir, './smg2s/include'],
    swig_opts=['-I./smg2s/include'],
    extra_compile_args=['-stdlib=libc++', '-std=c++0x'],
)

setup(
    name='smg2s',
    version='1.0.1',
    author="Xinzhe Wu",
    author_email='*****@*****.**',
    description='SMG2S: Scalable Matrix Generator with Given Spectrum',
    long_description=long_description,
    ext_modules=[module],
    py_modules=["smg2s/smg2s"],
    url='http://smg2s.github.io',
    license='GNU Lesser General Public License v3.0',
Example #26
#!/usr/bin/env python

"""
setup.py file for DMLL
"""

from distutils.core import setup, Extension
import commands
import mpi4py

#We need to create the wrapper script DMLLCpp_wrap.cxx, because the mpi4py version may be different depending on the operating system and version
get_mpi4py_folder = commands.getstatusoutput("cp -r $MPI_INCLUDE/mpi4py .".replace("$MPI_INCLUDE", mpi4py.get_include()))
create_swig_file = commands.getstatusoutput("swig -c++ -python DMLLCpp.i")

#Get the mpi compiler arguments
mpi_compile_args = commands.getstatusoutput("mpic++ -showme")[1].split()[1:] 

#Compile DMLLCpp
DMLLCpp_module = Extension('_DMLLCpp',
                           sources=['DMLLCpp_wrap.cxx'],
                           include_dirs = ['/usr/local/include'],
                           extra_compile_args=['-std=c++11'] + mpi_compile_args,
                           extra_link_args =['-std=c++11'] + mpi_compile_args,
)

setup (name = 'DMLLCpp',
       version = '0.1',
       author      = "SWIG Docs",
       description = """Simple swig example from docs""",
       ext_modules = [DMLLCpp_module],       
       py_modules = ["DMLLCpp"],
Example #27
# libraries. Locate them manually if GA was configured to use them.
linalg_include = []
linalg_library = []
linalg_lib = []
if 'Accelerate' in ga_clibs or 'vecLib' in ga_clibs:
    path = "/System/Library/Frameworks/Accelerate.framework/Frameworks/vecLib.framework/Versions/A"
    linalg_include = []
    if os.path.exists(path):
        linalg_library = [path]
        linalg_lib = ["LAPACK","BLAS"]
    # remove '-framework Accelerate' from flags
    ga_clibs = ga_clibs.replace("-framework","")
    ga_clibs = ga_clibs.replace("Accelerate","")
    ga_clibs = ga_clibs.replace("vecLib","")

include_dirs = [numpy.get_include(), mpi4py.get_include()]
library_dirs = []
libraries = []

# add the GA stuff
for dir in ga_cppflags.split():
    dir = dir.strip()
    include_dirs.append(dir.replace("-I",""))
for dir in ga_ldflags.split():
    dir = dir.strip()
    library_dirs.append(dir.replace("-L",""))
for part in ga_clibs.split():
    part = part.strip()
    if '-L' in part:
        library_dirs.append(part.replace("-L",""))
    elif '-l' in part:
Example #28
File: setup.py Project: hdkire/h5py
        COMPILER_SETTINGS["library_dirs"] += [op.join(HDF5, "dll")]
else:
    COMPILER_SETTINGS = {
        "libraries": ["hdf5", "hdf5_hl"],
        "include_dirs": [numpy.get_include(), localpath("lzf")],
        "library_dirs": [],
        "define_macros": [("H5_USE_16_API", None)],
    }
    if HDF5 is not None:
        COMPILER_SETTINGS["include_dirs"] += [op.join(HDF5, "include")]
        COMPILER_SETTINGS["library_dirs"] += [op.join(HDF5, "lib")]
    elif sys.platform == "darwin":
        COMPILER_SETTINGS["include_dirs"] += ["/opt/local/include"]
        COMPILER_SETTINGS["library_dirs"] += ["/opt/local/lib"]
    if MPI:
        COMPILER_SETTINGS["include_dirs"] += [mpi4py.get_include()]
    COMPILER_SETTINGS["runtime_library_dirs"] = [op.abspath(x) for x in COMPILER_SETTINGS["library_dirs"]]

MODULES = [
    "defs",
    "_errors",
    "_objects",
    "_proxy",
    "h5fd",
    "h5z",
    "h5",
    "h5i",
    "h5r",
    "utils",
    "_conv",
    "h5t",
Example #29
# ---

import mpi4py
try:
    mpi4py.get_include()
except:
    pass
try:
    mpi4py.get_config()
except:
    pass

# ---


def test_mp4py_rc():
    import mpi4py.rc
    mpi4py.rc(
        initialize=True,
        threads=True,
        thread_level='multiple',
        finalize=None,
        fast_reduce=True,
        recv_mprobe=True,
        errors='exception',
    )
    try:
        mpi4py.rc(qwerty=False)
    except TypeError:
        pass
    else:
Example #30
import mpi4py
from mpi4py import MPI
from neuron import h

print(mpi4py.get_config())
print(mpi4py.get_include())

h.load_file("stdlib.hoc")
h.load_file("stdrun.hoc")
root = 0
pc = h.ParallelContext()
id = int(pc.id())
nhost = int(pc.nhost())
print("I am %i of %i" % (id, nhost))
v = h.Vector(1)
if id == root:
    v.x[0] = 17
pc.broadcast(v, root)
print(v.x[0])
Example #31
File: setup.py Project: splevine/pyFFTW
    def __init__(self, compiler):
        log.debug("Compiler include_dirs: %s" % compiler.include_dirs)
        if hasattr(compiler, "initialize"):
            compiler.initialize()  # to set all variables
            log.debug("Compiler include_dirs after initialize: %s" %
                      compiler.include_dirs)
        self.compiler = compiler

        log.debug(
            sys.version)  # contains the compiler used to build this python

        # members with the info for the outside world
        self.include_dirs = get_include_dirs()
        self.objects = []
        self.libraries = []
        self.library_dirs = get_library_dirs()
        self.linker_flags = []
        self.compile_time_env = {}

        if self.compiler.compiler_type == 'msvc':
            if (sys.version_info.major, sys.version_info.minor) < (3, 3):
                # The check above is a nasty hack. We're using the python
                # version as a proxy for the MSVC version. MSVC 2008 doesn't
                # have stdint.h, so our bundled copy is needed; MSVC 2010 does.
                #
                # We need to add the path to msvc includes
                msvc_2008_path = (os.path.join(os.getcwd(), 'include',
                                               'msvc_2008'))
                self.include_dirs.append(msvc_2008_path)
            elif (sys.version_info.major, sys.version_info.minor) < (3, 5):
                # Actually, it seems that appveyor doesn't have a stdint that
                # works, so even for 2010 we use our own (hacked) version
                # of stdint.
                # This should be pretty safe in whatever case.
                msvc_2010_path = (os.path.join(os.getcwd(), 'include',
                                               'msvc_2010'))
                self.include_dirs.append(msvc_2010_path)

                # To avoid http://bugs.python.org/issue4431
                #
                # C:\Program Files\Microsoft
                # SDKs\Windows\v7.1\Bin\x64\mt.exe -nologo -manifest
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # -outputresource:C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe;1
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # : general error c1010070: Failed to load and parse
                # the manifest. The system cannot find the file
                # specified.
                self.compiler.ldflags_shared.append('/MANIFEST')

        if get_platform().startswith('linux'):
            # needed at least libm for linker checks to succeed
            self.libraries.append('m')

        # main fftw3 header is required
        if not self.has_header(['fftw3.h'], include_dirs=self.include_dirs):
            raise CompileError("Could not find the FFTW header 'fftw3.h'")

        # mpi is optional
        # self.support_mpi = self.has_header(['mpi.h', 'fftw3-mpi.h'])
        # TODO enable check when wrappers are included in Pyfftw
        self.support_mpi = False

        if self.support_mpi:
            try:
                import mpi4py
                self.include_dirs.append(mpi4py.get_include())
            except ImportError:
                log.error(
                    "Could not import mpi4py. Skipping support for FFTW MPI.")
                self.support_mpi = False

        self.search_dependencies()
Example #32
import mpi4py
import subprocess

subprocess.call(['cython', '_parallel_fft.pyx',
                 '-I' + mpi4py.get_include(),
                 '-I../../'])
Example #33
    def build_worker(self):
        
        path = os.path.abspath(self.get_path_to_results())
        codefile = os.path.join(path,"code.o")
        interfacefile = os.path.join(path,"interface.o")
        headerfile = os.path.join(path,"worker_code.h")
        self.sofile = os.path.join(path,"interface.so")
        self.interfacec_o_file = os.path.join(path,"interfacec.o")
        self.exefile = os.path.join(path,"c_worker")
        
        compile_tools.fortran_compile(codefile, test_fortran_implementation.codestring,
                                      extra_args=["-fPIC"])
        
        
        uc = create_c.GenerateACHeaderStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.needs_mpi = False
        header =  uc.result
        
        
        with open(headerfile, "w") as f:
            f.write(header)
        
        
        root, ext = os.path.splitext(interfacefile)
        sourcename = root + '.pyx'
        cname = root + '.c'
        
        uc = create_cython.GenerateACythonSourcecodeStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.function_name_prefix = "ci_"
        uc.needs_mpi = True
        code =  uc.result
        
        with open(sourcename, "w") as f:
            f.write(code)


        uc = create_cython.GenerateACythonStartScriptStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.needs_mpi = True
        script =  uc.result
        
        with open(self.exefile, "w") as f:
            f.write(script)

        os.chmod(self.exefile, 0o777)

        import mpi4py
        process, stdout, stderr = compile_tools.open_subprocess([config.compilers.cython, 
        '-I',
        mpi4py.get_include(),
         sourcename, '-o', cname])

        if process.returncode == 0:
            compile_tools.wait_for_file(cname)
        
        if process.returncode != 0 or not os.path.exists(cname):
            print "Could not cythonize {0}, error = {1}".format(sourcename, stderr)
            raise Exception("Could not cythonize {0}, error = {1}".format(sourcename, stderr))
        
        with open(cname, "r") as f:
            string = f.read()


        
        
        uc = create_cython.GenerateAFortranInterfaceSourcecodeStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.function_name_prefix = "ci_"
        uc.needs_mpi = False
        code =  uc.result

        compile_tools.fortran_compile(self.interfacec_o_file, code,
                                      extra_args=["-fPIC"])

        compile_tools.c_pythondev_compile(interfacefile, string)
        compile_tools.fortran_pythondev_buildso(self.sofile,  [interfacefile, codefile, self.interfacec_o_file] )
Example #34
File: setup.py Project: jrs65/scalapy
print("=============================================================================")
print("Building Scalapy....")
print()
print("  ScaLAPACK: %s" % scalapackversion)
print("  MPI: %s" % mpiversion)
print("  OpenMP: %s" % repr(use_omp))
print()
print("  Compile args: %s" % repr(mpicompileargs))
print("  Libraries: %s" % repr(scl_lib + mpilinkargs))
print("  Library path: %s" % repr(scl_libdir))
print()
print("=============================================================================")

## Setup the extensions we are going to build
mpi3_ext = Extension('scalapy.mpi3util', [cython_file('scalapy/mpi3util')],
                     include_dirs=['.', np.get_include(), mpi4py.get_include()],
                     extra_compile_args=mpicompileargs,
                     extra_link_args=mpilinkargs)

blacs_ext = Extension('scalapy.blacs', [cython_file('scalapy/blacs')],
                      include_dirs=['.', np.get_include(), mpi4py.get_include()],
                      library_dirs=scl_libdir, libraries=scl_lib,
                      extra_compile_args=mpicompileargs,
                      extra_link_args=mpilinkargs)

llredist_ext = Extension('scalapy.lowlevel.redist', ['scalapy/lowlevel/redist.pyf'],
                         library_dirs=scl_libdir, libraries=scl_lib,
                         extra_compile_args=(mpicompileargs + omp_args),
                         extra_link_args=(mpilinkargs + omp_args))

llpblas_ext = Extension('scalapy.lowlevel.pblas', ['scalapy/lowlevel/pblas.pyf'],
Example #35
#!/usr/bin/env python

import os
from distutils.core import setup, Extension

import mpi4py

mpi4py_inc = mpi4py.get_include()

mpi_bin_dir = os.path.dirname( mpi4py.get_config()['mpicc'] )
mpi_dir = os.path.realpath( os.path.join(mpi_bin_dir,'..') )
mpi_inc_dir = os.path.join(mpi_dir, 'include')
mpi_lib_dir = os.path.join(mpi_dir, 'lib')

compute_pi = Extension('_compute_pi',
   sources = ['compute_pi.i', 'compute_pi.c'],
   #libraries = ['mpich','opa','mpl','rt','pthread'],
   libraries = ['mpich',],
   include_dirs = [mpi_inc_dir, mpi4py_inc],
   library_dirs = [mpi_lib_dir],
   runtime_library_dirs = [mpi_lib_dir],
   swig_opts=['-I' + mpi4py_inc],
   )

setup (name = 'compute_pi',
       version = '0.1',
       ext_modules = [compute_pi],
       py_modules = ["compute_pi"],
       )
Example #36
from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import mpi4py
import sys
import platform
import subprocess as sp
import os.path
import string

# Now get include paths from relevant python modules
include_path = [mpi4py.get_include()]
include_path += [np.get_include()]
include_path += ['./Csrc']

if sys.platform == 'darwin':
    #Compile flags for MacOSX
    library_dirs = []
    libraries = []
    extensions = []
    extra_compile_args = []
    extra_compile_args += [
        '-O3', '-march=native', '-Wno-unused', '-Wno-#warnings', '-fPIC'
    ]
    extra_objects = ['./RRTMG/rrtmg_build/rrtmg_combined.o']
    netcdf_include = '/opt/local/include'
    netcdf_lib = '/opt/local/lib'
    f_compiler = 'gfortran'
elif 'eu' in platform.node():
    #Compile flags for euler @ ETHZ
Example #37
File: setup.py Project: grlee77/pyFFTW
    def __init__(self, compiler):
        log.debug("Compiler include_dirs: %s" % compiler.include_dirs)
        if hasattr(compiler, "initialize"):
            compiler.initialize() # to set all variables
            log.debug("Compiler include_dirs after initialize: %s" % compiler.include_dirs)
        self.compiler = compiler

        log.debug(sys.version) # contains the compiler used to build this python

        # members with the info for the outside world
        self.include_dirs = get_include_dirs()
        self.objects = []
        self.libraries = []
        self.library_dirs = get_library_dirs()
        self.linker_flags = []
        self.compile_time_env = {}

        if self.compiler.compiler_type == 'msvc':
            if (sys.version_info.major, sys.version_info.minor) < (3, 3):
                # The check above is a nasty hack. We're using the python
                # version as a proxy for the MSVC version. MSVC 2008 doesn't
                # have stdint.h, so our bundled copy is needed; MSVC 2010 does.
                #
                # We need to add the path to msvc includes
                msvc_2008_path = (os.path.join(os.getcwd(), 'include', 'msvc_2008'))
                self.include_dirs.append(msvc_2008_path)
            elif (sys.version_info.major, sys.version_info.minor) < (3, 5):
                # Actually, it seems that appveyor doesn't have a stdint that
                # works, so even for 2010 we use our own (hacked) version
                # of stdint.
                # This should be pretty safe in whatever case.
                msvc_2010_path = (os.path.join(os.getcwd(), 'include', 'msvc_2010'))
                self.include_dirs.append(msvc_2010_path)

                # To avoid http://bugs.python.org/issue4431
                #
                # C:\Program Files\Microsoft
                # SDKs\Windows\v7.1\Bin\x64\mt.exe -nologo -manifest
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # -outputresource:C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe;1
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # : general error c1010070: Failed to load and parse
                # the manifest. The system cannot find the file
                # specified.
                self.compiler.ldflags_shared.append('/MANIFEST')

        if get_platform().startswith('linux'):
            # needed at least libm for linker checks to succeed
            self.libraries.append('m')

        # main fftw3 header is required
        if not self.has_header(['fftw3.h'], include_dirs=self.include_dirs):
            raise CompileError("Could not find the FFTW header 'fftw3.h'")

        # mpi is optional
        # self.support_mpi = self.has_header(['mpi.h', 'fftw3-mpi.h'])
        # TODO enable check when wrappers are included in Pyfftw
        self.support_mpi = False

        if self.support_mpi:
            try:
                import mpi4py
                self.include_dirs.append(mpi4py.get_include())
            except ImportError:
                log.error("Could not import mpi4py. Skipping support for FFTW MPI.")
                self.support_mpi = False

        self.search_dependencies()
Example #38
                                               ('libraries=', None, 'libraries')
                                             ]

    def finalize_options(self):
        self.ensure_string_list('libraries')
        _build_ext.finalize_options(self)


import numpy

if have_cython:
    touch('src/pypastix.pyx')
    ext_pypastix  = Extension('pypastix', ['src/pypastix.pyx'],
                              extra_compile_args = ["-O3", "-Wall"],
                              include_dirs = [numpy.get_include(),
                                              mpi4py.get_include()])
else:
    touch('src/pypastix.c')
    ext_pypastix  = Extension('pypastix', ['src/pypastix.c'],
                              extra_compile_args = ["-O3", "-Wall"],
                              include_dirs = [numpy.get_include(),
                                              mpi4py.get_include()],
                          )


setup(
    name = 'pypastix',
    version='0.0.0',
    packages= [],
    author='Xavier LACOSTE',
    author_email='*****@*****.**',
Example #39
    def build_worker(self):
        
        path = os.path.abspath(self.get_path_to_results())
        codefile = os.path.join(path,"code.o")
        interfacefile = os.path.join(path,"interface.o")
        headerfile = os.path.join(path,"worker_code.h")
        self.sofile = os.path.join(path,"interface.so")
        self.interfacec_o_file = os.path.join(path,"interfacec.o")
        self.exefile = os.path.join(path,"c_worker")
        
        compile_tools.fortran_compile(codefile, test_fortran_implementation.codestring,
                                      extra_args=["-fPIC"])
        
        
        uc = create_c.GenerateACHeaderStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.needs_mpi = False
        header =  uc.result
        
        
        with open(headerfile, "w") as f:
            f.write(header)
        
        
        root, ext = os.path.splitext(interfacefile)
        sourcename = root + '.pyx'
        cname = root + '.c'
        
        uc = create_cython.GenerateACythonSourcecodeStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.function_name_prefix = "ci_"
        uc.needs_mpi = True
        code =  uc.result
        
        with open(sourcename, "w") as f:
            f.write(code)


        uc = create_cython.GenerateACythonStartScriptStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.needs_mpi = True
        script =  uc.result
        
        with open(self.exefile, "w") as f:
            f.write(script)

        os.chmod(self.exefile, 0o777)

        import mpi4py
        process, stdout, stderr = compile_tools.open_subprocess([config.compilers.cython, 
        '-I',
        mpi4py.get_include(),
         sourcename, '-o', cname])

        if process.returncode == 0:
            compile_tools.wait_for_file(cname)
        
        if process.returncode != 0 or not os.path.exists(cname):
            print "Could not cythonize {0}, error = {1}".format(sourcename, stderr)
            raise Exception("Could not cythonize {0}, error = {1}".format(sourcename, stderr))
        
        with open(cname, "r") as f:
            string = f.read()


        
        
        uc = create_cython.GenerateAFortranInterfaceSourcecodeStringFromASpecificationClass()
        uc.specification_class = test_fortran_implementation.ForTestingInterface
        uc.function_name_prefix = "ci_"
        uc.needs_mpi = False
        code =  uc.result

        compile_tools.fortran_compile(self.interfacec_o_file, code,
                                      extra_args=["-fPIC"])

        compile_tools.c_pythondev_compile(interfacefile, string)
        compile_tools.fortran_pythondev_buildso(self.sofile,  [interfacefile, codefile, self.interfacec_o_file] )
Example #40
def write_setup_local():
    '''
    create setup_local.py. parameters written here will be read
    by setup.py in mfem._ser and mfem._par
    '''
    import numpy

    if build_mfem:
        mfemser = os.path.join(prefix, 'mfem', 'ser')
        mfempar = os.path.join(prefix, 'mfem', 'par')
    else:
        mfemser = mfems_prefix
        mfempar = mfemp_prefix

    params = {
        'cxx_ser': cxx_command,
        'cc_ser': cc_command,
        'cxx_par': mpicxx_command,
        'cc_par': mpicc_command,
        'whole_archive': '--whole-archive',
        'no_whole_archive': '--no-whole-archive',
        'nocompactunwind': '',
        'swigflag': '-Wall -c++ -python -fastproxy -olddefs -keyword',
        'hypreinc': os.path.join(hypre_prefix, 'include'),
        'hyprelib': os.path.join(hypre_prefix, 'lib'),
        'metisinc': os.path.join(metis_prefix, 'include'),
        'metis5lib': os.path.join(metis_prefix, 'lib'),
        'numpync': numpy.get_include(),
        'mfembuilddir': os.path.join(mfempar, 'include'),
        'mfemincdir': os.path.join(mfempar, 'include', 'mfem'),
        'mfemlnkdir': os.path.join(mfempar, 'lib'),
        'mfemserbuilddir': os.path.join(mfemser, 'include'),
        'mfemserincdir': os.path.join(mfemser, 'include', 'mfem'),
        'mfemserlnkdir': os.path.join(mfemser, 'lib'),
        'add_pumi': '',
        'add_strumpack': '',
        'add_cuda': '',
        'cxx11flag': cxx11_flag,
    }

    try:
        import mpi4py  # availability of this is checked before
        params['mpi4pyinc'] = mpi4py.get_include()
    except ImportError:
        params['mpi4pyinc'] = ''

    def add_extra(xxx):
        params['add_' + xxx] = '1'
        params[xxx + 'inc'] = os.path.join(globals()[xxx + '_prefix'],
                                           'include')
        params[xxx + 'lib'] = os.path.join(globals()[xxx + '_prefix'], 'lib')

    if enable_pumi:
        add_extra('pumi')
    if enable_strumpack:
        add_extra('strumpack')
    if enable_cuda:
        add_extra('cuda')

    pwd = chdir(rootdir)

    fid = open('setup_local.py', 'w')
    fid.write("#  setup_local.py \n")
    fid.write("#  generated from setup.py\n")
    fid.write("#  do not edit this directly\n")

    for key, value in params.items():
        text = key.lower() + ' = "' + value + '"'
        fid.write(text + "\n")
    fid.close()

    os.chdir(pwd)
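The generated setup_local.py simply defines one lowercase string variable per key; an abridged, hypothetical example of its contents (values are placeholders):

#  setup_local.py
#  generated from setup.py
#  do not edit this directly
cxx_ser = "g++"
mpi4pyinc = "/usr/lib/python3/dist-packages/mpi4py/include"
numpync = "/usr/lib/python3/dist-packages/numpy/core/include"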
Example #41
 def testGetInclude(self):
     path = mpi4py.get_include()
     self.assertTrue(isinstance(path, str))
     self.assertTrue(os.path.isdir(path))
     header = os.path.join(path, 'mpi4py', 'mpi4py.h')
     self.assertTrue(os.path.isfile(header))
Example #42
File: setup.py Project: pankajp/pysph
USE_CPP = True
HAS_MPI4PY = True
try:
    import mpi4py

    # assume a working mpi environment
    import commands

    if USE_CPP:
        mpic = "mpicxx"
    else:
        mpic = "mpicc"
    mpi_link_args.append(commands.getoutput(mpic + " --showme:link"))
    mpi_compile_args.append(commands.getoutput(mpic + " --showme:compile"))
    mpi_inc_dirs.append(mpi4py.get_include())
except ImportError:
    HAS_MPI4PY = False

cy_directives = {"embedsignature": True}

# base extension modules.
base = [
    Extension("pysph.base.carray", ["source/pysph/base/carray.pyx"]),
    Extension("pysph.base.point", ["source/pysph/base/point.pyx"]),
    Extension("pysph.base.plane", ["source/pysph/base/plane.pyx"]),
    Extension("pysph.base.particle_array", ["source/pysph/base/particle_array.pyx"]),
    Extension("pysph.base.cell", ["source/pysph/base/cell.pyx"]),
    Extension("pysph.base.polygon_array", ["source/pysph/base/polygon_array.pyx"]),
    Extension("pysph.base.nnps", ["source/pysph/base/nnps.pyx"]),
    Extension("pysph.base.geometry", ["source/pysph/base/geometry.pyx"]),
Example #43
File: setup.py  Project: Afey/h5py
    }
    if HDF5 is not None:
        COMPILER_SETTINGS['include_dirs'] += [op.join(HDF5, 'include')]
        COMPILER_SETTINGS['library_dirs'] += [op.join(HDF5, 'lib'), op.join(HDF5, 'lib64')]
    elif sys.platform == 'darwin':
        # adding both the MacPorts and Homebrew paths here generates
        # "ld: warning: dir not found" warnings at the linking phase
        COMPILER_SETTINGS['include_dirs'] += ['/opt/local/include'] # macports
        COMPILER_SETTINGS['library_dirs'] += ['/opt/local/lib']     # macports
        COMPILER_SETTINGS['include_dirs'] += ['/usr/local/include'] # homebrew
        COMPILER_SETTINGS['library_dirs'] += ['/usr/local/lib']     # homebrew
    elif sys.platform.startswith('freebsd'):
        COMPILER_SETTINGS['include_dirs'] += ['/usr/local/include'] # freebsd ports
        COMPILER_SETTINGS['library_dirs'] += ['/usr/local/lib']     # freebsd ports
    if MPI:
        COMPILER_SETTINGS['include_dirs'] += [mpi4py.get_include()]
    COMPILER_SETTINGS['runtime_library_dirs'] = [op.abspath(x) for x in COMPILER_SETTINGS['library_dirs']]

MODULES =  ['defs','_errors','_objects','_proxy', 'h5fd', 'h5z',
            'h5','h5i','h5r','utils',
            '_conv', 'h5t','h5s',
            'h5p',
            'h5d', 'h5a', 'h5f', 'h5g',
            'h5l', 'h5o',
            'h5ds', 'h5ac']

# No Cython, no point in configuring
if HAVE_CYTHON:     

    # Don't autodetect if version is manually given
    if HDF5_VERSION is None:    
Example #44
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

import Cython.Compiler.Options
Cython.Compiler.Options.annotate = True

import mpi4py

setup(
    name='py4est',
    packages=['py4est'],
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("py4est._py4est", 
                             ["py4est/_py4est.pyx"],
                             include_dirs=[mpi4py.get_include(),
                                           '/usr/include',
                                           '/usr/local/include',
                                           '/home/adr/Programme/p4est/p4est/local/include',
                                           'src',
                                           '/usr/include/mpi'],
                             libraries=['p4est', 'sc'],
                             library_dirs=['/home/adr/Programme/p4est/p4est/local/lib'])]

)
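
With build_ext registered in cmdclass as above, this script is typically run as "python setup.py build_ext --inplace" to build the _py4est extension in place; setting Cython.Compiler.Options.annotate = True additionally writes an HTML annotation report next to the generated C source.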
        
Example #45
print()
print("  ScaLAPACK: %s" % scalapackversion)
print("  MPI: %s" % mpiversion)
print("  OpenMP: %s" % repr(use_omp))
print()
print("  Compile args: %s" % repr(mpicompileargs))
print("  Libraries: %s" % repr(scl_lib + mpilinkargs))
print("  Library path: %s" % repr(scl_libdir))
print()
print("=============================================================================")

## Setup the extensions we are going to build
mpi3_ext = Extension(
    'scalapy.mpi3util', [cython_file('scalapy/mpi3util')],
    include_dirs=['.', np.get_include(),
                  mpi4py.get_include()],
    extra_compile_args=mpicompileargs,
    extra_link_args=mpilinkargs)

blacs_ext = Extension(
    'scalapy.blacs', [cython_file('scalapy/blacs')],
    include_dirs=['.', np.get_include(),
                  mpi4py.get_include()],
    library_dirs=scl_libdir,
    libraries=scl_lib,
    extra_compile_args=mpicompileargs,
    extra_link_args=mpilinkargs)

llredist_ext = Extension('scalapy.lowlevel.redist',
                         ['scalapy/lowlevel/redist.pyf'],
                         library_dirs=scl_libdir,
Example #46
# ---

import mpi4py
try: mpi4py.get_include()
except: pass
try: mpi4py.get_config()
except: pass

# ---

def test_mpi4py_rc():
    import mpi4py.rc
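    # note: rc settings only take effect if they are applied before the first
    # "from mpi4py import MPI" in the process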
    mpi4py.rc(
        initialize=True,
        threads=True,
        thread_level='multiple',
        finalize=None,
        fast_reduce=True,
        recv_mprobe=True,
        errors='exception',
    )
    # an unknown option must be rejected with a TypeError
    try: mpi4py.rc(qwerty=False)
    except TypeError: pass
    else: raise RuntimeError

test_mpi4py_rc()

# ---

def test_mpi4py_profile():
    import mpi4py
Example #47
def generate_wrapper():
    '''
    Run SWIG to (re)generate the wrapper sources.
    '''
    if dry_run or verbose:
        print("generating SWIG wrapper")

    def ifiles():
        ifiles = os.listdir()
        ifiles = [x for x in ifiles if x.endswith('.i')]
        ifiles = [x for x in ifiles if not x.startswith('#')]
        ifiles = [x for x in ifiles if not x.startswith('.')]
        return ifiles

    def check_new(ifile):
        wfile = ifile[:-2] + '_wrap.cxx'
        if not os.path.exists(wfile):
            return True
        return os.path.getmtime(ifile) > os.path.getmtime(wfile)

    if build_mfem:
        mfemser = os.path.join(prefix, 'mfem', 'ser')
        mfempar = os.path.join(prefix, 'mfem', 'par')
    else:
        mfemser = mfems_prefix
        mfempar = mfemp_prefix

    swig_command = find_command('swig')
    if swig_command is None:
        assert False, "SWIG is not installed"

    swigflag = '-Wall -c++ -python -fastproxy -olddefs -keyword'.split(' ')

    pwd = chdir(os.path.join(rootdir, 'mfem', '_ser'))

    serflag = [
        '-I' + os.path.join(mfemser, 'include'),
        '-I' + os.path.join(mfemser, 'include', 'mfem')
    ]
    for file in ifiles():
        if not check_new(file):
            continue
        command = [swig_command] + swigflag + serflag + [file]
        make_call(command)

    if not build_parallel:
        os.chdir(pwd)
        return

    chdir(os.path.join(rootdir, 'mfem', '_par'))

    import mpi4py
    parflag = [
        '-I' + os.path.join(mfempar, 'include'),
        '-I' + os.path.join(mfempar, 'include', 'mfem'),
        '-I' + mpi4py.get_include()
    ]

    if enable_pumi:
        parflag.append('-I' + os.path.join(pumi_prefix, 'include'))
    if enable_strumpack:
        parflag.append('-I' + os.path.join(strumpack_prefix, 'include'))

    for file in ifiles():
        # pumi.i does not depend on PUMI-specific headers, so it does not need
        # to be skipped when PUMI is disabled:
        #        if file == 'pumi.i':# and not enable_pumi:
        #            continue
        if file == 'strumpack.i' and not enable_strumpack:
            continue
        if not check_new(file):
            continue
        command = [swig_command] + swigflag + parflag + [file]
        make_call(command)

    os.chdir(pwd)
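
As a concrete illustration, here is a minimal sketch of the command list that the parallel loop above assembles for one interface file; the MFEM prefix and the pmesh.i file name are hypothetical placeholders:

import os
import mpi4py

mfempar = '/usr/local/mfem/par'   # hypothetical parallel MFEM install prefix
swigflag = '-Wall -c++ -python -fastproxy -olddefs -keyword'.split(' ')
command = (['swig'] + swigflag +
           ['-I' + os.path.join(mfempar, 'include'),
            '-I' + os.path.join(mfempar, 'include', 'mfem'),
            '-I' + mpi4py.get_include(),
            'pmesh.i'])   # hypothetical interface file
# running this command (as make_call does above) would emit pmesh_wrap.cxx
# and a matching Python proxy module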
Example #48
from configparser import ConfigParser

import mpi4py
import numpy

from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize

lammps_config = ConfigParser()
lammps_config.read('lammps.cfg')
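
# A hedged example of what lammps.cfg might contain for this script; all paths
# and library names below are hypothetical, only the section and key names are
# taken from the lookups that follow:
#
#     [lammps]
#     lammps_include_dir = /usr/local/include/lammps
#     lammps_library = lammps
#     lammps_library_dir = /usr/local/lib
#
#     [mpi]
#     mpi_include_dir = /usr/include/mpich
#     mpi_library = mpich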

include_dirs = [
    mpi4py.get_include(),
    numpy.get_include(),
    lammps_config.get('lammps', 'lammps_include_dir'),
    lammps_config.get('mpi', 'mpi_include_dir')
]

# TODO: Should maybe include mpi_cxx, mpi, python3.4m
libraries = [lammps_config.get('lammps', 'lammps_library'), 
             lammps_config.get('mpi', 'mpi_library')]
library_dirs = [lammps_config.get('lammps', 'lammps_library_dir')]

extensions = [
    Extension(
        'lammps.core',
        sources=['lammps/core.pyx'],
        include_dirs=include_dirs,
        libraries=libraries,
        library_dirs=library_dirs,
Example #49
        sys.stdout.write(
            'netcdf lib does not have netcdf4 parallel functions\n')
        f.write('DEF HAS_PARALLEL4_SUPPORT = 0\n')

    if has_pnetcdf_support:
        sys.stdout.write('netcdf lib has pnetcdf parallel functions\n')
        f.write('DEF HAS_PNETCDF_SUPPORT = 1\n')
    else:
        sys.stdout.write(
            'netcdf lib does not have pnetcdf parallel functions\n')
        f.write('DEF HAS_PNETCDF_SUPPORT = 0\n')

    f.close()
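    # the DEF lines above define Cython compile-time constants; the generated
    # file is presumably pulled in when the _netCDF4 .pyx source below is
    # cythonized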

    if has_parallel4_support or has_pnetcdf_support:
        inc_dirs.append(mpi4py.get_include())
        # mpi_incdir should not be needed if using nc-config
        # (should be included in nc-config --cflags)
        if mpi_incdir is not None: inc_dirs.append(mpi_incdir)

    ext_modules = [
        Extension("netCDF4._netCDF4", [netcdf4_src_root + '.pyx'],
                  libraries=libs,
                  library_dirs=lib_dirs,
                  include_dirs=inc_dirs + ['include'],
                  runtime_library_dirs=runtime_lib_dirs)
    ]
else:
    ext_modules = None

setup(
Example #50
File: setup.py  Project: gywukun09/pycles
from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import mpi4py
import sys
import platform
import subprocess as sp
import os.path


# Now get include paths from relevant python modules
include_path = [mpi4py.get_include()]
include_path += [np.get_include()]
include_path += ['./Csrc']

if sys.platform == 'darwin':
    #Compile flags for MacOSX
    library_dirs = []
    libraries = []
    extensions = []
    extra_compile_args = []
    extra_compile_args += ['-O3', '-march=native', '-Wno-unused', '-Wno-#warnings','-fPIC']
    extra_objects=['./RRTMG/rrtmg_build/rrtmg_combined.o']
    netcdf_include = '/opt/local/include'
    netcdf_lib = '/opt/local/lib'
    f_compiler = 'gfortran'
elif 'euler' in platform.node():
    #Compile flags for euler @ ETHZ
    library_dirs = ['/cluster/apps/openmpi/1.6.5/x86_64/gcc_4.8.2/lib/']
    libraries = []