def setup_java_bridge_extension():
    """Configure and return the Extension for the javabridge Cython module.

    Locates a JDK (on Windows possibly via the registry), assembles the
    platform-specific include dirs, library dirs and link flags, and builds
    the ``javabridge/javabridge`` Extension from the Cython source.

    FIX: the Python 2 ``print`` statements were SyntaxErrors under
    Python 3; they are now ``print()`` calls with identical output.

    Returns:
        Extension: ready to be passed to ``setup(ext_modules=[...])``.
    """
    #
    # Find JAVA_HOME, possibly from Windows registry
    #
    java_home = find_javahome()
    jdk_home = find_jdk()
    print("Using jdk_home = %s" % jdk_home)
    include_dirs = [get_include()]
    extra_link_args = None
    libraries = None
    library_dirs = None
    javabridge_sources = ["javabridge/javabridge.pyx"]
    if is_win:
        if jdk_home is not None:
            jdk_include = os.path.join(jdk_home, "include")
            jdk_include_plat = os.path.join(jdk_include, sys.platform)
            include_dirs += [jdk_include, jdk_include_plat]
        if is_mingw:
            #
            # Build libjvm from jvm.dll on Windows.
            # This assumes that we're using mingw32 for build
            # (dlltool converts the DLL's exports into an import library;
            # assumes a jvm.def file is present in the working directory).
            #
            cmd = ["dlltool", "--dllname",
                   os.path.join(jdk_home, "jre\\bin\\client\\jvm.dll"),
                   "--output-lib", "libjvm.a",
                   "--input-def", "jvm.def",
                   "--kill-at"]
            p = subprocess.Popen(cmd)
            p.communicate()
            library_dirs = [os.path.abspath(".")]
        else:
            #
            # Use the MSVC lib in the JDK
            #
            jdk_lib = os.path.join(jdk_home, "lib")
            library_dirs = [jdk_lib]
            javabridge_sources.append("javabridge/strtoull.c")
        # Both Windows variants link against the jvm library built/found above.
        libraries = ["jvm"]
    elif sys.platform == 'darwin':
        # Apple's legacy JavaVM framework supplies headers and the JVM.
        include_dirs += ['/System/Library/Frameworks/JavaVM.framework/Headers']
        extra_link_args = ['-framework', 'JavaVM']
    elif sys.platform.startswith('linux'):
        include_dirs += [os.path.join(java_home, 'include'),
                         os.path.join(java_home, 'include', 'linux')]
        # NOTE(review): hard-codes the amd64 server JVM layout — confirm on
        # other architectures / newer JDK directory layouts.
        library_dirs = [os.path.join(java_home, 'jre', 'lib', 'amd64', 'server')]
        libraries = ["jvm"]
    java_bridge_extension = Extension("javabridge/javabridge",
                                      sources=javabridge_sources,
                                      libraries=libraries,
                                      library_dirs=library_dirs,
                                      include_dirs=include_dirs,
                                      extra_link_args=extra_link_args)
    # Debug: report whether numpy.distutils sees any f2py (Fortran) sources.
    print("FORT?", java_bridge_extension.has_f2py_sources())
    return java_bridge_extension
'gradunwarp.__init__', 'gradunwarp.core.utils', 'gradunwarp.core.unwarp_resample', 'gradunwarp.core.gradient_unwarp', 'gradunwarp.core.tests.test_utils', ] dats = [ ('gradunwarp/core/', ['gradunwarp/core/interp3_ext.c']), ('gradunwarp/core/', ['gradunwarp/core/legendre_ext.c']), ('gradunwarp/core/', ['gradunwarp/core/transform_coordinates_ext.c']), ] # to build the C extension interp3_ext.c ext1 = Extension('gradunwarp.core.interp3_ext', include_dirs=get_numpy_include_dirs(), sources=['gradunwarp/core/interp3_ext.c'], extra_compile_args=['-O3']) # to build the C extension legendre_ext.c ext2 = Extension('gradunwarp.core.legendre_ext', include_dirs=get_numpy_include_dirs(), sources=['gradunwarp/core/legendre_ext.c'], extra_compile_args=['-O3']) # to build the C extension transform_coordinates_ext.c ext3 = Extension('gradunwarp.core.transform_coordinates_ext', include_dirs=get_numpy_include_dirs(), sources=['gradunwarp/core/transform_coordinates_ext.c'], extra_compile_args=['-O3']) scripts_cmd = [ 'gradunwarp/core/gradient_unwarp.py', ]
COMPILER_FLAGS = ["-O3", "-m64", "-march=native", "-fPIC"] LINKER_FLAGS = [] MATH_LINKER_FLAGS = ["-lblas", "-llapack"] # Intel if any(["intelem" in arg for arg in sys.argv]): COMPILER_FLAGS = ["-xHost", "-O3", "-axAVX", "-qopenmp"] LINKER_FLAGS = ["-liomp5", " -lpthread", "-lm", "-ldl"] MATH_LINKER_FLAGS = ["-L${MKLROOT}/lib/intel64", "-lmkl_rt"] mytest_module = Extension( name='libganstruc', sources=[ './libganstruc/encode_struc.f90', ], extra_f90_compile_args=COMPILER_FLAGS, extra_f77_compile_args=COMPILER_FLAGS, extra_compile_args=COMPILER_FLAGS, #extra_link_args = LINKER_FLAGS + MATH_LINKER_FLAGS, language=FORTRAN, f2py_options=['--quiet']) # use README.md as long description def readme(): with open('README.md') as f: return f.read() def setup_pepytools():
from __future__ import division, absolute_import, print_function from numpy.distutils.core import Extension ext1 = Extension(name='pka_number', sources=['pka_number.f90']) # ext2 = Extension(name = 'find_abnormal', # sources = ['find_abnormal.pyf', 'main.f90']) if __name__ == "__main__": from numpy.distutils.core import setup setup( name='f2py_example', description="F2PY Users Guide examples", author="Pearu Peterson", author_email="*****@*****.**", # ext_modules = [ext1, ext2] ext_modules=[ext1]) # End of setup_example.py
# LD_PRELOAD=$MKLROOT/lib/intel64_lin/libmkl_core.so:$MKLROOT/lib/intel64_lin/libmkl_sequential.so \ # python scripts/cellconstructo/r_test.py os.environ["CC"] = "icc" os.environ["F90"] = "ifort" symph_ext = Extension( name="symph", sources=[ "FModules/symdynph_gq_new.f90", "FModules/symm_base.f90", "FModules/sgam_ph.f90", "FModules/invmat.f90", "FModules/set_asr.f90", "FModules/error_handler.f90", "FModules/io_global.f90", "FModules/flush_unit.f90", "FModules/symvector.f90", "FModules/fc_supercell_from_dyn.f90", "FModules/set_tau.f90", "FModules/cryst_to_car.f90", "FModules/recips.f90", "FModules/q2qstar_out.f90", "FModules/rotate_and_add_dyn.f90", "FModules/trntnsc.f90", "FModules/star_q.f90", "FModules/eqvect.f90", "FModules/symm_matrix.f90", "FModules/from_matdyn.f90", "FModules/interp.f90", "FModules/q_gen.f90", "FModules/smallgq.f90", "FModules/symmetry_high_rank.f90", "FModules/unwrap_tensors.f90", "FModules/get_latvec.f90", "FModules/contract_two_phonon_propagator.f90" ], extra_f90_compile_args=["-fpp"], extra_link_args=["-mkl"]) secondorder_ext = Extension(name="secondorder", sources=[ "FModules/second_order_centering.f90", "FModules/second_order_ASR.f90" ],
'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], description="pixell", package_dir={"pixell": "pixell"}, entry_points={ # 'console_scripts': [ # 'pixell=pixell.cli:main', # ], }, ext_modules=[ Extension('pixell.sharp', sources=['cython/sharp.c'], libraries=['sharp', 'c_utils', 'fftpack', 'm'], library_dirs=['_deps/libsharp/auto/lib'], include_dirs=[np.get_include()], **compile_opts), Extension('pixell.distances', sources=['cython/distances.c', 'cython/distances_core.c'], include_dirs=[np.get_include()], **compile_opts), Extension('pixell._interpol_32', sources=['fortran/interpol_32.f90'], **compile_opts), Extension('pixell._interpol_64', sources=['fortran/interpol_64.f90'], **compile_opts), Extension('pixell._colorize', sources=['fortran/colorize.f90'], **compile_opts),
# Abort the build if the SWIG wrapper-generation step (run earlier in this
# file; `p` is its Popen/CompletedProcess handle) failed — without the
# generated C++ wrapper there is nothing to compile.
if p.returncode != 0:
    print("Error generating SWIG wrappers.")
    sys.exit(1)

# NOTE(review): `version`, `extra_link_args`, `extra_compile_args`,
# `extra_libraries` and `extra_setuptools_args` are defined earlier in this
# file, outside this chunk — verify before changing them here.
setup(
    name='paratext',
    version=version,
    description=
    'Reads text files in parallel. The first release includes a parallel CSV reader.',
    long_description="""
See README
""",
    keywords=['csv', 'reading'],
    # One C++ extension: the SWIG-generated wrapper plus the library proper.
    ext_modules=[
        Extension('_paratext_internal', [
            '../src/paratext_internal_wrap.cxx',
            '../src/paratext_internal.cpp'
        ],
                  extra_link_args=extra_link_args,
                  extra_compile_args=extra_compile_args,
                  include_dirs=['../src/'],
                  libraries=["stdc++"] + extra_libraries),
    ],
    py_modules=["paratext_internal"],
    author="Damian Eads",
    author_email="*****@*****.**",
    license="Apache License",
    packages=['paratext'],
    url='http://wise.io',
    include_package_data=True,
    **extra_setuptools_args)
from numpy.distutils.core import Extension f90periodogram = Extension(name='f90periodogram', sources=['seismo/src/periodogram.f90'], extra_f90_compile_args=["-fopenmp", "-lgomp"], extra_link_args=["-lgomp"]) if __name__ == "__main__": from numpy.distutils.core import setup setup(name='seismo_blobs', description="Compiled sources for use with seismo", author="Ewald Zietsman", author_email="*****@*****.**", ext_modules=[f90periodogram] ) # Now seismo import setuptools setuptools.setup( name="seismo", version="0.2", packages=setuptools.find_packages(), install_requires=['numpy>=1.9'], # metadata for upload to PyPI author="Ewald Zietsman", author_email="*****@*****.**", description="Timeseries stuff for asteroseismology",
'sphinx-bootstrap-theme >= 0.6', 'sphinxcontrib-programoutput', 'sphinx-automodapi', 'ipython', 'sphinx_rtd_theme', ], } # fortran compile wrapper = Extension( 'cosmic._evolvebin', sources=[ 'cosmic/src/comenv.f', 'cosmic/src/corerd.f', 'cosmic/src/deltat.f', 'cosmic/src/dgcore.f', 'cosmic/src/evolv2.f', 'cosmic/src/gntage.f', 'cosmic/src/hrdiag.f', 'cosmic/src/instar.f', 'cosmic/src/kick.f', 'cosmic/src/mix.f', 'cosmic/src/mlwind.f', 'cosmic/src/mrenv.f', 'cosmic/src/ran3.f', 'cosmic/src/rl.f', 'cosmic/src/star.f', 'cosmic/src/zcnsts.f', 'cosmic/src/zfuncs.f', 'cosmic/src/concatkstars.f', 'cosmic/src/bpp_array.f', 'cosmic/src/checkstate.f' ], ) #extra_compile_args = ["-g","-O0"], extra_f77_compile_args=["-O0"], extra_f90_compile_args=["-O0"]) # -- run setup ---------------------------------------------------------------- packagenames = find_packages() scripts = glob.glob(os.path.join('bin', '*')) setup( name=DISTNAME, provides=[PACKAGENAME],
pass def finalize_options(self): pass def run(self): errno = os.system("sage -t --force-lib %s" % SRC) if errno != 0: sys.exit(1) ext1 = Extension( name='guptri_py._fguptri_py', # the following silences type mismatch errors in gfortran 10 # (-fallow-argument-mismatch is not recognized by older fortran versions) extra_f77_compile_args=['-std=legacy'], sources=['guptri_py/_fguptri_py.pyf'] + [ os.path.join(TMPDIR, s) for s in ('fguptri.f', 'guptribase.f', 'zguptri.f') ]) from numpy.distutils.core import setup setup(cmdclass={ 'download': DownloadCommand, 'build_ext': BuildExtCommand, 'test': TestCommand, 'clean': CleanCommand, }, name='guptri_py', version=VERSION, packages=["guptri_py"],
library_dirs = getattr(command, libname + '_lib_dirs').split(' ') include_dirs = getattr(command, libname + '_include_dirs').split(' ') libraries = getattr(command, libname + '_libraries').split(' ') return [library_dirs, include_dirs, libraries] ### # Static Extensions ### estmethods = Extension( 'sherpa.estmethods._est_funcs', [ 'sherpa/estmethods/src/estutils.cc', 'sherpa/estmethods/src/info_matrix.cc', 'sherpa/estmethods/src/projection.cc', 'sherpa/estmethods/src/estwrappers.cc' ], (sherpa_inc + ['sherpa/utils/src/gsl']), depends=(get_deps(['extension', 'utils']) + [ 'sherpa/estmethods/src/estutils.hh', 'sherpa/estmethods/src/info_matrix.hh', 'sherpa/estmethods/src/projection.hh', 'sherpa/utils/src/gsl/fcmp.h' ])) utils = Extension( 'sherpa.utils._utils', [ 'sherpa/utils/src/cephes/const.c', 'sherpa/utils/src/cephes/fabs.c', 'sherpa/utils/src/cephes/isnan.c', 'sherpa/utils/src/cephes/mtherr.c', 'sherpa/utils/src/cephes/polevl.c', 'sherpa/utils/src/cephes/ndtri.c', 'sherpa/utils/src/cephes/gamma.c', 'sherpa/utils/src/cephes/igam.c', 'sherpa/utils/src/cephes/igami.c', 'sherpa/utils/src/cephes/incbet.c', 'sherpa/utils/src/cephes/incbi.c', 'sherpa/utils/src/sjohnson/Faddeeva.cc', 'sherpa/utils/src/_utils.cc'
#!/usr/bin/env python
"""setup script for hidro_alarmas: builds the `al` and `fs` f2py modules."""
import os
from numpy.distutils.core import setup, Extension

# FIX: the option was previously spelled '--opt = O3', which f2py rejects
# (no spaces are allowed around '=', and the value must be an actual
# compiler flag). The correct spelling is '--opt=-O3'.
# NOTE(review): the sources are .py files, which f2py cannot compile —
# presumably pre-built al.so/fs.so (see package_data below) are shipped
# instead; confirm the intended build flow.
ext1 = Extension(name='al',
                 sources=['alarmas/alarmas.py'],
                 f2py_options=['--opt=-O3'])
ext2 = Extension(name='fs',
                 sources=['alarmas/funciones_sora.py'])

setup(
    name='hidro_alarmas',
    version='0.0.1',
    author='Hidro SIATA',
    author_email='*****@*****.**',
    packages=['alarmas'],
    package_data={'alarmas': ['al.so', 'fs.so']},
    url='https://github.com/SIATAhidro/Alarmas.git',
    license='LICENSE.txt',
    description='Despliegue de archivos para pagina de Alarmas Comunitarias',
    long_description=open('README.md').read(),
    install_requires=[],
    ext_modules=[ext1, ext2],
)
"""Two-pass build: install the RotorSE Python package first, then build the
PreComp Fortran extension with numpy.distutils in a second setup() call."""
import platform

# Pass 1: pure-Python RotorSE package (uses the setup imported earlier in
# this file).
setup(
    name='RotorSE',
    version='0.1.1',
    description='Rotor Systems Engineering Model',
    author='NREL WISDEM Team',
    author_email='*****@*****.**',
    install_requires=['commonse', 'ccblade', 'pbeam'],
    package_dir={'': 'src'},
    packages=['rotorse', 'rotorse.test', 'rotorse.turbine_inputs',
              'rotorse.geometry_tools'],
    package_data={'': ['*.inp']},
    include_package_data=True,
    license='Apache License, Version 2.0',
    dependency_links=[
        'https://github.com/WISDEM/CCBlade/tarball/master#egg=ccblade',
        'https://github.com/WISDEM/pBEAM/tarball/master#egg=pbeam',
        'https://github.com/WISDEM/CommonSE/tarball/master#egg=commonse',
    ],
    zip_safe=False,
)

# Pass 2: the _precomp f2py extension requires numpy.distutils' setup, which
# deliberately shadows the setup used above.
from numpy.distutils.core import setup, Extension

setup(
    name='precomp',
    package_dir={'': 'src/rotorse'},
    ext_modules=[
        Extension('_precomp',
                  ['src/rotorse/PreCompPy.f90'],
                  extra_compile_args=['-O2', '-fPIC', '-shared'],
                  extra_link_args=['-shared']),
    ],
)
"""Builds the Euclidean (edm) and city-block (cbdm) distance-matrix
f2py extensions for the `metrics` package."""
from numpy.distutils.core import Extension, setup

compiler_flags = ['-O3', '-fopenmp']  # , '-m64', '-march=native', '-fPIC'
linker_flags = ['-lgomp']
f2py_options = ['--verbose']
language = 'f90'


def _fortran_extension(mod_name, source_file):
    # Both extensions share exactly the same build configuration; only the
    # module name and Fortran source differ.
    return Extension(name=mod_name,
                     sources=[source_file],
                     extra_f90_compile_args=compiler_flags,
                     extra_f77_compile_args=compiler_flags,
                     extra_compile_args=compiler_flags,
                     extra_link_args=linker_flags,
                     language=language,
                     f2py_options=f2py_options)


edm = _fortran_extension('edm', 'euclidean.f90')
cityblock = _fortran_extension('cbdm', 'cityblock.f90')

setup(name='metrics', ext_package='metrics', ext_modules=[edm, cityblock])
if __name__ == "__main__": from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration from numpy.distutils.core import Extension from numpy.distutils import fcompiler from Cython.Build import cythonize cy_ext32 = Extension(name = "CHAPSim_post.post._cy_ext32_base", sources = ["src/autocorr_parallel32.pyx"], extra_compile_args = ["-fopenmp","-O3"], extra_link_args = ["-fopenmp","-O3"]) cy_ext64 = Extension(name = "CHAPSim_post.post._cy_ext64_base", sources = ["src/autocorr_parallel64.pyx"], extra_compile_args = ["-fopenmp","-O3"], extra_link_args = ["-fopenmp","-O3"]) # if fcompiler.get_default_fcompiler(): # f90_ext = Extension(name = 'CHAPSim_post.post._f90_ext_base', # sources = ["src/autocorr_parallel.f90"], # extra_link_args=["-lgomp"], # extra_f90_compile_args=['-O3','-fopenmp']) # ext_list = [cy_ext32,f90_ext] # else: ext_list = cythonize([cy_ext32,cy_ext64]) print([type(ext) for ext in ext_list]) config = Configuration(package_name='CHAPSim_post', description="Package containing post-processing routines for the CHAPSim DNS Solver",
"""Build script for the _calculsfor_f90 Fortran modules (OpenMP-enabled)."""
import sys
# sys.path.append('../../trunk/fortran_srcs/')

extra_link_args = []
if sys.platform == 'darwin':
    # FIX: dropped the stray '/openmp' entry — that is an MSVC-only compiler
    # switch; the macOS linker would treat it as a (nonexistent) input file.
    # '-fopenmp' is also no longer listed here because it is appended
    # unconditionally just below.
    # NOTE(review): Apple spells the framework 'vecLib'; 'veclib' only works
    # on case-insensitive filesystems — confirm.
    extra_link_args = ['-framework', 'veclib', '-lgomp']

from numpy.distutils.core import setup, Extension

extra_link_args.append('-fopenmp')  # option for fortran OpenMP

# Compile flags: OpenMP, link gomp, debug info, runtime bounds checking.
opt_flags = ['-fopenmp', '-lgomp', '-g', '-fbounds-check']
# ~ opt_flags = ['g']

# fortran modules used by main python code
files1 = ['../../trunk/fortran_srcs/calculsfortran_rec.f90',
          '../../trunk/fortran_srcs/calculsfortran_ini.f90',
          '../../trunk/fortran_srcs/calculs_2D.f90']

ext1 = Extension(name='_calculsfor_f90',
                 sources=files1,
                 extra_compile_args=opt_flags,
                 extra_f90_compile_args=opt_flags,
                 extra_link_args=extra_link_args)

setup(name="_modules_f90",
      version='0.1',
      description="modules for a python code",
      author="Fred a partir du fichier de Martin",
      author_email='',
      url='',
      ext_modules=[ext1])
srcs_spherepack = [ 'src/gaqd.f', 'src/shses.f', 'src/shaes.f', 'src/vhaes.f', 'src/vhses.f', 'src/shsgs.f', 'src/shags.f', 'src/vhags.f', 'src/vhsgs.f', 'src/sphcom.f', 'src/hrfft.f', 'src/shaec.f', 'src/shagc.f', 'src/shsec.f', 'src/shsgc.f', 'src/vhaec.f', 'src/vhagc.f', 'src/vhsec.f', 'src/vhsgc.f', 'src/ihgeod.f', 'src/alf.f' ] srcs_local = [ 'src/_spherepack.pyf', 'src/getlegfunc.f', 'src/specintrp.f', 'src/onedtotwod.f', 'src/onedtotwod_vrtdiv.f', 'src/twodtooned.f', 'src/twodtooned_vrtdiv.f', 'src/multsmoothfact.f', 'src/lap.f', 'src/invlap.f' ] ext = Extension(name='_spherepack', sources=srcs_local + srcs_spherepack) #havefiles = [os.path.isfile(f) for f in srcs_spherepack] # #if havefiles.count(False) and sys.argv[1] not in ['sdist','clean']: # sys.stdout.write(""" # SPHEREPACK fortran source files not in src directory. # The SPHEREPACK license forbids redistribution of the source. # You can download the tarfile from http://www.scd.ucar.edu/softlib/SPHERE.html # and copy the *.f files to the src directory, or it can be done # automatically for you now. # # WARNING: By downloading the SPHEREPACK source files, you are agreeing to # the terms of the SPHEREPACK license at # http://www2.cisl.ucar.edu/resources/legacy/spherepack/license\n # """)
list_int_lap_vec.append('lfmm2d' + st + cd + pg + '_vec') list_int_lap_vec.append('cfmm2d' + st + cd + pg + '_vec') for cd in c_opts2: for pg in p_optsh2: list_int_helm_dir.append('h2d_direct' + cd + pg) for pg in p_optsl2: list_int_lap_dir.append('r2d_direct' + cd + pg) list_int_lap_dir.append('l2d_direct' + cd + pg) list_int_lap_dir.append('c2d_direct' + cd + pg) ext_helm = Extension( name='fmm2dpy.hfmm2d_fortran', sources=['../src/helmholtz/' + item for item in list_helm] + ['../src/common/' + item for item in list_common], f2py_options=['only:'] + list_int_helm + list_int_helm_vec + list_int_helm_dir + [':'], extra_f90_compile_args=["-std=legacy"], extra_f77_compile_args=["-std=legacy"], extra_link_args=FLIBS) ext_lap = Extension(name='fmm2dpy.lfmm2d_fortran', sources=['../src/laplace/' + item for item in list_lap] + ['../src/common/' + item for item in list_common], f2py_options=['only:'] + list_int_lap + list_int_lap_vec + list_int_lap_dir + [':'], extra_f90_compile_args=["-std=legacy"], extra_f77_compile_args=["-std=legacy"], extra_link_args=FLIBS) ## TODO: fill in the info below
def __init__(self, name, sourcedir='', **kwa):
    """Create an extension with no compiled sources of its own, remembering
    where the out-of-tree (CMake-style) source directory lives.

    All extra keyword arguments are forwarded unchanged to Extension.
    """
    Extension.__init__(self, name, sources=[], **kwa)
    # Normalise once so later build steps can change directory safely.
    resolved = os.path.abspath(sourcedir)
    self.sourcedir = resolved
raise ValueError('Verbosity must be an integer.') def run(self): import sys if sys.version.startswith('2.6') or sys.version.startswith('3.1'): import unittest2 as unittest else: import unittest suite = unittest.TestLoader().discover('tests', pattern='*test.py', top_level_dir='.') unittest.TextTestRunner(verbosity=self.verbosity).run(suite) ################################################################################ # The Fortran extension modules to build using f2py ext_modules = [ Extension('rpmdrate._surface', ['rpmdrate/_surface.f90']), Extension('rpmdrate._main', ['rpmdrate/_main.pyf', 'rpmdrate/_math.f90', 'rpmdrate/_surface.f90', 'rpmdrate/_main.f90'], libraries=['blas', 'fftw3']), ] setup( name = 'RPMDrate', version = '0.1.0', description = 'Ring polymer molecular dynamics simulations', author = 'Joshua W. Allen, Yury V. Suleimanov, William H. Green', author_email = '*****@*****.**', url = 'http://github.com/GreenGroup/RPMDrate', packages = ['rpmdrate'], cmdclass = {'test': test}, ext_modules = ext_modules, requires = ['numpy (>=1.5.0)'], provides = ['rpmdrate'],
def run_compile():
    """
    Do it all in one call!

    Implements f2py's ``-c`` mode: strips f2py-, compiler- and linker-related
    flags out of ``sys.argv``, builds a numpy.distutils Extension from the
    remaining source arguments, and runs ``setup(ext_modules=[ext])`` to
    compile it, optionally removing the temporary build directory afterwards.

    FIX (Python 3 port): the Python 2 ``print`` statements and backtick
    repr (`` `x` ``) were SyntaxErrors on Python 3, and the results of
    ``filter(...)`` — lists on Python 2 — are now materialised explicitly,
    since the old code indexed and re-scanned them.
    """
    import tempfile

    # '-c' itself is consumed here; everything else is parsed below.
    i = sys.argv.index('-c')
    del sys.argv[i]

    remove_build_dir = 0
    try:
        i = sys.argv.index('--build-dir')
    except ValueError:
        i = None
    if i is not None:
        build_dir = sys.argv[i + 1]
        del sys.argv[i + 1]
        del sys.argv[i]
    else:
        # No explicit build dir: use a temp dir and remove it when done.
        remove_build_dir = 1
        build_dir = os.path.join(tempfile.mktemp())

    # --link-<resource> flags are forwarded to numpy.distutils.system_info.
    sysinfo_flags = [a for a in sys.argv[1:]
                     if re.compile(r'[-][-]link[-]').match(a)]
    sys.argv = [a for a in sys.argv if a not in sysinfo_flags]
    if sysinfo_flags:
        sysinfo_flags = [f[7:] for f in sysinfo_flags]

    # Flags consumed by f2py itself.
    f2py_flags = [a for a in sys.argv[1:] if re.compile(
        r'[-][-]((no[-]|)(wrap[-]functions|lower)|debug[-]capi|quiet)|[-]include'
    ).match(a)]
    sys.argv = [a for a in sys.argv if a not in f2py_flags]

    # 'only:'/'skip:' ... ':' ranges select which routines get wrapped.
    f2py_flags2 = []
    fl = 0
    for a in sys.argv[1:]:
        if a in ['only:', 'skip:']:
            fl = 1
        elif a == ':':
            fl = 0
        if fl or a == ':':
            f2py_flags2.append(a)
    if f2py_flags2 and f2py_flags2[-1] != ':':
        f2py_flags2.append(':')
    f2py_flags.extend(f2py_flags2)
    sys.argv = [a for a in sys.argv if a not in f2py_flags2]

    # Fortran compiler selection flags (passed to build_ext)...
    flib_flags = [a for a in sys.argv[1:] if re.compile(
        r'[-][-]((f(90)?compiler([-]exec|)|compiler)=|help[-]compiler)'
    ).match(a)]
    sys.argv = [a for a in sys.argv if a not in flib_flags]
    # ...and Fortran compiler option flags (passed to config_fc).
    fc_flags = [a for a in sys.argv[1:] if re.compile(
        r'[-][-]((f(77|90)(flags|exec)|opt|arch)=|(debug|noopt|noarch|help[-]fcompiler))'
    ).match(a)]
    sys.argv = [a for a in sys.argv if a not in fc_flags]

    if 1:
        # Normalise --fcompiler= vendor names against the known classes.
        del_list = []
        for s in flib_flags:
            v = '--fcompiler='
            if s[:len(v)] == v:
                from numpy.distutils import fcompiler
                fcompiler.load_all_fcompiler_classes()
                allowed_keys = fcompiler.fcompiler_class.keys()
                nv = ov = s[len(v):].lower()
                if ov not in allowed_keys:
                    vmap = {}  # XXX
                    try:
                        nv = vmap[ov]
                    except KeyError:
                        if ov not in vmap.values():
                            print('Unknown vendor: "%s"' % (s[len(v):]))
                        nv = ov
                i = flib_flags.index(s)
                flib_flags[i] = '--fcompiler=' + nv
                continue
        for s in del_list:
            i = flib_flags.index(s)
            del flib_flags[i]
    assert len(flib_flags) <= 2, repr(flib_flags)

    setup_flags = [a for a in sys.argv[1:]
                   if re.compile(r'[-][-](verbose)').match(a)]
    sys.argv = [a for a in sys.argv if a not in setup_flags]
    if '--quiet' in f2py_flags:
        setup_flags.append('--quiet')

    modulename = 'untitled'
    sources = sys.argv[1:]

    for optname in ['--include_paths', '--include-paths']:
        if optname in sys.argv:
            i = sys.argv.index(optname)
            f2py_flags.extend(sys.argv[i:i + 2])
            del sys.argv[i + 1], sys.argv[i]
            sources = sys.argv[1:]

    if '-m' in sys.argv:
        i = sys.argv.index('-m')
        modulename = sys.argv[i + 1]
        del sys.argv[i + 1], sys.argv[i]
        sources = sys.argv[1:]
    else:
        # No -m: derive the module name from the first .pyf signature file.
        from numpy.distutils.command.build_src import get_f2py_modulename
        pyf_files, sources = filter_files('', '[.]pyf([.]src|)', sources)
        sources = pyf_files + sources
        for f in pyf_files:
            modulename = get_f2py_modulename(f)
            if modulename:
                break

    # Split remaining args into objects, -I/-L/-l/-U/-D groups and sources.
    extra_objects, sources = filter_files('', '[.](o|a|so)', sources)
    include_dirs, sources = filter_files('-I', '', sources, remove_prefix=1)
    library_dirs, sources = filter_files('-L', '', sources, remove_prefix=1)
    libraries, sources = filter_files('-l', '', sources, remove_prefix=1)
    undef_macros, sources = filter_files('-U', '', sources, remove_prefix=1)
    define_macros, sources = filter_files('-D', '', sources, remove_prefix=1)

    using_numarray = 0
    using_numeric = 0
    for i in range(len(define_macros)):
        name_value = define_macros[i].split('=', 1)
        if len(name_value) == 1:
            name_value.append(None)
        if len(name_value) == 2:
            define_macros[i] = tuple(name_value)
        else:
            print('Invalid use of -D:', name_value)

    from numpy.distutils.system_info import get_info

    num_include_dir = None
    num_info = {}
    # import numpy
    # n = 'numpy'
    # p = get_prefix(numpy)
    # from numpy.distutils.misc_util import get_numpy_include_dirs
    # num_info = {'include_dirs': get_numpy_include_dirs()}
    if num_info:
        include_dirs.extend(num_info.get('include_dirs', []))

    from numpy.distutils.core import setup, Extension
    ext_args = {
        'name': modulename,
        'sources': sources,
        'include_dirs': include_dirs,
        'library_dirs': library_dirs,
        'libraries': libraries,
        'define_macros': define_macros,
        'undef_macros': undef_macros,
        'extra_objects': extra_objects,
        'f2py_options': f2py_flags,
    }

    if sysinfo_flags:
        from numpy.distutils.misc_util import dict_append
        for n in sysinfo_flags:
            i = get_info(n)
            if not i:
                outmess('No %s resources found in system'
                        ' (try `f2py --help-link`)\n' % (repr(n)))
            dict_append(ext_args, **i)

    ext = Extension(**ext_args)

    # Hand the remaining work to distutils' build machinery.
    sys.argv = [sys.argv[0]] + setup_flags
    sys.argv.extend([
        'build', '--build-temp', build_dir, '--build-base', build_dir,
        '--build-platlib', '.'
    ])
    if fc_flags:
        sys.argv.extend(['config_fc'] + fc_flags)
    if flib_flags:
        sys.argv.extend(['build_ext'] + flib_flags)

    setup(ext_modules=[ext])

    if remove_build_dir and os.path.exists(build_dir):
        import shutil
        outmess('Removing build directory %s\n' % (build_dir))
        shutil.rmtree(build_dir)
inc_dirs = [netCDF4_includedir, HDF5_includedir] # add szip to link if desired. if szip_libdir is None and szip_dir is not None: szip_libdir = os.path.join(szip_dir, 'lib') if szip_incdir is None and szip_dir is not None: szip_incdir = os.path.join(szip_dir, 'include') if szip_incdir is not None and szip_libdir is not None: libs.append('sz') lib_dirs.append(szip_libdir) inc_dirs.append(szip_incdir) extensions = [ Extension("netCDF4", ["netCDF4.c"], libraries=libs, library_dirs=lib_dirs, include_dirs=inc_dirs, runtime_library_dirs=lib_dirs) ] setup( name="netCDF4", version="1.0.4", long_description= "netCDF version 4 has many features not found in earlier versions of the library, such as hierarchical groups, zlib compression, multiple unlimited dimensions, and new data types. It is implemented on top of HDF5. This module implements most of the new features, and can read and write netCDF files compatible with older versions of the library. The API is modelled after Scientific.IO.NetCDF, and should be familiar to users of that module.\n\nThis project has a `Subversion repository <http://code.google.com/p/netcdf4-python/source>`_ where you may access the most up-to-date source.", author="Jeff Whitaker", author_email="*****@*****.**", url= "http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html", download_url="http://code.google.com/p/netcdf4-python/downloads/list", scripts=['utils/nc3tonc4', 'utils/nc4tonc3'],
#!/usr/bin/env python
"""
usage: python setup.py build_ext --inplace
"""
# FIX: the Python 2-only `commands` module (removed in Python 3) has been
# replaced by subprocess.getstatusoutput, and the Python 2 `print`
# statement by the print() function. Behaviour is otherwise unchanged:
# regenerate the SWIG C++ wrapper, then build it as an extension.
import subprocess

flag = subprocess.getstatusoutput('swig -c++ -python splinalg.i')
if flag[0] != 0:
    print(flag)
    exit()

from numpy.distutils.core import setup, Extension

splinalg_module = Extension(
    '_splinalg',
    sources=['splinalg_wrap.cxx'],
    define_macros=[('__STDC_FORMAT_MACROS', 1)],
)

setup(
    name='splinalg',
    version='0.1',
    author="Luke Olson",
    description="""basic sparse linear algebra""",
    ext_modules=[splinalg_module],
    py_modules=["splinalg"],
)

# def configuration(parent_package='',top_path=None):
#     from numpy.distutils.misc_util import Configuration
#
#     config = Configuration()
#
from numpy.distutils.core import setup, Extension import os, sys, subprocess # build fortran library if it does not yet exist. if not os.path.isfile('src/libbufr.a'): strg = 'cd src; sh makebufrlib.sh' sys.stdout.write('executing "%s"\n' % strg) subprocess.call(strg, shell=True) # interface for NCEP bufrlib. ext_bufrlib = Extension(name='_bufrlib', sources=['src/_bufrlib.pyf'], libraries=['bufr'], library_dirs=['src']) if __name__ == "__main__": setup( name='py-ncepbufr', version="1.1.0", description="Python interface to NCEP bufrlib", author="Jeff Whitaker", author_email="*****@*****.**", url="http://github.com/jswhit/py-ncepbufr", ext_modules=[ext_bufrlib], packages=['ncepbufr'], scripts=['utils/prepbufr2nc'], )
libraries += ["sz"] else: extra_compile_args = ["-DNOSZIP"] if sys.platform == 'win32': libraries += ["libjpeg", "zlib", "ws2_32"] else: libraries += ["jpeg", "z"] if not compress: extra_compile_args += ["-DNOCOMPRESS"] _hdfext = Extension( 'pyhdf._hdfext', sources=["pyhdf/hdfext_wrap.c"], include_dirs=include_dirs, extra_compile_args=extra_compile_args, library_dirs=library_dirs, extra_link_args=extra_link_args, libraries=libraries, ) if sys.platform == 'win32': data_files = [("pyhdf", [dll_path + x for x in ["mfhdf.dll", "hdf.dll"]])] else: data_files = [] setup( name='pyhdf', maintainer='pyhdf authors', author='Andre Gosselin et al.', description=DOCLINES[0],
# CFLAGS for pyMAP if platform.system() == 'Windows': # For Anaconda pymapArgs = ['-O1', '-m64', '-fPIC', '-std=c99', '-DCMINPACK_NO_DLL'] elif sys.platform == 'cygwin': pymapArgs = ['-O1', '-m64', '-fPIC', '-std=c99'] elif platform.system() == 'Darwin': pymapArgs = ['-O1', '-m64', '-fno-omit-frame-pointer', '-fPIC'] #, '-std=c99'] else: #pymapArgs = ['-O1', '-m64', '-fPIC', '-std=c99', '-D WITH_LAPACK'] pymapArgs = ['-O1', '-m64', '-fPIC', '-std=c99'] # All the extensions bemExt = Extension('wisdem.ccblade._bem', sources=[os.path.join('wisdem', 'ccblade', 'bem.f90')], extra_compile_args=['-O2', '-fPIC']) pyframeExt = Extension('wisdem.pyframe3dd._pyframe3dd', sources=glob.glob( os.path.join('wisdem', 'pyframe3dd', 'src', '*.c'))) precompExt = Extension( 'wisdem.rotorse._precomp', sources=[os.path.join('wisdem', 'rotorse', 'PreCompPy.f90')], extra_compile_args=['-O2', '-fPIC']) pbeamExt = Extension('wisdem.pBeam._pBEAM', sources=glob.glob( os.path.join('wisdem', 'pBeam', 'src', '*.cpp')), extra_compile_args=pbeamArgs, include_dirs=[os.path.join('wisdem', 'include')]) pymapExt = Extension( 'wisdem.pymap._libmap',
print( ' Some of the core phoebe functionality will be missing until you install those dependencies.' ) class PhoebeBuildCommand(build_py): def run(self): build_py.run(self) self.run_command('build_ext') self.run_command('check_imports') ext_modules = [ Extension('libphoebe', sources=['./phoebe/lib/libphoebe.cpp'], language='c++', extra_compile_args=["-std=c++11"], include_dirs=[numpy.get_include()]), Extension('phoebe.algorithms.ceclipse', language='c++', sources=['phoebe/algorithms/ceclipse.cpp'], include_dirs=[numpy.get_include()]), ] # # Main setup # setup(name='phoebe', version='2.1.15', description='PHOEBE 2.1.15', author='PHOEBE development team',
ext_args.update(ANT_EXT_ARGS) #----------------------------------------------------------------------------# ### SETUP #################################################################### s_args = { 'name': 'HASHpy', 'version': '0.6.0', 'description': 'Routines for running HASH algorithms', 'author': 'Mark Williams', 'url': 'https//github.com/markcwill/hashpy', 'packages': ['hashpy', 'hashpy.io', 'hashpy.plotting'], 'package_data': { 'hashpy': ['src/*.inc', 'src/Makefile', 'data/*', 'scripts/*', 'src/*.f'] }, 'ext_modules': [Extension('hashpy.libhashpy', **ext_args)], } ############################################################################## # hashpy.db -----------------------------------------------------------------# # TODO: OBSELETED - break out to separate module/package so hashpy can # stand alone. # # copy pf and bins from hashpy.db to antelope if available if 'ANTELOPE' in os.environ: ant_bin = os.path.join(os.environ['ANTELOPE'], 'bin') ant_pf = os.path.join(os.environ['ANTELOPE'], 'data', 'pf') s_args['data_files'] = [(ant_bin, ['hashpy/scripts/dbhash']), (ant_pf, ['hashpy/data/dbhash.pf'])] #----------------------------------------------------------------------------#
#!/usr/bin/env python
"""Builds the tsygFort f2py wrapper around the Tsyganenko geomagnetic
field model Fortran sources."""
from numpy.distutils.core import setup, Extension

# One extension: the geopack signature file plus geopack, T96 and T02 models.
ext = Extension('tsyganenko.tsygFort',
                sources=['tsyganenko/geopack08.pyf',
                         'tsyganenko/geopack08.for',
                         'tsyganenko/T96.f',
                         'tsyganenko/T02.f'])

setup(
    name="Tsyganenko",
    version="0.1",
    description="wrapper to call fortran routines from the Tsyganenko models",
    author="Sebastien de Larquier",
    author_email="*****@*****.**",
    url="",
    long_description="""
For more information on the Tsyganenko gemagnetic field models, go to http://ccmc.gsfc.nasa.gov/models/modelinfo.php?model=Tsyganenko%20Model
""",
    packages=['tsyganenko'],
    ext_modules=[ext],
    keywords=['Scientific/Space'],
    classifiers=["Programming Language :: Python/Fortran"],
)
from setuptools import find_packages from numpy.distutils.core import Extension, setup with open('README.md', encoding='utf-8') as f: long_description = f.read() # f_sources = ['harmonization/glmnet/glmnet.pyf', # 'harmonization/glmnet/glmnet.f'] f_sources = ['harmonization/glmnet/glmnet.f'] fflags = ['-fdefault-real-8', '-ffixed-form', '-O3', '-fPIC', '-shared'] module = Extension('harmonization._glmnet', sources=f_sources, extra_f77_compile_args=fflags, extra_f90_compile_args=fflags) setup( name='harmonization', version='0.1', author='Samuel St-Jean', author_email='*****@*****.**', packages=find_packages(), # scripts=['scripts/dpr', 'scripts/dpr_make_fancy_graph'], url='https://github.com/samuelstjean/harmonization', license='GPL2', description= 'Implementation of "Harmonization of diffusion MRI datasets with adaptive dictionary learning".', long_description=long_description, long_description_content_type='text/markdown',
"""Builds the topo3d and asteroids Fortran extensions for the pcc package."""
from numpy.distutils.core import Extension, setup

# Thermal-conduction routines on 3D topography.
_topo_ext = Extension(
    name='topo3d',
    sources=['Topo3D/conductionQ2.f90',
             'Topo3D/conductionT2.f90'],
)

# Asteroid surface-evolution model.
_asteroid_ext = Extension(
    name='asteroids',
    sources=['Asteroids/asteroid_fast1.f90'],
)

ext_modules = [_topo_ext, _asteroid_ext]

setup(
    name='pcc',
    author='Norbert Schorghofer',
    ext_modules=ext_modules,
)