def configuration(parent_package="", top_path=None):
    """Build configuration for the ``math`` subpackage.

    Declares the ``test`` and ``fit`` subpackages plus three extensions:
    chistogramnd, chistogramnd_lut (C) and marchingcubes (C++).
    """
    config = Configuration("math", parent_package, top_path)
    config.add_subpackage("test")
    config.add_subpackage("fit")
    # histogramnd: Cython wrapper plus its C implementation
    histo_dir = "histogramnd"
    histo_src = [os.path.join(histo_dir, srcf) for srcf in ["chistogramnd.pyx", "src/histogramnd_c.c"]]
    histo_inc = [os.path.join(histo_dir, "include"), numpy.get_include()]
    config.add_extension("chistogramnd", sources=histo_src, include_dirs=histo_inc, language="c")
    # histogramnd_lut: pure-Cython source, same include directories
    histo_dir = "histogramnd"
    histo_src = [os.path.join(histo_dir, srcf) for srcf in ["chistogramnd_lut.pyx"]]
    histo_inc = [os.path.join(histo_dir, "include"), numpy.get_include()]
    config.add_extension("chistogramnd_lut", sources=histo_src, include_dirs=histo_inc, language="c")
    # marching cubes: C++ extension (Cython wrapper + lookup-table source)
    mc_dir = "marchingcubes"
    mc_src = [os.path.join(mc_dir, srcf) for srcf in ["marchingcubes.pyx", "mc_lut.cpp"]]
    config.add_extension("marchingcubes", sources=mc_src, include_dirs=[mc_dir, numpy.get_include()], language="c++")
    return config
def Extension(name, source=None, can_use_openmp=False, extra_sources=None, **kwargs):
    """
    Wrapper for distutils' Extension.

    Builds an extension from ``src/<source>.pyx`` (or ``.c`` when Cython is
    unavailable), always adding numpy's and the local ``src`` include dirs,
    and optionally adding the OpenMP compile/link flag.  When USE_CYTHON is
    set the extension is cythonized before being returned.
    """
    if source is None:
        source = name
    # Pick the Cython source when Cython is available, the pre-generated C otherwise.
    cython_c_ext = ".pyx" if USE_CYTHON else ".c"
    sources = [os.path.join("src", source + cython_c_ext)]
    if extra_sources:
        sources.extend(extra_sources)
    # Merge caller-supplied include dirs with the mandatory ones (set dedupes).
    if "include_dirs" in kwargs:
        include_dirs = set(kwargs.pop("include_dirs"))
        include_dirs.add(numpy.get_include())
        include_dirs.add("src")
        include_dirs = list(include_dirs)
    else:
        include_dirs = ["src", numpy.get_include()]
    # USE_OPENMP holds the compiler flag itself (falsy when OpenMP is disabled).
    if can_use_openmp and USE_OPENMP:
        extra_compile_args = set(kwargs.pop("extra_compile_args", []))
        extra_compile_args.add(USE_OPENMP)
        kwargs["extra_compile_args"] = list(extra_compile_args)
        extra_link_args = set(kwargs.pop("extra_link_args", []))
        extra_link_args.add(USE_OPENMP)
        kwargs["extra_link_args"] = list(extra_link_args)
    ext = _Extension(name=name, sources=sources, include_dirs=include_dirs, **kwargs)
    if USE_CYTHON:
        cext = cythonize([ext], compile_time_env={"HAVE_OPENMP": bool(USE_OPENMP)})
        if cext:
            ext = cext[0]
    return ext
def rmsd_extensions():
    """Build the mdtraj RMSD extension pair.

    Returns ``(rmsd, lprmsd)``; both share the OpenMP/SSE2/SSE3/optimization
    flags and the OpenMP libraries from the ``compiler`` helper.
    """
    compiler_args = (compiler.compiler_args_openmp + compiler.compiler_args_sse2 + compiler.compiler_args_sse3 + compiler.compiler_args_opt)
    compiler_libraries = compiler.compiler_libraries_openmp
    # Plain-C Theobald RMSD kernel with its Cython wrapper.
    rmsd = Extension('mdtraj._rmsd',
                     sources=[
                         'MDTraj/rmsd/src/theobald_rmsd.c',
                         'MDTraj/rmsd/src/rotation.c',
                         'MDTraj/rmsd/src/center.c',
                         'MDTraj/rmsd/_rmsd.pyx'],
                     include_dirs=[
                         'MDTraj/rmsd/include', numpy.get_include()],
                     extra_compile_args=compiler_args,
                     libraries=compiler_libraries)
    # C++ variant adding the Munkres / permutation sources on top of the
    # same RMSD kernels.
    lprmsd = Extension('mdtraj._lprmsd',
                       sources=[
                           'MDTraj/rmsd/src/theobald_rmsd.c',
                           'MDTraj/rmsd/src/rotation.c',
                           'MDTraj/rmsd/src/center.c',
                           'MDTraj/rmsd/src/fancy_index.cpp',
                           'MDTraj/rmsd/src/Munkres.cpp',
                           'MDTraj/rmsd/src/euclidean_permutation.cpp',
                           'MDTraj/rmsd/_lprmsd.pyx'],
                       language='c++',
                       include_dirs=[
                           'MDTraj/rmsd/include', numpy.get_include()],
                       extra_compile_args=compiler_args,
                       libraries=compiler_libraries + extra_cpp_libraries)
    return rmsd, lprmsd
def initialize_options(self):
    """Initialize build_ext options, injecting numpy's include directory.

    BUG FIX: the original did ``self.include_dirs += get_include()`` — when
    ``include_dirs`` is already a list, ``+=`` with a *string* extends the
    list with the path's individual characters.  Both branches now add the
    path wrapped in a list, and a pre-existing string value (distutils
    allows an os.pathsep-separated string) is normalized to a list first.
    """
    from numpy import get_include
    _build_ext.initialize_options(self)
    if self.include_dirs is None:
        self.include_dirs = [get_include()]
    elif isinstance(self.include_dirs, str):
        # Normalize the string form before appending.
        self.include_dirs = [self.include_dirs, get_include()]
    else:
        self.include_dirs += [get_include()]
def geometry_extensions():
    """Build the mdtraj geometry extension modules.

    Returns the ``_geometry`` (C++) and ``drid`` (C) extensions, compiled
    with the SSE2/SSE3/SSE4.1 and optimization flags from the ``compiler``
    helper and the SSE4.1 define macros.
    """
    # BUG FIX: compiler.compiler_args_sse41 was concatenated twice in the
    # original, duplicating the SSE4.1 flags on the compile command line.
    compiler_args = (compiler.compiler_args_sse2 + compiler.compiler_args_sse3 + compiler.compiler_args_sse41 + compiler.compiler_args_opt)
    define_macros = compiler.define_macros_sse41
    return [
        Extension('mdtraj.geometry._geometry',
                  sources=['MDTraj/geometry/src/geometry.c',
                           'MDTraj/geometry/src/sasa.c',
                           'MDTraj/geometry/src/dssp.cpp',
                           'MDTraj/geometry/src/_geometry.pyx'],
                  include_dirs=['MDTraj/geometry/include',
                                'MDTraj/geometry/src/kernels',
                                numpy.get_include()],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args,
                  libraries=extra_cpp_libraries,
                  language='c++'),
        Extension('mdtraj.geometry.drid',
                  sources=["MDTraj/geometry/drid.pyx",
                           "MDTraj/geometry/src/dridkernels.c",
                           "MDTraj/geometry/src/cephes/cbrt.c",
                           "MDTraj/geometry/src/cephes/isnan.c",
                           "MDTraj/geometry/src/moments.c"],
                  include_dirs=["MDTraj/geometry/include",
                                "MDTraj/geometry/include/cephes",
                                numpy.get_include()],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args)
    ]
def build_sps(ext_modules):
    """Append the spslut extension (and, off Windows, the sps shared-memory
    extension) to *ext_modules*.

    Uses pthread flags on Linux; relies on the module-level ``define_macros``.
    """
    if platform.system() == 'Linux':
        extra_compile_args = ['-pthread']
    elif platform.system() == 'SunOS':
        extra_compile_args = []
    else:
        extra_compile_args = []
    module = Extension(name='PyMca5.spslut',
                       sources=['PyMca5/PyMcaIO/sps/Src/sps_lut.c',
                                'PyMca5/PyMcaIO/sps/Src/spslut_py.c'],
                       define_macros=define_macros,
                       extra_compile_args=extra_compile_args,
                       include_dirs=['PyMca5/PyMcaIO/sps/Include',
                                     numpy.get_include()])
    ext_modules.append(module)
    # The sps shared-memory module is not built on Windows.
    if sys.platform != "win32":
        module = (Extension(name='PyMca5.PyMcaIO.sps',
                            sources=['PyMca5/PyMcaIO/sps/Src/sps.c',
                                     'PyMca5/PyMcaIO/sps/Src/sps_py.c'],
                            define_macros=define_macros,
                            extra_compile_args=extra_compile_args,
                            include_dirs=['PyMca5/PyMcaIO/sps/Include',
                                          numpy.get_include()]))
        ext_modules.append(module)
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``neighbors`` subpackage
    (ball_tree, kd_tree, dist_metrics, typedefs)."""
    import numpy
    from numpy.distutils.misc_util import Configuration
    config = Configuration('neighbors', parent_package, top_path)
    libraries = []
    # libm for the C math routines on POSIX platforms.
    if os.name == 'posix':
        libraries.append('m')
    config.add_extension('ball_tree',
                         sources=['ball_tree.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension('kd_tree',
                         sources=['kd_tree.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    # dist_metrics additionally needs numpy's own 'numpy' header subdirectory.
    config.add_extension('dist_metrics',
                         sources=['dist_metrics.c'],
                         include_dirs=[numpy.get_include(),
                                       os.path.join(numpy.get_include(), 'numpy')],
                         libraries=libraries)
    config.add_extension('typedefs',
                         sources=['typedefs.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_subpackage('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``csgraph`` subpackage.

    Registers the tests data dir and four C extensions, each built from a
    single pre-generated C source with numpy's headers on the include path.
    """
    import numpy
    from numpy.distutils.misc_util import Configuration
    config = Configuration('csgraph', parent_package, top_path)
    config.add_data_dir('tests')
    # All four extensions share identical build settings.
    for ext_name in ('_shortest_path', '_traversal', '_min_spanning_tree', '_tools'):
        config.add_extension(ext_name,
                             sources=[ext_name + '.c'],
                             include_dirs=[numpy.get_include()])
    return config
def configuration(parent_package="", top_path=None):
    """Build configuration for the sklearnef ``tree`` subpackage.

    Refuses to build unless a pre-generated _tree.c matching the installed
    scikit-learn version is present under headers/<version>/.
    """
    config = Configuration("tree", parent_package, top_path)
    libraries = []
    if os.name == 'posix':
        libraries.append('m')
    # check for pre-compiled versions for the encountered sklearn version
    if not os.path.isdir("{}/headers/{}".format(os.path.dirname(os.path.realpath(__file__)), sklearn.__version__)) or \
       not os.path.isfile("{}/headers/{}/_tree.c".format(os.path.dirname(os.path.realpath(__file__)), sklearn.__version__)):
        raise Exception(\
"""sklearnef holds no pre-compiled _tree.c for your current scikit-learn version ({version}). Please download the corresponding header file from \
https://raw.githubusercontent.com/scikit-learn/scikit-learn/{version}/sklearn/tree/_tree.pxd, place it in sklearnef/tree/headers/sklearn/tree/ and compile _tree.pyx with cython using \
'cython _tree.pyx -o headers/{version}/_tree.c -I headers/'. Then re-run \
the installation of sklearnef.""".format(version=sklearn.__version__))
    # _diffentropy links LAPACK/BLAS in addition to libm.
    config.add_extension("_diffentropy",
                         sources=["headers/_diffentropy.c"],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries + ['lapack', 'blas'],
                         extra_compile_args=["-O3"])
    # _tree is built from the version-matched pre-generated C source.
    config.add_extension("_tree",
                         sources=["headers/{version}/_tree.c".format(version=sklearn.__version__)],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries,
                         extra_compile_args=["-O3"])
    config.add_subpackage("tests")
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for ``iced`` (utils/io subpackages plus the
    _normalization_ and _filter_ C extensions)."""
    from numpy.distutils.misc_util import Configuration
    libraries = []
    if os.name == 'posix':
        libraries.append('m')
    config = Configuration('iced', parent_package, top_path)
    config.add_subpackage('utils')
    config.add_subpackage("io")
    # Both extensions compile against the bundled cblas headers.
    config.add_extension(
        '_normalization_',
        libraries=libraries,
        sources=['_normalization_.c'],
        include_dirs=[join('..', 'src', 'cblas'), numpy.get_include()])
    config.add_extension(
        '_filter_',
        libraries=libraries,
        sources=['_filter_.c'],
        include_dirs=[join('..', 'src', 'cblas'), numpy.get_include()])
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for ``cluster`` (_hierarchical and the
    CBLAS-linked _k_means)."""
    from numpy.distutils.misc_util import Configuration
    cblas_libs, blas_info = get_blas_info()
    libraries = []
    if os.name == 'posix':
        cblas_libs.append('m')
        libraries.append('m')
    config = Configuration('cluster', parent_package, top_path)
    config.add_extension('_hierarchical',
                         sources=['_hierarchical.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension(
        '_k_means',
        libraries=cblas_libs,
        sources=['_k_means.c'],
        # blas_info entries are popped so the remainder can be splatted as kwargs.
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include(),
                      blas_info.pop('include_dirs', [])],
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info
    )
    return config
def main():
    """Run setuptools' setup() for the GLU distribution.

    Builds the C extensions plus the Cython modules returned by
    cython_modules() and the glmnet extension from glmnet_config().
    """
    setup(name = 'glu',
          version = get_version(),
          author = 'Kevin Jacobs',
          author_email = '*****@*****.**',
          maintainer = 'Kevin Jacobs',
          maintainer_email = '*****@*****.**',
          platforms = ['any'],
          description = 'Genotype Library and Utilities (GLU)',
          long_description = ('Genotype Library and Utilities (GLU): Tools for the management of large '
                              'amounts of SNP genotype data and programs to check its quality and to '
                              'test for association between SNP markers with continuous or discrete '
                              'trait phenotypes.'),
          # NOTE(review): filter() returns a lazy iterator on Python 3 —
          # this pattern suggests Python 2-era code; confirm before porting.
          classifiers = filter(None, classifiers.split('\n')),
          install_requires = requires,
          packages = find_packages(),
          include_package_data = True,
          scripts = ['bin/glu'],
          zip_safe = False,
          test_suite = 'nose.collector',
          ext_modules = [ Extension('glu.lib.genolib.bitarrayc',
                                    sources = ['glu/lib/genolib/bitarrayc.c']),
                          Extension('glu.lib.genolib._genoarray',
                                    sources = ['glu/lib/genolib/_genoarray.c',
                                               'glu/lib/genolib/bitarrayc.c',
                                               'glu/lib/genolib/_ibs.c',
                                               'glu/lib/genolib/_ld.c'],
                                    include_dirs = [np.get_include()]),
                          Extension('glu.modules.struct._admix',
                                    sources = ['glu/modules/struct/_admix.c'],
                                    include_dirs = [np.get_include()]),
                          Extension('glu.modules.ld.pqueue',
                                    sources = ['glu/modules/ld/pqueue.c']),
                          glmnet_config(),
                        ] + cython_modules(),
          entry_points={
            'console_scripts' : ['glu = glu.lib.glu_launcher:main'],
          }
          )
def cython_modules():
    """Return the GLU Cython extension list.

    Cythonizes the .pyx sources when Cython is importable; otherwise falls
    back to the shipped, pre-generated C files for the same modules.
    """
    try:
        from Cython.Build import cythonize
        ext = [ Extension('glu.lib._illumina',
                          sources = ['glu/lib/_illumina.pyx']),
                Extension('glu.lib.genolib.helpers',
                          sources = ['glu/lib/genolib/helpers.pyx'],
                          include_dirs = [np.get_include()]),
                Extension('glu.lib.seqlib._cigar',
                          sources = ['glu/lib/seqlib/_cigar.pyx']),
                Extension('glu.lib.seqlib._edits',
                          sources = ['glu/lib/seqlib/_edits.pyx']),
                Extension('glu.lib.seqlib.gc',
                          sources = ['glu/lib/seqlib/gc.pyx']),
                Extension('glu.lib.seqlib.intervaltree',
                          sources = ['glu/lib/seqlib/intervaltree.pyx']),
              ]
        ext = cythonize(ext)
    # Fall back to using pre-generated C files
    except ImportError:
        ext = [ Extension('glu.lib._illumina',
                          sources = ['glu/lib/_illumina.c']),
                Extension('glu.lib.genolib.helpers',
                          sources = ['glu/lib/genolib/helpers.c'],
                          include_dirs = [np.get_include()]),
                Extension('glu.lib.seqlib._cigar',
                          sources = ['glu/lib/seqlib/_cigar.c']),
                Extension('glu.lib.seqlib._edits',
                          sources = ['glu/lib/seqlib/_edits.c']),
                Extension('glu.lib.seqlib.gc',
                          sources = ['glu/lib/seqlib/gc.c']),
                Extension('glu.lib.seqlib.intervaltree',
                          sources = ['glu/lib/seqlib/intervaltree.c']),
              ]
    return ext
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``io`` subpackage.

    Builds the RSL interface only when an RSL installation is found at
    $RSL_PATH (or a guessed location); warns and skips it otherwise.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    config = Configuration('io', parent_package, top_path)
    config.add_data_dir('tests')
    # determine and verify the RSL location
    rsl_path = os.environ.get('RSL_PATH')
    if rsl_path is None:
        rsl_path = guess_rsl_path()
    rsl_lib_path = os.path.join(rsl_path, 'lib')
    rsl_include_path = os.path.join(rsl_path, 'include')
    # build the RSL interface if RSL is installed
    if check_rsl_path(rsl_lib_path, rsl_include_path):
        config.add_extension(
            '_rsl_interface',
            sources=['_rsl_interface.c'],
            libraries=['rsl'],
            library_dirs=[rsl_lib_path],
            include_dirs=[rsl_include_path] + [get_include()],
            # rpath so the extension finds librsl at run time.
            runtime_library_dirs=[rsl_lib_path])
    else:
        import warnings
        warnings.warn(RSL_MISSING_WARNING % (rsl_path))
    config.add_extension('_sigmetfile',
                         sources=['_sigmetfile.c'],
                         include_dirs=[get_include()])
    config.add_extension('nexrad_interpolate',
                         sources=['nexrad_interpolate.c'],
                         include_dirs=[get_include()])
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``utils`` subpackage and its C/C++ extensions."""
    import numpy
    from numpy.distutils.misc_util import Configuration
    config = Configuration('utils', parent_package, top_path)
    config.add_subpackage('sparsetools')
    cblas_libs, blas_info = get_blas_info()
    libraries = []
    if os.name == 'posix':
        libraries.append('m')
        cblas_libs.append('m')
    config.add_extension('arraybuilder', sources=['arraybuilder.c'])
    config.add_extension('sparsefuncs',
                         sources=['sparsefuncs.c'],
                         libraries=libraries)
    # arrayfuncs needs CBLAS; blas_info entries are popped so the remainder
    # can be splatted as extra keyword arguments.
    config.add_extension('arrayfuncs',
                         sources=['arrayfuncs.c'],
                         depends=[join('src', 'cholesky_delete.h')],
                         libraries=cblas_libs,
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info
                         )
    config.add_extension(
        'murmurhash',
        sources=['murmurhash.c', join('src', 'MurmurHash3.cpp')],
        include_dirs=['src'])
    config.add_extension('lgamma',
                         sources=['lgamma.cpp', join('src', 'Gamma.cpp')],
                         include_dirs=['src'],
                         libraries=libraries)
    config.add_extension('graph_shortest_path',
                         sources=['graph_shortest_path.c'],
                         include_dirs=[numpy.get_include()])
    config.add_extension('seq_dataset',
                         sources=['seq_dataset.c'],
                         include_dirs=[numpy.get_include()])
    config.add_extension('weight_vector',
                         sources=['weight_vector.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension("random",
                         sources=["random.c"],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    return config
def configuration(parent_package="", top_path=None):
    """Build configuration for the ``tree`` subpackage.

    Compiles the four Cython extensions with -O3, linking libm on POSIX,
    and registers the tests subpackage.
    """
    config = Configuration("tree", parent_package, top_path)

    # libm for C math routines on POSIX platforms.
    libraries = []
    if os.name == 'posix':
        libraries.append('m')

    # All four extensions share identical build settings.
    for ext_name in ("_tree", "_splitter", "_criterion", "_utils"):
        config.add_extension(ext_name,
                             sources=[ext_name + ".pyx"],
                             include_dirs=[numpy.get_include()],
                             libraries=libraries,
                             extra_compile_args=["-O3"])

    config.add_subpackage("tests")
    return config
def main():
    """Package and install disvis, cythonizing libdisvis when Cython is available."""
    packages = ['disvis', 'disvis.IO']
    requirements = ['numpy']
    # Use the .pyx source when Cython is present, the shipped .c otherwise.
    ext = '.pyx' if CYTHON else '.c'
    ext_modules = [Extension("disvis.libdisvis",
                             [join("src", "libdisvis" + ext)],
                             include_dirs = [numpy.get_include()],
                             )]
    cmdclass = {}
    if CYTHON:
        ext_modules = cythonize(ext_modules)
        cmdclass = {'build_ext' : build_ext}
    package_data = {'disvis': [join('data', '*.npy'), join('kernels', '*.cl')]}
    scripts = [join('scripts', 'disvis')]
    setup(name="disvis",
          version='1.0.1',
          description='Quantifying and visualizing the interaction space of distance-constrainted macromolecular complexes',
          author='Gydo C.P. van Zundert',
          author_email='*****@*****.**',
          packages=packages,
          cmdclass=cmdclass,
          ext_modules=ext_modules,
          package_data=package_data,
          scripts=scripts,
          requires=requirements,
          include_dirs=[numpy.get_include()],
          )
def configuration(parent_package='', top_path=None):
    """Compile the Cython extensions of the ``cy`` subpackage.

    When the QUTIP_RELEASE environment variable is 'TRUE', build from the
    shipped .c sources; otherwise build from the .pyx sources (both paths
    are cythonized at the end, as in the original).
    """
    # compiles files during installation
    from numpy.distutils.misc_util import Configuration
    config = Configuration('cy', parent_package, top_path)
    exts = ['spmatfuncs', 'stochastic', 'sparse_utils', 'graph_utils']
    # BUG FIX: os.environ['QUTIP_RELEASE'] raised KeyError when the variable
    # was unset; .get() defaults to a non-release (.pyx) build.
    src_ext = ".c" if os.environ.get('QUTIP_RELEASE') == 'TRUE' else ".pyx"
    # BUG FIX: the compile flags were passed as one space-separated string,
    # which distutils forwards to the compiler as a single bogus argument;
    # they must be individual list items.
    compile_flags = ['-w', '-ffast-math', '-O3', '-march=native', '-mfpmath=sse']
    for ext in exts:
        config.add_extension(ext,
                             sources=[ext + src_ext],
                             include_dirs=[np.get_include()],
                             extra_compile_args=compile_flags,
                             extra_link_args=[])
    config.ext_modules = cythonize(config.ext_modules)
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for ``cluster`` (_inertia and the CBLAS-linked _k_means)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    blas_info = get_info('blas_opt', 0)
    # Fall back to the bundled cblas when no optimized BLAS was detected,
    # or when only a degraded ATLAS (NO_ATLAS_INFO) was found.
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])
    config = Configuration('cluster', parent_package, top_path)
    config.add_extension(
        '_inertia',
        sources=['_inertia.c'],
        include_dirs=[numpy.get_include()],
    )
    config.add_extension(
        '_k_means',
        libraries=cblas_libs,
        sources=['_k_means.c'],
        # blas_info entries are popped so the remainder can be splatted as kwargs.
        include_dirs=[join('..', 'src', 'cblas'),
                      numpy.get_include(),
                      blas_info.pop('include_dirs', [])],
        extra_compile_args=blas_info.pop('extra_compile_args', []),
        **blas_info
    )
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``ndimage`` subpackage's C extensions."""
    config = Configuration('ndimage', parent_package, top_path)
    config.add_extension("_nd_image",
                         sources=["src/nd_image.c", "src/ni_filters.c",
                                  "src/ni_fourier.c", "src/ni_interpolation.c",
                                  "src/ni_measure.c",
                                  "src/ni_morphology.c", "src/ni_support.c"],
                         include_dirs=['src'] + [get_include()])
    # Cython wants the .c and .pyx to have the underscore.
    config.add_extension("_ni_label",
                         sources=["src/_ni_label.c",],
                         include_dirs=['src'] + [get_include()])
    config.add_extension("_ctest",
                         sources=["src/_ctest.c"],
                         include_dirs=[get_include()])
    # Same C source rebuilt with the deprecated-API macro defined.
    config.add_extension("_ctest_oldapi",
                         sources=["src/_ctest.c"],
                         include_dirs=[get_include()],
                         define_macros=[("OLDAPI", 1)])
    config.add_extension("_cytest",
                         sources=["src/_cytest.c"])
    config.add_data_dir('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``utils`` subpackage (arrayfuncs needs CBLAS)."""
    import numpy
    from numpy.distutils.misc_util import Configuration
    config = Configuration('utils', parent_package, top_path)
    config.add_subpackage('sparsetools')
    # cd fast needs CBLAS
    blas_info = get_info('blas_opt', 0)
    # Fall back to the bundled cblas when no optimized BLAS was detected,
    # or when only a degraded ATLAS (NO_ATLAS_INFO) was found.
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        cblas_libs = ['cblas']
        blas_info.pop('libraries', None)
    else:
        cblas_libs = blas_info.pop('libraries', [])
    config.add_extension('arrayfuncs',
                         sources=['arrayfuncs.c'],
                         depends=[join('src', 'cholesky_delete.c')],
                         libraries=cblas_libs,
                         # blas_info entries are popped so the remainder can
                         # be splatted as kwargs.
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info
                         )
    config.add_extension('graph_shortest_path',
                         sources=['graph_shortest_path.c'],
                         include_dirs=[numpy.get_include()])
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``impl`` subpackage.

    Declares five single-source C extensions sharing identical settings,
    plus the tests subpackage.  get_blas_info() is called for its side
    effects / parity with sibling configs, exactly as in the original.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('impl', parent_package, top_path)
    cblas_libs, blas_info = get_blas_info()
    for ext_name in ('dataset_fast', 'matrix_fact_fast', 'preprocessing_fast',
                     'dict_fact_fast', 'dict_fact_fast_partial'):
        config.add_extension(ext_name,
                             sources=[ext_name + '.c'],
                             include_dirs=[numpy.get_include()])
    config.add_subpackage('tests')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``map`` subpackage
    (ball_tree, ckdtree, _load_nn_field_data, _gate_to_grid_map)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    config = Configuration('map', parent_package, top_path)
    config.add_data_dir('tests')
    libraries = []
    # libm for the C math routines on POSIX platforms.
    if os.name == 'posix':
        libraries.append('m')
    config.add_extension('ball_tree',
                         sources=['ball_tree.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension('ckdtree',
                         sources=['ckdtree.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension('_load_nn_field_data',
                         sources=['_load_nn_field_data.c'],
                         include_dirs=[numpy.get_include()])
    config.add_extension('_gate_to_grid_map',
                         sources=['_gate_to_grid_map.c'],
                         libraries=libraries)
    return config
def make_extension_modules(mode="disable_threads", nfft_library_dirs=None, nfft_include_dirs=None):
    """Build the condor extension modules.

    Parameters
    ----------
    mode : "disable_threads" or "enable_threads" — selects the NFFT/FFTW
        libraries to link and whether ENABLE_THREADS is defined.
    nfft_library_dirs, nfft_include_dirs : optional lists of extra search
        paths for the NFFT installation.

    Returns ``[ext_icosahedron, ext_nfft]``.
    """
    # BUG FIX: mutable default arguments ([]) are shared across calls;
    # use None sentinels and allocate fresh lists per call instead.
    if nfft_library_dirs is None:
        nfft_library_dirs = []
    if nfft_include_dirs is None:
        nfft_include_dirs = []
    ext_icosahedron = Extension(
        "condor.utils.icosahedron",
        sources=["src/utils/icosahedron/icosahedronmodule.c"],
        include_dirs=[numpy.get_include()],
    )
    _nfft_libraries = {
        "disable_threads": ["nfft3"],
        "enable_threads": ["nfft3_threads", "fftw3_threads", "fftw3"]
    }
    _nfft_macros = {
        "disable_threads": [],
        "enable_threads": [("ENABLE_THREADS", None)],
    }
    ext_nfft = Extension(
        "condor.utils.nfft",
        sources=["src/utils/nfft/nfftmodule.c"],
        library_dirs=nfft_library_dirs,
        libraries=_nfft_libraries[mode],
        include_dirs=[numpy.get_include()] + nfft_include_dirs,
        define_macros=_nfft_macros[mode],
        runtime_library_dirs=nfft_library_dirs,
        # Embed an rpath only when explicit library dirs were supplied.
        extra_link_args=[] if (nfft_library_dirs == []) else ['-Wl,-rpath,' + nfft_library_dirs[0] + ',-L' + nfft_library_dirs[0]],
    )
    return [ext_icosahedron, ext_nfft]
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``lightning`` package.

    Registers five C++ extensions that all compile against numpy's headers
    and the package's bundled ``random`` directory, plus three subpackages.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('lightning', parent_package, top_path)

    # Shared header location for the bundled random-number sources.
    randomdir = os.path.join(top_path, "lightning", "random")
    common_includes = [numpy.get_include(), randomdir]

    # Five extensions, identical settings apart from the name.
    for ext_name in ('dataset_fast', 'dual_cd_fast', 'loss_fast',
                     'primal_cd_fast', 'sgd_fast'):
        config.add_extension(ext_name,
                             sources=[ext_name + '.cpp'],
                             include_dirs=common_includes)

    config.add_subpackage('random')
    config.add_subpackage('tests')
    config.add_subpackage('datasets')
    return config
def configuration(parent_package='', top_path=None):
    """Build configuration for ``pyLLE`` (ball_tree and LLE C++ extensions)."""
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    config = Configuration('pyLLE', parent_package, top_path)
    config.add_subpackage('python_only')
    config.add_extension('ball_tree',
                         language='c++',
                         sources=[join('wrappers', 'ball_tree.cpp')],
                         depends=[join('include', 'BallTree.h'),
                                  join('include', 'BallTreePoint.h')],
                         include_dirs=['include', numpy.get_include()])
    # MatVec_sources, LLE_sources and LIBS are module-level names defined
    # elsewhere in this file.
    config.add_extension('LLE',
                         language='c++',
                         sources=[join('wrappers', 'LLE.cpp')]
                                 + MatVec_sources + LLE_sources,
                         libraries=['stdc++', 'blas', 'lapack', 'arpack'],
                         library_dirs=LIBS,
                         include_dirs=['include', 'lib/MatVec',
                                       numpy.get_include()]
                         )
    return config
def extensions():
    """Return the pysas Cython extensions (world, mcep, excite, mlsa).

    Clears __NUMPY_SETUP__ first so that importing numpy here (possibly
    while setup is still running) yields a fully initialized package.
    """
    __builtins__.__NUMPY_SETUP__ = False
    import numpy as np
    ext_modules = [
        Extension(
            'pysas.world',
            # Cython wrapper plus every WORLD C++ translation unit.
            ["pysas/world.pyx"] + glob("lib/world/*.cpp"),
            include_dirs=['lib/world', np.get_include()],
            extra_compile_args=["-O3"],
            language="c++",
        ),
        Extension(
            'pysas.mcep',
            ["pysas/mcep.pyx"],
            include_dirs=[np.get_include()],
            extra_compile_args=["-O3"],
            language="c++",
        ),
        Extension(
            'pysas.excite',
            ["pysas/excite.pyx"],
            include_dirs=[np.get_include()],
            extra_compile_args=["-O3"],
            language="c++",
        ),
        Extension(
            'pysas.synthesis.mlsa',
            ["pysas/synthesis/mlsa.pyx"],
            include_dirs=[np.get_include()],
            extra_compile_args=["-O3"],
            language="c++",
        )
    ]
    return ext_modules
def configuration(parent_package='', top_path=None):
    """Build configuration for ``linear_model`` (CBLAS-linked cd_fast and sgd_fast)."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('linear_model', parent_package, top_path)
    cblas_libs, blas_info = get_blas_info()
    libraries = []
    if os.name == 'posix':
        cblas_libs.append('m')
        libraries.append('m')
    config.add_extension('cd_fast',
                         sources=['cd_fast.c'],
                         libraries=cblas_libs,
                         # blas_info entries are popped so the remainder can
                         # be splatted as kwargs.
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info
                         )
    config.add_extension('sgd_fast',
                         sources=['sgd_fast.c'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries,
                         )
    # add other directories
    config.add_subpackage('tests')
    return config
def main():
    """Build and install the lz4 package with its two Cython extensions."""
    ext_modules = [
        Extension("lz4.lz4",
                  sources=["lz4/lz4.pyx", os.path.join(c_src_dir, 'lz4.c')],
                  include_dirs=[np.get_include(), c_src_dir],
                  define_macros=[],
                  extra_link_args=[],
                  # NOTE(review): -mssse3 assumes an x86 SSSE3-capable
                  # target; confirm for non-x86 builds.
                  extra_compile_args=['-mssse3', '-std=c99']),
        Extension("lz4.shuffle",
                  sources=["lz4/shuffle.pyx"],
                  include_dirs=[np.get_include()],
                  define_macros=[],
                  extra_link_args=[],
                  extra_compile_args=['-mssse3', '-std=c99']),
    ]
    kwargs = dict(
        name = 'lz4',
        packages = ['lz4'],
        cmdclass = {'build_ext': build_ext},
        ext_modules = cythonize(ext_modules),
    )
    setup(**kwargs)
def configuration(parent_package='', top_path=None):
    """Build configuration for the ``cluster`` subpackage's Cython extensions."""
    from numpy.distutils.misc_util import Configuration
    libraries = []
    if os.name == 'posix':
        libraries.append('m')
    config = Configuration('cluster', parent_package, top_path)
    config.add_extension('_dbscan_inner',
                         sources=['_dbscan_inner.pyx'],
                         include_dirs=[numpy.get_include()],
                         language="c++")
    config.add_extension('_hierarchical',
                         sources=['_hierarchical.pyx'],
                         language="c++",
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_extension('_k_means_elkan',
                         sources=['_k_means_elkan.pyx'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    # BUG FIX: include_dirs was passed the bare string from
    # numpy.get_include() instead of a list, inconsistent with every
    # sibling extension in this function.
    config.add_extension('_k_means',
                         sources=['_k_means.pyx'],
                         include_dirs=[numpy.get_include()],
                         libraries=libraries)
    config.add_subpackage('tests')
    return config
cython_extension('arraydatatype'), cython_extension('errorchecker'), cython_extension('vbo'), cython_extension('nones_formathandler'), cython_extension('latebind'), ]) try: import numpy except ImportError: sys.stderr.write( """Unable to import numpy, skipping numpy extension building\n""") else: if hasattr(numpy, 'get_include'): includeDirectories = [ numpy.get_include(), ] else: includeDirectories = [ os.path.join( os.path.dirname(numpy.__file__), 'core', 'include', ), ] extensions.append( cython_extension('numpy_formathandler', includeDirectories)) if __name__ == "__main__": extraArguments = { 'classifiers': [
include_dirs=include_dirs, libraries=libraries, library_dirs=library_dirs, )) if BUILD_HINTSVM: print("Build HintSVM...") extensions.append( Extension( "libact.query_strategies._hintsvm", sources=[ "libact/query_strategies/_hintsvm.pyx", "libact/query_strategies/src/hintsvm/libsvm_helper.c", "libact/query_strategies/src/hintsvm/svm.cpp" ], include_dirs=[ numpy.get_include(), "libact/query_strategies/src/hintsvm/" ], extra_compile_args=['-lstdc++'], )) extensions = cythonize(extensions) cmdclasses = {'build_ext': build_ext} setup_requires = [] with open('./requirements.txt') as f: requirements = f.read().splitlines() install_requires = requirements tests_require = [ 'coverage', ] setup(
define_macros=[('CYTHON_TRACE', '1')]), Extension(name='_cython._mf', sources=["_cython/_mf.pyx"], define_macros=[('CYTHON_TRACE', '1')]), ] setup(name='RecPy', version="0.2.0", description= 'Recommender Systems framework for the 2016 Recsys Course at Polimi', url='https://github.com/mquad/recsys-course', author='Massimo Quadrana and Yashar Deldjoo', author_email='Massimo Quadrana', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'Natural Language :: English', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Cython', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=['numpy', 'scipy>=0.16'], packages=['_cython'], setup_requires=["Cython >= 0.19"], ext_modules=cythonize(extensions), include_dirs=[numpy.get_include()])
# requires proj4 headers ext_modules=[ Extension( 'cartopy.trace', ['lib/cartopy/trace.pyx', 'lib/cartopy/_trace.cpp'], include_dirs=[include_dir, './lib/cartopy'] + proj_includes + geos_includes, libraries=proj_libraries + geos_libraries, library_dirs=[library_dir] + proj_library_dirs + geos_library_dirs, language='c++', **extra_extension_args ), Extension( 'cartopy._crs', ['lib/cartopy/_crs.pyx'], include_dirs=[include_dir, np.get_include()] + proj_includes, libraries=proj_libraries, library_dirs=[library_dir] + proj_library_dirs, **extra_extension_args ), ], cmdclass={'build_ext': build_ext}, classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: POSIX :: AIX', 'Operating System :: POSIX :: Linux',
"*", sources=cython_files, include_dirs=[ os.path.abspath(os.path.join(CUDF_ROOT, "../include/cudf")), os.path.abspath(os.path.join(CUDF_ROOT, "../include")), os.path.abspath( os.path.join(CUDF_ROOT, "../libcudf_kafka/include/cudf_kafka") ), os.path.join(CUDF_ROOT, "include"), os.path.join(CUDF_ROOT, "_deps/libcudacxx-src/include"), os.path.join( os.path.dirname(sysconfig.get_path("include")), "rapids/libcudacxx", ), os.path.dirname(sysconfig.get_path("include")), np.get_include(), cuda_include_dir, ], library_dirs=([get_python_lib(), os.path.join(os.sys.prefix, "lib")]), libraries=["cudf", "cudf_kafka"], language="c++", extra_compile_args=["-std=c++17"], ) ] setup( name="cudf_kafka", version=versioneer.get_version(), description="cuDF Kafka Datasource", url="https://github.com/rapidsai/cudf", author="NVIDIA Corporation",
def __str__(): import numpy return numpy.get_include()
def c_header_dirs(self):
    """Header search paths for the generated C code: pygpu's include
    directory followed by numpy's."""
    header_dirs = [pygpu.get_include()]
    header_dirs.append(numpy.get_include())
    return header_dirs
USE_CYTHON = True DISTNAME = 'lsh' DESCRIPTION = 'A library for performing shingling and LSH for python.' MAINTAINER = 'Matti Lyra' MAINTAINER_EMAIL = '*****@*****.**' URL = 'https://github.com/mattilyra/lsh' DOWNLOAD_URL = 'https://github.com/mattilyra/lsh' VERSION = '0.3.2' ext = '.pyx' if USE_CYTHON else '.cpp' try: import numpy as np includes = [np.get_include()] except ImportError: includes = [] extensions = [ Extension("lsh.cMinhash", ["lsh/cMinhash{}".format(ext), 'lsh/MurmurHash3.cpp'], include_dirs=includes) ] if USE_CYTHON: from Cython.Build import cythonize extensions = cythonize(extensions) install_deps = ['numpy', 'cython>=0.24.1'] test_deps = [
def finalize_options(self):
    """Finalize build_ext options, then append numpy's include directory.

    __NUMPY_SETUP__ is cleared first so that importing numpy here yields
    the fully initialized package (numpy's own setup sets that flag while
    it is being installed).
    """
    _build_ext.finalize_options(self)
    __builtins__.__NUMPY_SETUP__ = False
    import numpy
    self.include_dirs.append(numpy.get_include())
], # What does your project relate to? keywords='nefis file_format', # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). packages=find_packages(exclude=['contrib', 'docs', 'tests*']), cmdclass=cmdclass, ext_modules=ext_modules, # List run-time dependencies here. These will be installed by pip when your # project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files install_requires=requirements, include_dirs=[np.get_include()], # <---- New line # hmm, where did the data go? # data_files=[('nefis_data', ['data/trim-f34.dat', 'data/trim-f34.def'])], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. entry_points={ 'console_scripts': [ # TODO: check if you prefer this interface 'nefis=nefis.cli:cli' ], }, )
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import glob
import numpy as np

# Cython wrapper plus the bundled sofia-ml C++ implementation files.
sources = ['pysofia/_sofia_ml.pyx'] + glob.glob('pysofia/src/*.cc')

# Read the long description with a context manager so the file handle is
# closed deterministically (the original open(...).read() leaked it).
with open('README.rst') as readme:
    long_description = readme.read()

setup(
    name='pysofia',
    version='0.10.dev0',
    description='Python bindings for sofia-ml',
    long_description=long_description,
    author='Fabian Pedregosa & Csy',
    author_email='*****@*****.**',
    url='http://pypi.python.org/pypi/pysofia',
    packages=['pysofia'],
    cmdclass={'build_ext': build_ext},
    install_requires=['six'],
    ext_modules=[
        Extension('pysofia._sofia_ml',
                  sources=sources,
                  language='c++',
                  include_dirs=[np.get_include()]),
    ],
)
from Cython.Build import cythonize from cython import __version__ as cython_version min_cython_version = '0.20.1' if LooseVersion(cython_version) < LooseVersion(min_cython_version): raise ValueError( 'cython support requires cython>={}'.format(min_cython_version)) cython = True except ImportError: cython = False clang = False if sys.platform.lower().startswith('darwin'): clang = True include_dirs = ['include', 'distributions'] include_dirs.append(numpy.get_include()) if 'EXTRA_INCLUDE_PATH' in os.environ: include_dirs.append(os.environ['EXTRA_INCLUDE_PATH']) extra_compile_args = [ '-DDIST_DEBUG_LEVEL=3', '-DDIST_THROW_ON_ERROR=1', '-Wno-unused-function', ] if clang: extra_compile_args.extend([ '-mmacosx-version-min=10.7', # for anaconda '-std=c++0x', '-stdlib=libc++', '-Wno-deprecated-register',
import time
import numpy as np
from scipy import linalg
from numba import njit, jit, float64, int64
import pyximport

# Compile .pyx modules on import, pointing Cython at numpy's headers.
pyximport.install(setup_args={"include_dirs": np.get_include()},
                  reload_support=True)
from lasso_ct import lasso_ct


def timeit(method):
    """Decorator that prints the wall-clock duration of each call."""
    def timed(*args, **kw):
        start = time.time()
        result = method(*args, **kw)
        elapsed = time.time() - start
        print('%r %2.2f sec' % (method.__name__, elapsed))
        return result
    return timed


@njit(float64(float64))
def fsign(f):
    """Sign of ``f``: 1.0 for positive, -1.0 for negative, 0 for zero."""
    if f > 0:
        return 1.0
    elif f < 0:
        return -1.0
    else:
        return 0
def finalize_options(self): _build_ext.finalize_options(self) # Prevent numpy from thinking it is still in its setup process: __builtins__.__NUMPY_SETUP__ = False import numpy self.include_dirs.append(numpy.get_include())
import os from setuptools import setup, find_packages from setuptools.extension import Extension from Cython.Build import cythonize from Cython.Distutils import build_ext import numpy extensions = [ Extension("openpiv.process", ["./openpiv/process.pyx"], include_dirs=[numpy.get_include()]), Extension("openpiv.lib", ["./openpiv/lib.pyx"], include_dirs=[numpy.get_include()]) ] extensions = cythonize(extensions, include_path=[numpy.get_include()]) # read the contents of your README file from os import path this_directory = path.abspath(path.dirname(__file__)) #with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: with open(path.join(this_directory, 'README.md')) as f: long_description = f.read() setup( name="OpenPIV", version='0.21.8c', cmdclass={'build_ext': build_ext}, ext_modules=extensions, packages=find_packages(), include_package_data=True,
'include': pjoin(home, 'include'), 'lib64': pjoin(home, 'lib64') } for k, v in iteritems(cudaconfig): if not os.path.exists(v): raise EnvironmentError( 'The CUDA %s path could not be located in %s' % (k, v)) return cudaconfig CUDA = locate_cuda() # Obtain the numpy include directory. This logic works across numpy versions. try: numpy_include = np.get_include() except AttributeError: numpy_include = np.get_numpy_include() def customize_compiler_for_nvcc(self): """inject deep into distutils to customize how the dispatch to gcc/nvcc works. If you subclass UnixCCompiler, it's not trivial to get your subclass injected in, and still have the right customizations (i.e. distutils.sysconfig.customize_compiler) run on it. So instead of going the OO route, I have this. Note, it's kindof like a wierd functional subclassing going on.""" # tell the compiler it can processes .cu
CXX_FLAGS += '-DEGL_ADD_PYTHON_INIT ' CXX_FLAGS += '-DB3_ENABLE_FILEIO_PLUGIN ' CXX_FLAGS += '-DB3_USE_ZIPFILE_FILEIO ' CXX_FLAGS += '-DBT_THREADSAFE=1 ' CXX_FLAGS += '-DSTATIC_LINK_SPD_PLUGIN ' EGL_CXX_FLAGS = '' # libraries += [current_python] libraries = [] include_dirs = [] try: import numpy NP_DIRS = [numpy.get_include()] except: print("numpy is disabled. getCameraImage maybe slower.") else: print("numpy is enabled.") CXX_FLAGS += '-DPYBULLET_USE_NUMPY ' for d in NP_DIRS: print("numpy_include_dirs = %s" % d) include_dirs += NP_DIRS sources = ["examples/pybullet/pybullet.c"]\ +["src/btLinearMathAll.cpp"]\ +["src/btBulletCollisionAll.cpp"]\ +["src/btBulletDynamicsAll.cpp"]\ +["examples/ExampleBrowser/InProcessExampleBrowser.cpp"]\ +["examples/TinyRenderer/geometry.cpp"]\
ext = '.pyx' if USE_CYTHON else '.cpp' extensions = [ Extension(name='cornac.models.c2pf.c2pf', sources=[ 'cornac/models/c2pf/cython/c2pf' + ext, 'cornac/models/c2pf/cpp/cpp_c2pf.cpp'], include_dirs=[ 'cornac/models/c2pf/cpp/', 'cornac/utils/external/eigen/Eigen', 'cornac/utils/external/eigen/unsupported/Eigen/' ], language='c++'), Extension(name='cornac.models.nmf.recom_nmf', sources=['cornac/models/nmf/recom_nmf' + ext], include_dirs=[np.get_include()], language='c++'), Extension(name='cornac.models.pmf.pmf', sources=['cornac/models/pmf/cython/pmf' + ext], language='c++'), Extension(name='cornac.models.mcf.mcf', sources=['cornac/models/mcf/cython/mcf' + ext], language='c++'), Extension(name='cornac.models.sorec.sorec', sources=['cornac/models/sorec/cython/sorec' + ext], language='c++'), Extension('cornac.models.hpf.hpf', sources=['cornac/models/hpf/cython/hpf' + ext, 'cornac/models/hpf/cpp/cpp_hpf.cpp'], include_dirs=[ 'cornac/models/hpf/cpp/',
from Cython.Build import cythonize from setuptools.extension import Extension import numpy as np ext_modules = cythonize([ 'astrid/circle.pyx', 'astrid/defaults.pyx', 'astrid/io.pyx', 'astrid/logger.pyx', 'astrid/orc.pyx', 'astrid/midi.pyx', 'astrid/names.pyx', 'astrid/sampler.pyx', 'astrid/server.pyx', 'astrid/voices.pyx', ], include_path=[np.get_include()], annotate=True) ext_modules = [ Extension('astrid.circle', ['astrid/circle.c']), Extension('astrid.defaults', ['astrid/defaults.c']), Extension('astrid.io', ['astrid/io.c'], extra_compile_args=['-fopenmp'], extra_link_args=['-fopenmp'] ), Extension('astrid.logger', ['astrid/logger.c']), Extension('astrid.orc', ['astrid/orc.c']), Extension('astrid.midi', ['astrid/midi.c']), Extension('astrid.names', ['astrid/names.c']), Extension('astrid.sampler', ['astrid/sampler.c']), Extension('astrid.server', ['astrid/server.c']),
''' SASSIE Copyright (C) 2011 Joseph E. Curtis This program comes with ABSOLUTELY NO WARRANTY; This is free software, and you are welcome to redistribute it under certain conditions; see http://www.gnu.org/licenses/gpl-3.0.html for details. ''' # System imports from distutils.core import * from distutils import sysconfig # Third-party modules - we depend on numpy for everything import numpy # Obtain the numpy include directory. This logic works across numpy versions. try: numpy_include = numpy.get_include() except AttributeError: numpy_include = numpy.get_numpy_include() # simple extension module _view_vmd = Extension( "_view_vmd", ["view_vmd.i", "view_vmd.c", "imd.c", "vmdsock.c"], include_dirs=[numpy_include], ) # NumyTypemapTests setup setup(name="SASVIEW VMD I/O", description= "Module handles sending and receiving coordinates to VMD using numpy.i", author="Joseph E. Curtis",
def run(): ''' Dump on stdout the config flags required to compile pythran-generated code. ''' import argparse import distutils.sysconfig import pythran import numpy parser = argparse.ArgumentParser( prog='pythran-config', description='output build options for pythran-generated code', epilog="It's a megablast!" ) parser.add_argument('--compiler', action='store_true', help='print default compiler') parser.add_argument('--cflags', action='store_true', help='print compilation flags') parser.add_argument('--libs', action='store_true', help='print linker flags') parser.add_argument('--no-python', action='store_true', help='do not include Python-related flags') parser.add_argument('--verbose', '-v', action='count', default=0, help=( 'verbose mode: [-v] prints warnings if pythranrc ' 'has an invalid configuration; use ' '[-vv] for more information') ) args = parser.parse_args(sys.argv[1:]) args.python = not args.no_python output = [] extension = pythran.config.make_extension(python=args.python) if args.verbose >= 1: if args.verbose == 1: logger.setLevel(logging.WARNING) else: logger.setLevel(logging.INFO) lint_cfg(cfg) if args.compiler or args.verbose >= 2: cxx = compiler() or 'c++' logger.info('CXX = '.rjust(10) + cxx) if args.compiler: output.append(cxx) if args.cflags or args.verbose >= 2: def fmt_define(define): name, value = define if value is None: return '-D' + name else: return '-D' + name + '=' + value cflags = [] cflags.extend(fmt_define(define) for define in extension['define_macros']) cflags.extend(('-I' + include) for include in extension['include_dirs']) if args.python: cflags.append('-I' + numpy.get_include()) cflags.append('-I' + distutils.sysconfig.get_python_inc()) logger.info('CFLAGS = '.rjust(10) + ' '.join(cflags)) if args.cflags: output.extend(cflags) if args.libs or args.verbose >= 2: ldflags = [] ldflags.extend(('-L' + include) for include in extension['library_dirs']) ldflags.extend(('-l' + include) for include in extension['libraries']) if 
args.python: ldflags.append('-L' + distutils.sysconfig.get_config_var('LIBPL')) ldflags.extend(distutils.sysconfig.get_config_var('LIBS').split()) ldflags.append('-lpython' + distutils.sysconfig.get_config_var('VERSION')) logger.info('LDFLAGS = '.rjust(10) + ' '.join(ldflags)) if args.libs: output.extend(ldflags) if output: print(' '.join(output))
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
import numpy as np


# Avoid a gcc warning below:
# cc1plus: warning: command line option ‘-Wstrict-prototypes’ is valid
# for C/ObjC but not for C++
class BuildExt(build_ext):
    """build_ext that strips the C-only '-Wstrict-prototypes' flag."""

    def build_extensions(self):
        # The flag is only present with some compilers/platforms; the
        # unconditional list.remove() raised ValueError elsewhere (clang,
        # MSVC has no compiler_so at all, hence AttributeError too).
        try:
            self.compiler.compiler_so.remove('-Wstrict-prototypes')
        except (AttributeError, ValueError):
            pass
        super(BuildExt, self).build_extensions()


module = 'HWDecode'

# C++ extension wrapping FFmpeg's hardware-decode helpers.
hwdecode_utils_module = Extension(
    module,
    sources=['hw_decode.cpp'],
    include_dirs=[np.get_include(), '/usr/local/include/'],
    extra_compile_args=['-DNDEBUG', '-O3'],
    extra_link_args=[
        '-lavutil', '-lavcodec', '-lavformat', '-lswscale', '-L/usr/local/lib/'
    ])

setup(name=module,
      version='0.1',
      description='Utils for hwdecode',
      ext_modules=[hwdecode_utils_module],
      cmdclass={'build_ext': BuildExt})
from distutils.extension import Extension
import numpy as np
import os.path as op
import sys

# Make the source tree importable so the version module can be found.
sys.path.insert(0, op.dirname(__file__))

try:
    import methylcoder.version as V
except ImportError:
    # Stub version object used when the package metadata is not importable.
    class V(object):
        version = 0.0

# C extension wrapping the bowtie interface, compiled against numpy headers.
cbowtie_extension = Extension(
    "methylcoder.cbowtie",
    sources=["methylcoder/cbowtie.c"],
    include_dirs=[np.get_include(), "methylcoder"],
)

setup(
    license="BSD",
    name="methylcoder",
    version=V.version,
    ext_modules=[cbowtie_extension],
    packages=['methylcoder'],
    package_dir={'methylcoder': 'methylcoder'},
    zip_safe=False,
    requires=['numpy', 'pyfasta'],
    test_suite="nose.collector",
    entry_points={
        'console_scripts': ['methylcoder = methylcoder:main']
    },
)
# CC=cc LDSHARED="cc -shared" python setup.py build_ext --inplace
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import numpy

# Single Cython extension; the -fopenmp flags are for GCC (recommended
# when building against CPython).
spectral_extension = Extension(
    "kernels_spectral_cy",
    sources=["kernels_spectral_cy.pyx"],
    include_dirs=[numpy.get_include(), "lib"],
    extra_compile_args=["-fopenmp"],
    extra_link_args=["-fopenmp"],
)

setup(
    name="kernels_spectral_cy",
    ext_modules=cythonize(
        [spectral_extension],
        compiler_directives={'language_level': "3"},
    ),
)
# -*- coding: utf-8 -*- """ Created on Wed Feb 10 12:45:53 2016 @author: ricketsonl """ import numpy as np #from numpy.linalg import norm import pyximport pyximport.install(setup_args={"include_dirs":np.get_include()},reload_support=True) import InterpolationRoutines as IR import SpectralOps as SO #from scipy.optimize import newton_krylov import matplotlib.pyplot as plt from matplotlib import animation #plt.rcParams['animation.ffmpeg_path'] = '/usr/local/bin/ffmpeg' class PICES2DFGV: chrge = -1. chrgi = +1. space_norm = 1; time_norm = np.inf # Sets the space- and time-norms for variances imptol = 1.e-10 # Tolerance for Newton-Krylov iteration B = 10. def __init__(self,T_final,domx,domy,numcellsx,numcellsy,numsteps,idata,Npi,varwts=False,RHSe=0.,RHSi=0.,Efield = 0.,mSe=0.,mSi=0.): self.T = T_final self.ncelx = numcellsx; self.ncely = numcellsy self.nstep = numsteps self.dt = T_final/numsteps self.DomLenX = domx; self.DomLenY = domy
def make_extension(python, **extra):
    """Build the keyword dict describing a pythran C++ extension.

    ``python`` selects whether Python/numpy-specific defines, includes and
    BLAS settings are added; remaining ``extra`` kwargs are merged into the
    corresponding list entries of the result.  Returns a dict usable as
    ``distutils.Extension`` keyword arguments.
    """
    # load platform specific configuration then user configuration
    cfg = init_cfg('pythran.cfg',
                   'pythran-{}.cfg'.format(sys.platform),
                   '.pythranrc',
                   extra.get('config', None))
    if 'config' in extra:
        extra.pop('config')

    def parse_define(define):
        # "NAME=VALUE" -> (NAME, VALUE); bare "NAME" -> (NAME, None).
        index = define.find('=')
        if index < 0:
            return (define, None)
        else:
            return define[:index], define[index + 1:]

    extension = {
        "language": "c++",
        # forcing str conversion to handle Unicode case (the default on MS)
        "define_macros": [str(x) for x in
                          cfg.get('compiler', 'defines').split()],
        "undef_macros": [str(x) for x in
                         cfg.get('compiler', 'undefs').split()],
        "include_dirs": [str(x) for x in
                         cfg.get('compiler', 'include_dirs').split()],
        "library_dirs": [str(x) for x in
                         cfg.get('compiler', 'library_dirs').split()],
        "libraries": [str(x) for x in
                      cfg.get('compiler', 'libs').split()],
        "extra_compile_args": [str(x) for x in
                               cfg.get('compiler', 'cflags').split()],
        "extra_link_args": [str(x) for x in
                            cfg.get('compiler', 'ldflags').split()],
        "extra_objects": []
    }
    if python:
        extension['define_macros'].append('ENABLE_PYTHON_MODULE')
        extension['define_macros'].append(
            '__PYTHRAN__={}'.format(sys.version_info.major))
    # Root of the installed pythran package (or '.' when run from source).
    here = os.path.dirname(os.path.dirname(__file__)) or '.'
    # using / as separator as advised in the distutils doc
    extension["include_dirs"].append(here + '/pythran')

    extra.pop('language', None)  # forced to c++ anyway
    cxx = extra.pop('cxx', None)
    cc = extra.pop('cc', None)
    if cxx is None:
        cxx = compiler()
    if cxx is not None:
        extension['cxx'] = cxx
        extension['cc'] = cc or cxx

    # Any remaining extra kwargs are appended to the matching list entries.
    for k, w in extra.items():
        extension[k].extend(w)
    if cfg.getboolean('pythran', 'complex_hook'):
        # the patch is *not* portable
        extension["include_dirs"].append(here + '/pythran/pythonic/patch')

    # Numpy can pollute stdout with warning message which should be on stderr
    old_stdout = sys.stdout
    try:
        sys.stdout = sys.stderr
        # numpy specific
        if python:
            extension['include_dirs'].append(numpy.get_include())
        # blas dependency
        user_blas = cfg.get('compiler', 'blas')
        if user_blas == 'pythran-openblas':
            try:
                import pythran_openblas as openblas
                # required to cope with atlas missing extern "C"
                extension['define_macros'].append('PYTHRAN_BLAS_OPENBLAS')
                extension['include_dirs'].extend(openblas.include_dirs)
                extension['extra_objects'].append(
                    os.path.join(openblas.library_dir,
                                 openblas.static_library))
            except ImportError:
                logger.warn("Failed to find 'pythran-openblas' package. "
                            "Please install it or change the compiler.blas setting. "
                            "Defaulting to 'blas'")
                user_blas = 'blas'
        if user_blas != 'pythran-openblas':
            # Resolve the named BLAS through numpy's system-info machinery.
            numpy_blas = numpy_sys.get_info(user_blas)
            # required to cope with atlas missing extern "C"
            extension['define_macros'].append('PYTHRAN_BLAS_{}'
                                              .format(user_blas.upper()))
            extension['libraries'].extend(numpy_blas.get('libraries', []))
            extension['library_dirs'].extend(
                numpy_blas.get('library_dirs', []))
            extension['include_dirs'].extend(
                numpy_blas.get('include_dirs', []))
    finally:
        sys.stdout = old_stdout

    # final macro normalization
    extension["define_macros"] = [
        dm if isinstance(dm, tuple) else parse_define(dm)
        for dm in extension["define_macros"]]
    return extension
def finalize_options(self): TestCommand.finalize_options(self) if self.args: self.args = __import__('shlex').split(self.args) def run_tests(self): # Run nose ensuring that argv simulates running nosetests directly nose_args = ['nosetests'] nose_args.extend(self.args) __import__('nose').run_exit(argv=nose_args) commands = versioneer.get_cmdclass() commands['test'] = NoseTestCommand incDirs = [sysconfig.get_python_inc(), numpy.get_include()] ext = [ Extension("radiomics._cmatrices", ["radiomics/src/_cmatrices.c", "radiomics/src/cmatrices.c"], include_dirs=incDirs), Extension("radiomics._cshape", ["radiomics/src/_cshape.c", "radiomics/src/cshape.c"], include_dirs=incDirs) ] setup( name='pyradiomics', url='http://github.com/Radiomics/pyradiomics#readme', author='pyradiomics community', author_email='*****@*****.**',
libs.append('CbcSolver') libDirs.extend(['.', join('.', cythonFilesDir)]) try: libDirs.append(join(CoinDir, 'lib')) except: pass if operatingSystem == 'windows': try: libDirs.append(join(CoinDir, 'lib', 'intel')) except: pass incDirs.extend([join('.', cppFilesDir), join('.', cythonFilesDir), numpy.get_include(), '.']) try: incDirs.extend([join(CoinDir, 'include', 'coin')]) except: pass cmdclass = {} if USECYTHON: from Cython.Distutils import build_ext from Cython.Distutils import extension Extension = extension.Extension import Cython.Compiler.Options Cython.Compiler.Options.annotate = True cmdclass.update({'build_ext': build_ext}) fileext = '.pyx' else:
exit_with_usage(code=0) for opt in opt_flags: if opt == '--prefix': print sysconfig.PREFIX elif opt == '--exec-prefix': print sysconfig.EXEC_PREFIX elif opt in ('--includes', '--cflags'): flags = ['-I' + sysconfig.get_python_inc(), '-I' + sysconfig.get_python_inc(plat_specific=True)] try: import numpy flags += ['-I' + numpy.get_include() + ' -DSMILEI_USE_NUMPY -DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION'] except: pass if opt == '--cflags': flags.extend(getvar('CFLAGS').split()) print ' '.join(flags) elif opt in ('--libs', '--ldflags'): libs = ['-lpython' + pyver] libs += getvar('LIBS').split() libs += getvar('SYSLIBS').split() # add the prefix/lib/pythonX.Y/config dir, but only if there is no # shared library in prefix/lib/. if opt == '--ldflags': if not getvar('Py_ENABLE_SHARED'):
use_setuptools = False
print("distutils is used.")

# Try to derive a numeric dev revision via setuptools_scm, when available.
try:
    from setuptools_scm import get_version
except ImportError:
    git_num = None

if 'setuptools_scm' in sys.modules:
    try:
        git_ver = get_version()
        # '<major>.<minor>.<micro>.dev<N>+<hash>' -> N
        git_num = int(git_ver.split('.')[3].split('+')[0].replace("dev", ""))
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; any parsing/lookup failure just means "no rev".
        git_num = None

# numpy headers are required by the C extensions defined further below.
include_dirs_numpy = [numpy.get_include()]

# Detect the compiler family from $CC so flags can be adjusted later.
cc = None
if 'CC' in os.environ:
    if 'clang' in os.environ['CC']:
        cc = 'clang'
    if 'gcc' in os.environ['CC']:
        cc = 'gcc'

# Workaround Python issue 21121: CPython's own CFLAGS may carry
# -Werror=declaration-after-statement, which breaks extension builds.
import sysconfig
config_var = sysconfig.get_config_var("CFLAGS")
if config_var is not None and "-Werror=declaration-after-statement" in config_var:
    os.environ['CFLAGS'] = config_var.replace(
        "-Werror=declaration-after-statement", "")