def __init__(self, **kwargs):
    # Build an Extension whose compile/link settings come from pkg-config
    # metadata (the 'pkc_name'/'pkc_version' keys), merged with any
    # caller-supplied values.
    name = kwargs['pkc_name']
    if 'include_dirs' in kwargs:
        kwargs['include_dirs'] += self.get_include_dirs(name) + GLOBAL_INC
    else:
        kwargs['include_dirs'] = self.get_include_dirs(name) + GLOBAL_INC
    # NOTE(review): unlike the other keys, a caller-supplied
    # 'define_macros' is overwritten here rather than extended --
    # confirm this is intentional.
    kwargs['define_macros'] = GLOBAL_MACROS
    if 'libraries' in kwargs:
        kwargs['libraries'] += self.get_libraries(name)
    else:
        kwargs['libraries'] = self.get_libraries(name)
    if 'library_dirs' in kwargs:
        kwargs['library_dirs'] += self.get_library_dirs(name)
    else:
        kwargs['library_dirs'] = self.get_library_dirs(name)
    if 'pygobject_pkc' in kwargs:
        # Optional secondary pkg-config package whose flags are appended
        # on top of the primary package's.
        self.pygobject_pkc = kwargs.pop('pygobject_pkc')
        if self.pygobject_pkc:
            kwargs['include_dirs'] += self.get_include_dirs(self.pygobject_pkc)
            kwargs['libraries'] += self.get_libraries(self.pygobject_pkc)
            kwargs['library_dirs'] += self.get_library_dirs(self.pygobject_pkc)
    self.name = kwargs['name']
    self.pkc_name = kwargs['pkc_name']
    self.pkc_version = kwargs['pkc_version']
    # Extension.__init__ does not accept the pkg-config-only keys.
    del kwargs['pkc_name'], kwargs['pkc_version']
    Extension.__init__(self, **kwargs)
def __init__(self, *args, **kwargs):
    """Extension with two extra, optional keyword options.

    ``extra_pyxtract_cmds`` (default ``[]``) and ``lib_type`` (default
    ``"dynamic"``) are stored on the instance and removed from
    ``kwargs`` before delegating to ``Extension.__init__``.
    """
    self.extra_pyxtract_cmds = kwargs.pop("extra_pyxtract_cmds", [])
    self.lib_type = kwargs.pop("lib_type", "dynamic")
    Extension.__init__(self, *args, **kwargs)
def __init__(self, *a, **kw):
    """Extension that additionally tracks MySQL-specific build flags."""
    # Fold the MySQL client flags into the build by default.
    self.use_mysql_flags = True
    # Per-instance flag lists, all initially empty.
    self._extra_compile_args = []
    self._extra_link_args = []
    self._mysql_compile_args = []
    self._mysql_link_args = []
    # Callables that produce the MySQL flags; installed later.
    self.get_mysql_compile_args = None
    self.get_mysql_link_args = None
    Extension.__init__(self, *a, **kw)
def __init__ ( self, name, sources, include_dirs=None, define_macros=None, undef_macros=None, library_dirs=None, libraries=None, runtime_library_dirs=None, extra_objects=None, extra_compile_args=None, extra_link_args=None, export_symbols=None, swig_opts=None, depends=None, language=None, f2py_options=None, module_dirs=None, extra_f77_compile_args=None, extra_f90_compile_args=None,): old_Extension.__init__( self, name, [], include_dirs=include_dirs, define_macros=define_macros, undef_macros=undef_macros, library_dirs=library_dirs, libraries=libraries, runtime_library_dirs=runtime_library_dirs, extra_objects=extra_objects, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, export_symbols=export_symbols) # Avoid assert statements checking that sources contains strings: self.sources = sources # Python 2.4 distutils new features self.swig_opts = swig_opts or [] # swig_opts is assumed to be a list. Here we handle the case where it # is specified as a string instead. if isinstance(self.swig_opts, basestring): import warnings msg = "swig_opts is specified as a string instead of a list" warnings.warn(msg, SyntaxWarning) self.swig_opts = self.swig_opts.split() # Python 2.3 distutils new features self.depends = depends or [] self.language = language # numpy_distutils features self.f2py_options = f2py_options or [] self.module_dirs = module_dirs or [] self.extra_f77_compile_args = extra_f77_compile_args or [] self.extra_f90_compile_args = extra_f90_compile_args or [] return
def __init__(self, *args, **kwargs):
    """Cython extension whose directives come from environment variables."""
    Extension.__init__(self, *args, **kwargs)
    # Enable profiling / embedded signatures only when the
    # corresponding environment variables are set.
    self.pyrex_directives = {
        'profile': 'USE_PROFILE' in environ,
        'embedsignature': 'USE_EMBEDSIGNATURE' in environ,
    }
    # XXX with pip, setuptools is imported before distutils, and change
    # our pyx to c, then, cythonize doesn't happen. So force again our
    # sources (the second positional argument).
    self.sources = args[1]
def __init__(self, **kwargs): name = kwargs["pkc_name"] kwargs["include_dirs"] = self.get_include_dirs(name) + GLOBAL_INC kwargs["define_macros"] = GLOBAL_MACROS kwargs["libraries"] = self.get_libraries(name) kwargs["library_dirs"] = self.get_library_dirs(name) self.pkc_name = kwargs["pkc_name"] self.pkc_version = kwargs["pkc_version"] del kwargs["pkc_name"], kwargs["pkc_version"] Extension.__init__(self, **kwargs)
def get_distutils_extension(modname, pyxfilename, language_level=None):
    """Return ``(Extension, setup_args)`` for compiling *pyxfilename*.

    A project-specific special build (if registered) wins; otherwise a
    plain distutils Extension is created.  ``language_level``, when
    given, is recorded as a Cython directive on the extension.
    """
    extension_mod, setup_args = handle_special_build(modname, pyxfilename)
    if not extension_mod:
        from distutils.extension import Extension
        extension_mod = Extension(name=modname, sources=[pyxfilename])
    if language_level is not None:
        extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args
def check_extensions_list(self, extensions):
    """Ensure that the list of extensions (presumably provided as a
    command option 'extensions') is valid, i.e. it is a list of
    Extension objects.  We also support the old-style list of 2-tuples,
    where the tuples are (ext_name, build_info), which are converted to
    Extension instances here.  Raise DistutilsSetupError if the
    structure is invalid anywhere; just returns otherwise.
    """
    # NOTE: Python 2 `raise Exc, "msg"` syntax is used throughout.
    if not isinstance(extensions, list):
        raise DistutilsSetupError, "'ext_modules' option must be a list of Extension instances"
    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            # Already a modern-style Extension: nothing to convert.
            continue
        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError, "each element of 'ext_modules' option must be an Extension instance or 2-tuple"
        ext_name, build_info = ext
        log.warn("old-style (ext_name, build_info) tuple found in ext_modules for extension '%s'-- please convert to Extension instance" % ext_name)
        if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
            raise DistutilsSetupError, "first element of each tuple in 'ext_modules' must be the extension name (a string)"
        if not isinstance(build_info, dict):
            raise DistutilsSetupError, "second element of each tuple in 'ext_modules' must be a dictionary (build info)"
        # Translate the build_info dict into an Extension instance.
        ext = Extension(ext_name, build_info['sources'])
        # Straight copy for keys whose names match Extension attributes.
        for key in ('include_dirs', 'library_dirs', 'libraries',
                    'extra_objects', 'extra_compile_args',
                    'extra_link_args'):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)
        # 'rpath' maps to the differently-named attribute.
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warn("'def_file' element of build info dict no longer supported")
        # 1-tuples are undefs, 2-tuples are defines.
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError, "'macros' element of build info dict must be 1- or 2-tuple"
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                elif len(macro) == 2:
                    ext.define_macros.append(macro)
        # Replace the tuple with the converted Extension in place.
        extensions[i] = ext
    return
def __init__(self, *args, **kwargs):
    """Cython extension; on OS X, force an x86_64-only build."""
    # Small hack to only compile for x86_64 on OSX.
    # Is there a better way to do this?
    if platform == 'darwin':
        arch_flags = ['-arch', 'x86_64']
        for key in ('extra_compile_args', 'extra_link_args'):
            kwargs[key] = arch_flags + kwargs.get(key, [])
    Extension.__init__(self, *args, **kwargs)
    # Always embed signatures; profile only when requested via env var.
    self.pyrex_directives = {
        'profile': 'USE_PROFILE' in environ,
        'embedsignature': True,
    }
    # XXX with pip, setuptools is imported before distutils, and change
    # our pyx to c, then, cythonize doesn't happen. So force again our
    # sources (the second positional argument).
    self.sources = args[1]
def get_extensions(buildsystem, compiler):
    """Create the distutils Extension objects for all buildable modules."""
    compatpath = "src"
    docpath = os.path.join("src", "doc")
    baseincpath = os.path.join("src", "base")
    config.config_modules.prepare_modules(buildsystem, modules, cfg, compiler)

    # Collect the availability defines of every buildable module,
    # de-duplicated while preserving first-seen order.
    alldefines = []
    for mod in modules:
        if not mod.canbuild:
            continue
        for entry in mod.globaldefines:
            if entry not in alldefines:
                alldefines.append(entry)

    # Create one Extension per buildable module.
    extensions = []
    for mod in modules:
        if not mod.canbuild:
            print("Skipping module '%s'" % mod.name)
            continue
        ext = Extension("pygame2." + mod.name, sources=mod.sources)
        ext.define_macros.append(("PYGAME_INTERNAL", None))
        ext.define_macros.extend(alldefines)
        ext.extra_compile_args = mod.cflags
        ext.extra_link_args = mod.lflags
        ext.include_dirs = mod.incdirs + [baseincpath, compatpath, docpath]
        ext.library_dirs = mod.libdirs
        ext.libraries = mod.libs
        ext.basemodule = mod
        extensions.append(ext)
    return extensions
def get_distutils_extension(modname, pyxfilename, language_level=None):
    """Return ``(Extension, setup_args)`` for *pyxfilename*.

    Falls back to a plain distutils Extension when no special build is
    registered for the module, normalizing the filename to ``str``
    first (a Python 2 requirement).
    """
    extension_mod, setup_args = handle_special_build(modname, pyxfilename)
    if not extension_mod:
        if not isinstance(pyxfilename, str):
            # distutils is stupid in Py2 and requires exactly 'str'
            # => encode accidentally coerced unicode strings back to str
            pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
        from distutils.extension import Extension
        extension_mod = Extension(name=modname, sources=[pyxfilename])
    if language_level is not None:
        extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args
def __init__ (self, name, sources, include_dirs=None, define_macros=None, undef_macros=None, library_dirs=None, libraries=None, runtime_library_dirs=None, extra_objects=None, extra_compile_args=None, extra_link_args=None, export_symbols=None, swig_opts=None, depends=None, language=None, f2py_options=None, module_dirs=None, optional=False ): old_Extension.__init__(self,name, [], include_dirs, define_macros, undef_macros, library_dirs, libraries, runtime_library_dirs, extra_objects, extra_compile_args, extra_link_args, export_symbols) # Avoid assert statements checking that sources contains strings: self.sources = sources # Python 2.4 distutils new features self.swig_opts = swig_opts or [] # Python 2.3 distutils new features self.depends = depends or [] self.language = language # numpy_distutils features self.f2py_options = f2py_options or [] self.module_dirs = module_dirs or [] return
def __init__(self, name, *args, **kwargs):
    # Extension pre-configured with the Subversion/APR include, library
    # and link settings discovered at configure time (module-level
    # globals).  Caller-supplied values for these keys are overwritten.
    kwargs["include_dirs"] = ([apr_includedir, apu_includedir] +
                              svn_includedirs + ["subvertpy"])
    kwargs["library_dirs"] = svn_libdirs
    # Note that the apr-util link flags are not included here, as
    # subvertpy only uses some apr util constants but does not use
    # the library directly.
    kwargs["extra_link_args"] = apr_link_flags + svn_link_flags
    if os.name == 'nt':
        # on windows, just ignore and overwrite the libraries!
        kwargs["libraries"] = extra_libs
        # APR needs WIN32 defined.
        kwargs["define_macros"] = [("WIN32", None)]
    if sys.platform == 'darwin':
        # on Mac OS X, we need to check for Keychain availability
        if is_keychain_provider_available():
            if "define_macros" not in kwargs:
                kwargs["define_macros"] = []
            kwargs["define_macros"].extend((
                ('DARWIN', None),
                ('SVN_KEYCHAIN_PROVIDER_AVAILABLE', '1'))
            )
    Extension.__init__(self, name, *args, **kwargs)
elif (sys.platform == 'openbsd5'): macros.append(('__OPENBSD__','1')) os.environ["CC"] = "eg++" elif (sys.platform == 'darwin'): macros.append(('__DARWIN__','1')) os.environ["CC"] = "g++" else: os.environ["CC"] = "g++" os.environ["CXX"] = "g++" return includes,macros aiengine_module = Extension("pyaiengine", sources = src_files, libraries = ["boost_system","boost_python","pcap","pcre","boost_iostreams"], # define_macros = [('__OPENBSD__','1'),('PYTHON_BINDING','1'),('HAVE_LIBPCRE','1')], # define_macros = [('PYTHON_BINDING','1'),('HAVE_LIBPCRE','1')], extra_compile_args = ["-O3","-Wreorder","-std=c++11","-lpthread","-lstdc++"], ) if __name__ == "__main__": includes,macros = setup_compiler() print("Compiling aiengine extension for %s" % sys.platform) print("\tOS name %s" % (os.name)) print("\tArchitecture %s" % os.uname()[4]) aiengine_module.include_dirs = includes aiengine_module.define_macros = macros
from setuptools import setup, find_namespace_packages
from distutils.extension import Extension
from Cython.Build import cythonize

setup(
    name='cyADS1256',
    version='0.3.0',
    package_dir={"": "src"},
    # FIX: 'dependencies' is not a valid setup() keyword (setuptools
    # warns "Unknown distribution option: 'dependencies'"); the Cython
    # build-time requirement belongs in setup_requires.
    setup_requires=["cython"],
    install_requires=["asgiref"],
    packages=find_namespace_packages(where="src"),
    ext_modules=cythonize([
        Extension("_bcm2835", ["src/cyads1256/_bcm2835.pyx"],
                  libraries=['bcm2835', 'cap'])
    ]),
)
# ext_args['extra_compile_args'].append('/openmp') # include_dirs=[pcl_root + '\\include\\pcl' + pcl_version, pcl_root + '\\3rdParty\\Eigen\\include', pcl_root + '\\3rdParty\\Boost\\include', pcl_root + '\\3rdParty\\FLANN\include', 'C:\\Anaconda2\\envs\\my_env\\Lib\\site-packages\\numpy\\core\\include'], # library_dirs=[pcl_root + '\\lib', pcl_root + '\\3rdParty\\Boost\\lib', pcl_root + '\\3rdParty\\FLANN\\lib'], # libraries=["pcl_apps_debug", "pcl_common_debug", "pcl_features_debug", "pcl_filters_debug", "pcl_io_debug", "pcl_io_ply_debug", "pcl_kdtree_debug", "pcl_keypoints_debug", "pcl_octree_debug", "pcl_registration_debug", "pcl_sample_consensus_debug", "pcl_segmentation_debug", "pcl_search_debug", "pcl_surface_debug", "pcl_tracking_debug", "pcl_visualization_debug", "flann-gd", "flann_s-gd", "boost_chrono-vc100-mt-1_49", "boost_date_time-vc100-mt-1_49", "boost_filesystem-vc100-mt-1_49", "boost_graph-vc100-mt-1_49", "boost_graph_parallel-vc100-mt-1_49", "boost_iostreams-vc100-mt-1_49", "boost_locale-vc100-mt-1_49", "boost_math_c99-vc100-mt-1_49", "boost_math_c99f-vc100-mt-1_49", "boost_math_tr1-vc100-mt-1_49", "boost_math_tr1f-vc100-mt-1_49", "boost_mpi-vc100-mt-1_49", "boost_prg_exec_monitor-vc100-mt-1_49", "boost_program_options-vc100-mt-1_49", "boost_random-vc100-mt-1_49", "boost_regex-vc100-mt-1_49", "boost_serialization-vc100-mt-1_49", "boost_signals-vc100-mt-1_49", "boost_system-vc100-mt-1_49", "boost_thread-vc100-mt-1_49", "boost_timer-vc100-mt-1_49", "boost_unit_test_framework-vc100-mt-1_49", "boost_wave-vc100-mt-1_49", "boost_wserialization-vc100-mt-1_49", "libboost_chrono-vc100-mt-1_49", "libboost_date_time-vc100-mt-1_49", "libboost_filesystem-vc100-mt-1_49", "libboost_graph_parallel-vc100-mt-1_49", "libboost_iostreams-vc100-mt-1_49", "libboost_locale-vc100-mt-1_49", "libboost_math_c99-vc100-mt-1_49", "libboost_math_c99f-vc100-mt-1_49", "libboost_math_tr1-vc100-mt-1_49", "libboost_math_tr1f-vc100-mt-1_49", "libboost_mpi-vc100-mt-1_49", 
"libboost_prg_exec_monitor-vc100-mt-1_49", "libboost_program_options-vc100-mt-1_49", "libboost_random-vc100-mt-1_49", "libboost_regex-vc100-mt-1_49", "libboost_serialization-vc100-mt-1_49", "libboost_signals-vc100-mt-1_49", "libboost_system-vc100-mt-1_49", "libboost_test_exec_monitor-vc100-mt-1_49", "libboost_thread-vc100-mt-1_49", "libboost_timer-vc100-mt-1_49", "libboost_unit_test_framework-vc100-mt-1_49", "libboost_wave-vc100-mt-1_49", "libboost_wserialization-vc100-mt-1_49"], ## define_macros=[('BOOST_NO_EXCEPTIONS', 'None')], # define_macros=[('EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET', '1')], # extra_compile_args=["/EHsc"], print(ext_args) if pcl_version == '-1.6': module = [ Extension( "pcl._pcl", ["pcl/_pcl.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args), Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args), # Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args), # debug # gdb_debug=True, ] elif pcl_version == '-1.7': module = [ Extension("pcl._pcl", [ "pcl/_pcl_172.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp" ], language="c++",
self.compiler_so = default_compiler_so # inject our redefined _compile method into the class self._compile = _compile # run the customize_compiler class custom_build_ext(build_ext): def build_extensions(self): customize_compiler_for_nvcc(self.compiler) build_ext.build_extensions(self) ext_modules = [ Extension("utils.cython_bbox", ["utils/bbox.pyx"], extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, include_dirs=[numpy_include]), Extension("utils.cython_nms", ["utils/nms.pyx"], extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, include_dirs=[numpy_include]), Extension("nms.cpu_nms", ["nms/cpu_nms.pyx"], extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, include_dirs=[numpy_include]), Extension( 'nms.gpu_nms', ['nms/nms_kernel.cu', 'nms/gpu_nms.pyx'], library_dirs=[CUDA['lib64']], libraries=['cudart'], language='c++', runtime_library_dirs=[CUDA['lib64']], # this syntax is specific to this build system
# Compile command: python setup.py build_ext --inplace --compiler=mingw32
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

# Single Cython module, compiled against the numpy headers.
quadtree_ext = Extension("QuadTree", ["QuadTree.pyx"])

setup(cmdclass={'build_ext': build_ext},
      ext_modules=[quadtree_ext],
      include_dirs=[numpy.get_include()])

print("""
############
# Success! #
############""")
from distutils.core import setup
from distutils.extension import Extension
import os.path
import sys

# All C++ translation units that make up the conedy extension.
files = ["neuroPython.cpp", "command.cpp", "instruction.cpp",
         "stdOdeIntegrator.cpp", "lyapunov.cpp", "fullNetwork.cpp",
         "generatedNodes.cpp", "globals.cpp", "sdeNode.cpp",
         "stdSdeIntegrator.cpp", "odeNode.cpp", "gslOdeNode.cpp",
         "ioNode.cpp", "pulseCoupledPhaseOscillator.cpp",
         "pulseCoupledExcitatoryNeuron.cpp", "network.cpp",
         "dynNetwork.cpp", "createNetwork.cpp", "spatialNetwork.cpp",
         "mapNode.cpp", "eventHandler.cpp", "node.cpp",
         "priorityQueue.cpp", "dynNode.cpp", "gslNoise.cpp",
         "statisticsNetwork.cpp", "commandLineArguments.cpp"]

if sys.platform == "win32":
    # MSVC build against a local boost tree.
    include_dirs = ["C:\\Program Files (x86)\\boost\\boost_1_46_1"]
    libraries = ["gsl", "cblas"]
    library_dirs = ['C:\\Program Files (x86)\\boost\\boost_1_46_1\\lib']
    module = Extension("conedy", files,
                       library_dirs=library_dirs,
                       libraries=libraries,
                       include_dirs=include_dirs,
                       depends=[])
    # MSVC exception-handling switch.
    module.extra_compile_args = ['/GX']
else:
    include_dirs = ["/usr/include/boost", "."]
    libraries = ["boost_python", "gsl", "gslcblas", "boost_iostreams", "z"]
    library_dirs = ['/usr//lib']
    module = Extension("conedy", files,
                       library_dirs=library_dirs,
                       libraries=libraries,
                       include_dirs=include_dirs,
                       depends=[])
) extensions = [] for name, data in ext_data.items(): sources = [srcpath(data['pyxfile'], suffix=suffix, subdir='')] pxds = [pxd(x) for x in data.get('pxdfiles', [])] if suffix == '.pyx' and pxds: sources.extend(pxds) sources.extend(data.get('sources', [])) include = data.get('include', common_include) obj = Extension('pandas.%s' % name, sources=sources, depends=data.get('depends', []), include_dirs=include) extensions.append(obj) sparse_ext = Extension('pandas._sparse', sources=[srcpath('sparse', suffix=suffix)], include_dirs=[], libraries=libraries) parser_ext = Extension('pandas._parser', depends=[ 'pandas/src/parser/tokenizer.h', 'pandas/src/parser/io.h', 'pandas/src/numpy_helper.h' ],
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

# Compile aliaser.py itself with Cython.
setup(name='Xplex',
      cmdclass={'build_ext': build_ext},
      ext_modules=[Extension("aliaser", ["aliaser.py"])])
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

__version__ = '0.2.6'

# Cython binding against a locally installed liblmdb.
mdb_ext = Extension(
    "mdb",
    ["db.pyx"],
    libraries=["lmdb"],
    library_dirs=["/usr/local/lib"],
    include_dirs=["/usr/local/include"],
    runtime_library_dirs=["/usr/local/lib"],
)

setup(name="mdb",
      version=__version__,
      description='Python client of MDB-Lightning',
      cmdclass={'build_ext': build_ext},
      author='Chango Inc.',
      keywords=['mdb-ligtning', 'mdb', 'lmdb', 'key-value store'],
      license='MIT',
      ext_modules=[mdb_ext])
("hdf5interface", ["petsc"]), ("mgimpl", ["petsc"]), ("patchimpl", ["petsc"]), ("spatialindex", ["spatialindex_c"]), ("supermeshimpl", ["supermesh", "petsc"])] petsc_dirs = get_petsc_dir() include_dirs = [np.get_include(), petsc4py.get_include()] include_dirs += ["%s/include" % d for d in petsc_dirs] dirs = (sys.prefix, *petsc_dirs) link_args = ["-L%s/lib" % d for d in dirs] + ["-Wl,-rpath,%s/lib" % d for d in dirs] extensions = [ Extension( "firedrake.cython.{}".format(ext), sources=[os.path.join("firedrake", "cython", "{}.pyx".format(ext))], include_dirs=include_dirs, libraries=libs, extra_link_args=link_args) for (ext, libs) in cythonfiles ] if 'CC' not in env: env['CC'] = "mpicc" setup(name='firedrake', version=versioneer.get_version(), cmdclass=cmdclass, description="""Firedrake is an automated system for the portable solution of partial differential equations using the finite element method (FEM)""", author="Imperial College London and others", author_email="*****@*****.**", url="http://firedrakeproject.org",
sourcefiles = [
    os.path.join(project_path, 'api', 'beeview.pyx'),
    os.path.join(project_path, 'api', 'beeview_api.cpp'),
    os.path.join(project_path, 'src', 'bee_eye.cpp'),
    os.path.join(project_path, 'src', 'camera.cpp'),
    os.path.join(project_path, 'src', 'image.cpp'),
    os.path.join(project_path, 'src', 'obj_loader.cpp'),
    os.path.join(project_path, 'src', 'renderer.cpp'),
    os.path.join(project_path, 'src', 'sampler.cpp'),
    os.path.join(project_path, 'src', 'scene.cpp'),
    os.path.join(project_path, 'src', 'stdafx.cpp'),
    os.path.join(project_path, 'src', 'texture.cpp')
]

ext = [Extension(
    'beeview',
    sources=sourcefiles,
    libraries=["embree"],
    extra_link_args=["/LIBPATH:" + os.path.join(project_path, 'lib')],  # for linux -L
    extra_compile_args=["-I" + os.path.join(project_path, 'external')],
    # FIX: `null` is not a Python name (NameError at import time); a
    # macro defined with no value is spelled (NAME, None).
    define_macros=[('_ENABLE_EXTENDED_ALIGNED_STORAGE', None)],
    language='c++')]

setup(
    name='beeview',
    version='1.0',
    author='Johannes Polster',
    url='https://github.com/tschopo/bee_view',
    description='Bee Eye Camera for Virtual Environments',
    ext_modules=cythonize(ext),
    py_modules=['beeview'],
)
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize

# C++11 Cython wrapper around the sources under src/.
raw_eater_ext = Extension(
    name="raw_eater",
    sources=["cython/raw_eater.pyx"],
    # libraries=[""],
    library_dirs=["src"],
    include_dirs=["src"],
    language='c++',
    extra_compile_args=['-std=c++11', '-Wno-unused-variable'],
    extra_link_args=['-std=c++11'],
    # extra_objects=["lib/parquet/libarrow.so.200.0.0"],
)

setup(name="raw_eater", ext_modules=cythonize(raw_eater_ext))
#!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.extension import Extension
import logging
import os
import sys

# Build the RLE codec with Cython when available; otherwise install
# without any extension module.
try:
    from Cython.Build import cythonize
    extension = cythonize(
        [
            Extension('psd_tools.compression._rle',
                      ['src/psd_tools/compression/_rle.pyx'])
        ],
        language_level=sys.version_info[0],
    )
except ImportError:
    logging.error('Cython not found, no extension will be built.')
    extension = []


def get_version():
    """ Get package version. """
    here = os.path.dirname(__file__)
    version_file = os.path.join(here, 'src', 'psd_tools', 'version.py')
    with open(version_file, 'r') as fp:
        content = fp.read()
    return content.split('=')[1].strip(" \r\n'")
def initialize_options(self): pass def finalize_options(self): pass def run(self): import subprocess import sys errno = subprocess.call([sys.executable, 'runtests.py']) raise SystemExit(errno) setup( cmdclass={ 'build_ext': build_ext, 'test': PyTest }, ext_modules=[ Extension("optspace", sources=[ "OptSpace_C/las2.c", "OptSpace_C/matops.c", "OptSpace_C/rand.c", "OptSpace_C/svdlib.c", "OptSpace_C/svdutil.c", "OptSpace_C/OptSpace.c", "optspace.pyx" ], include_dirs=["OptSpace_C"]) ], )
libs = ["ceres", boost_lib] + opencv_libraries xtra_obj2d = [] library_dirs = [opencv_library_dir] extensions = [ Extension( name="pupil_detectors.detector_2d", sources=[ "detector_2d.pyx", "singleeyefitter/ImageProcessing/cvx.cpp", "singleeyefitter/utils.cpp", "singleeyefitter/detectorUtils.cpp", ], include_dirs=include_dirs, libraries=libs, library_dirs=library_dirs, extra_link_args=[], # '-WL,-R/usr/local/lib' extra_compile_args=[ "-D_USE_MATH_DEFINES", "-std=c++11", "-w", "-O2", ], # ,'-O2'], #-w hides warnings extra_objects=xtra_obj2d, depends=dependencies, language="c++", ), Extension( name="pupil_detectors.detector_3d", sources=[ "detector_3d.pyx", "singleeyefitter/ImageProcessing/cvx.cpp",
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy

# OpenMP-enabled build of every Cython module under mizzle/.
mizzle_ext = Extension(
    "*",
    ["mizzle/*.pyx"],
    extra_compile_args=["-O3", "-ffast-math", "-march=native", "-fopenmp"],
    extra_link_args=['-fopenmp'],
    include_dirs=[numpy.get_include()],
)

setup(name='mizzle',
      version='0.1',
      description='Hydrates arbitrary metal-oxide surfaces',
      author='Samuel Stenberg',
      author_email='*****@*****.**',
      license='MIT',
      packages=['mizzle'],
      install_requires=['argcomplete', 'numpy', 'pandas', 'mdtraj',
                        'tqdm', 'scipy', 'radish', 'sphinx_rtd_theme'],
      scripts=['bin/mizzler'],
      include_package_data=True,
      cmdclass={'build_ext': build_ext},
      ext_modules=cythonize([mizzle_ext]),
      zip_safe=False)
# Recover the gcc compiler GCCPATH_STRING = sbp.Popen( ['gcc', '-print-libgcc-file-name'], stdout=sbp.PIPE).communicate()[0] GCCPATH = osp.normpath(osp.dirname(GCCPATH_STRING)) GCCPATH = str(GCCPATH, "UTF-8") # Recover the CLASS version with open(os.path.join('..', 'include', 'common.h'), 'r') as v_file: for line in v_file: if line.find("_VERSION_") != -1: # get rid of the " and the v VERSION = line.split()[-1][2:-1] break setup( name='classy', version=VERSION, description='Python interface to the Cosmological Boltzmann code CLASS', url='http://www.class-code.net', cmdclass={'build_ext': build_ext}, ext_modules=[Extension("classy", ["classy.pyx"], include_dirs=[nm.get_include(), "../include"], libraries=["class"], library_dirs=["../", GCCPATH], extra_link_args=['-lgomp'], )], data_files=(('bbn', ['../bbn/sBBN.dat']),) )
try: from setuptools import setup # try first in case it's already there. except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup from distutils.extension import Extension try: from Cython.Build import cythonize USE_CYTHON = True extensions = cythonize('vpython/cyvector.pyx') except ImportError: extensions = [Extension('vpython.cyvector', ['vpython/cyvector.c'])] import versioneer setup( name='vpython', packages=['vpython'], version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='VPython for Jupyter Notebook', long_description=open('README.md').read(), author='John Coady / Ruth Chabay / Bruce Sherwood / Steve Spicklemire', author_email='*****@*****.**', url='http://pypi.python.org/pypi/vpython/', license='LICENSE.txt', keywords='vpython',
from distutils.core import setup from distutils.extension import Extension import platform from Cython.Distutils import build_ext import numpy math_lib = ["m"] if platform.platform()[:7] == "Windows": math_lib = [] ext_modules = [ Extension( name="klsyn.klatt_wrap", sources=["klsyn/klatt_wrap.pyx"], libraries=math_lib, include_dirs=[numpy.get_include()], language="c", ) ] setup( name='klsyn', cmdclass={'build_ext': build_ext}, #ext_package='klatt_wrap', ext_modules=ext_modules, packages=['klsyn'], scripts=[ 'scripts/klattsyn.py', 'scripts/klattsyn_interactive.py', 'scripts/klp_continuum.py', 'scripts/doc2klp.py', 'scripts/wxklsyn.pyw' ],
from distutils.core import setup
from distutils.extension import Extension
import os
import sys

# Start with an empty C++ extension and discover its sources and
# include directories by walking the package tree.
ext = Extension('imagetask.core', [])
ext.language = 'c++'
for dirpath, _subdirs, filenames in os.walk('imagetask'):
    ext.include_dirs.append(dirpath)
    for filename in filenames:
        if filename.endswith('.cpp'):
            ext.sources.append(os.path.join(dirpath, filename))
ext.include_dirs.append('.')

setup(name='imagetask',
      version='0.1',
      packages=['imagetask'],
      ext_modules=[ext])
#!/usr/bin/env python
# coding=utf-8
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize

# The Cython wrapper plus the C driver sources it binds.
sourcefiles = ["axidma.pyx", "libaxidma/libaxidma.c", "libaxidma/libgpio.c"]

axidma_ext = Extension("axidma", sources=sourcefiles)

setup(name="axidma", ext_modules=cythonize([axidma_ext]))
def __init__(self, *args, **kwargs):
    # Extension accepting an extra 'glob_extra_link_args' keyword
    # (default []), stored on the instance and stripped from kwargs
    # before delegating to Extension.__init__.
    self.glob_extra_link_args = kwargs.pop('glob_extra_link_args', [])
    Extension.__init__(self, *args, **kwargs)
for ext_name in pyxfiles: if has_cython: ext = ".pyx" else: ext = ".cpp" pyxfile = ext_name + ext # replace "/" by "." get module if "/" in ext_name: ext_name = ext_name.replace("/", ".") sources = [pyxfile] extmod = Extension(ext_name, sources=sources, libraries=['cpptraj'], language='c++', library_dirs=[libdir, ], include_dirs=[cpptraj_include, pytraj_home], extra_compile_args=extra_compile_args, extra_link_args=extra_link_args) extmod.cython_directives = { 'embedsignature': True, 'boundscheck': False, 'wraparound': False, } ext_modules.append(extmod) setup_args = {} packages = [ 'pytraj', 'pytraj.utils',
#!/usr/bin/env python
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize

# Strip assertion checks from the generated C.
macros = [('PYREX_WITHOUT_ASSERTIONS', None)]
cmdclass = {'build_ext': build_ext}

print("Compiling with Cython")
ext_modules = [
    Extension('_cbitstring', ["_cbitstring.pyx"], define_macros=macros)
]
cythonize(ext_modules)

setup(
    name='bitstring',
    ext_modules=ext_modules,
)
""" compiler_args_unix = ["-O3", "-ffast-math", "-march=native"] compiler_args_vcpp = ["/O2", "/fp:fast", "/GL"] platform = sys.platform if platform.startswith('win'): compiler_args = compiler_args_vcpp elif platform.startswith('darwin'): compiler_args = [] else: compiler_args = compiler_args_unix ext_modules=[ Extension("*", ["dominance/*.pyx"], extra_compile_args = compiler_args, ), Extension("*", ["loader/levenshtein_wrapper.pyx"], extra_compile_args = compiler_args, ), Extension("*", ["loader/distance_mtr.pyx"], extra_compile_args = compiler_args, ) ] setup( name = "rfd-discovery", version = "0.0.1", cmdclass = {"build_ext": build_ext},
except ImportError: HAVE_CYTHON = False def _read(fn): path = os.path.join(os.path.dirname(__file__), fn) return open(path).read() # Search far and wide for the dependencies. INC_DIRS = ['/opt/local/include', os.path.expanduser('~/.brew/include')] ext = Extension( "lastfp._fplib", [ "fplib.pyx", "fplib/src/Filter.cpp", "fplib/src/FingerprintExtractor.cpp", "fplib/src/OptFFT.cpp", ], language="c++", include_dirs=['fplib/include'] + INC_DIRS, libraries=["stdc++", "samplerate", "fftw3f"], ) # If we don't have Cython, build from *.cpp instead of the Cython # source. Also, if we do have Cython, make sure we use its build # command. cmdclass = {} if HAVE_CYTHON: cmdclass['build_ext'] = build_pyx else: ext.sources[0] = 'fplib.cpp'
#self.rc = default_compiler_so # inject our redefined _compile method into the class self.compile = compile # run the customize_compiler class custom_build_ext(build_ext): def build_extensions(self): customize_compiler_for_nvcc(self.compiler) build_ext.build_extensions(self) ext_modules = [ Extension("model.utils.cython_bbox", ["model/utils/bbox.pyx"], extra_compile_args={'gcc': []}, include_dirs=[numpy_include]), Extension( 'pycocotools._mask', sources=['pycocotools/maskApi.c', 'pycocotools/_mask.pyx'], include_dirs=[numpy_include, 'pycocotools'], extra_compile_args={'gcc': ['/Qstd=c99']}, ), ] setup( name='faster_rcnn', ext_modules=ext_modules, # inject our custom trigger cmdclass={'build_ext': custom_build_ext}, )
def __init__(self, name, wrap_sources, aux_sources, **kw):
    """Extension whose sources are split into wrapper and auxiliary files."""
    all_sources = wrap_sources + aux_sources
    Extension.__init__(self, name, all_sources, **kw)
    # Module name relative to its top-level package.
    # NOTE(review): split(".", 1) keeps everything after the FIRST dot,
    # so "pkg.sub.mod" yields "sub.mod" -- confirm rsplit was not intended.
    self.module_name = self.name.split(".", 1)[-1]
    self.wrap_sources = wrap_sources
#!/usr/bin/env python from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext import numpy as np setup( cmdclass={'build_ext': build_ext}, ext_modules=[ Extension("joint_bilateral_filter", sources=["joint_bilateral_filter.pyx", "joint_bilateral.c"], include_dirs=[np.get_include()]) ], ) def demo(): import joint_bilateral_filter as jbf import matplotlib.pyplot as plt from skimage.color import lab2rgb from skimage.transform import resize L = plt.imread('./demo_images/L.png')[:, :, 0] ab = np.stack([ plt.imread('./demo_images/a.png')[::2, ::2, 0], plt.imread('./demo_images/b.png')[::2, ::2, 0] ], axis=0)
cmdclass={'build_ext': build_ext}, ext_modules=[ Extension( "simpleforest", ["simpleforest.pyx"], language="c++", include_dirs=get_numpy_include_dirs() + ['src', '.', "/vol/medic02/users/kpk09/LOCAL/include"], library_dirs=["/vol/medic02/users/kpk09/LOCAL/lib"], libraries=[ 'opencv_core', 'opencv_imgproc', 'opencv_features2d', # 'opencv_gpu', 'opencv_calib3d', 'opencv_objdetect', 'opencv_video', 'opencv_highgui', 'opencv_ml', # 'opencv_legacy', # 'opencv_contrib', 'opencv_flann', 'tbb', 'boost_filesystem', 'boost_system', 'boost_regex' ], extra_link_args=["-w"], extra_compile_args=["-w"]), Extension( "integralforest",
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import numpy
import pkgconfig

# Single C++ extension generated from the svd3 Cython source, compiled
# against NumPy's C headers.
extension = [
    Extension(
        'svd3',
        ['src/svd3.pyx'],
        language='c++',
        include_dirs=[numpy.get_include()],
    )
]

setup(ext_modules=cythonize(extension))
def c_to_dll(filename, ext = None, force_rebuild = 0, build_in_temp=False,
             cbuild_dir=None, setup_args={}, reload_support=False,
             inplace=False):
    """Compile a C file to a DLL and return the name of the generated .so or .dll .

    Parameters (as used below):
      filename       -- path to a .c (or .py, when Cython is available) source
      ext            -- optional pre-built Extension; built from filename if falsy
      force_rebuild  -- pass --force to build_ext
      build_in_temp  -- pass --pyrex-c-in-temp (Cython builds only)
      cbuild_dir     -- build base directory; defaults to <dir>/_cbld
      setup_args     -- extra distutils setup() arguments
                        (NOTE(review): mutable default, but only read/copied here)
      reload_support -- copy the output to a fresh '.reloadN' name so that a
                        module already imported in this process can be re-imported
      inplace        -- build next to the source when the package layout matches
    """
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)
    path, name = os.path.split(os.path.abspath(filename))

    if not ext:
        modname, extension = os.path.splitext(name)
        assert extension in (".c", ".py"), extension
        if not HAS_CYTHON:
            # Without Cython we can only compile the pre-generated .c file.
            filename = filename[:-len(extension)] + '.c'
        ext = Extension(name=modname, sources=[filename])

    if not cbuild_dir:
        cbuild_dir = os.path.join(path, "_cbld")

    # Walk the dotted module name backwards against the directory path to
    # find the package base directory; bail out if they do not match.
    package_base_dir = path
    for package_name in ext.name.split('.')[-2::-1]:
        package_base_dir, pname = os.path.split(package_base_dir)
        if pname != package_name:
            # something is wrong - package path doesn't match file path
            package_base_dir = None
            break

    script_args=setup_args.get("script_args",[])
    if DEBUG or "--verbose" in script_args:
        quiet = "--verbose"
    else:
        quiet = "--quiet"
    args = [quiet, "build_ext"]
    if force_rebuild:
        args.append("--force")
    if inplace and package_base_dir:
        args.extend(['--build-lib', package_base_dir])
    if ext.name == '__init__' or ext.name.endswith('.__init__'):
        # package => provide __path__ early
        if not hasattr(ext, 'cython_directives'):
            ext.cython_directives = {'set_initial_path' : 'SOURCEFILE'}
        elif 'set_initial_path' not in ext.cython_directives:
            ext.cython_directives['set_initial_path'] = 'SOURCEFILE'

    if HAS_CYTHON and build_in_temp:
        args.append("--pyrex-c-in-temp")

    sargs = setup_args.copy()
    sargs.update({
        "script_name": None,
        "script_args": args + script_args,
    })
    # late import, in case setuptools replaced it
    from distutils.dist import Distribution
    dist = Distribution(sargs)
    if not dist.ext_modules:
        dist.ext_modules = []
    dist.ext_modules.append(ext)
    if HAS_CYTHON:
        dist.cmdclass = {'build_ext': build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cbuild_dir

    cfgfiles = dist.find_config_files()
    dist.parse_config_files(cfgfiles)

    try:
        ok = dist.parse_command_line()
    except DistutilsArgError:
        raise

    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()
    assert ok

    try:
        obj_build_ext = dist.get_command_obj("build_ext")
        dist.run_commands()
        so_path = obj_build_ext.get_outputs()[0]
        if obj_build_ext.inplace:
            # Python distutils get_outputs()[ returns a wrong so_path
            # when --inplace ; see http://bugs.python.org/issue5977
            # workaround:
            so_path = os.path.join(os.path.dirname(filename),
                                   os.path.basename(so_path))
        if reload_support:
            org_path = so_path
            timestamp = os.path.getmtime(org_path)
            global _reloads
            last_timestamp, last_path, count = _reloads.get(org_path, (None,None,0) )
            if last_timestamp == timestamp:
                # Source unchanged since the last build: reuse the last copy.
                so_path = last_path
            else:
                basename = os.path.basename(org_path)
                while count < 100:
                    count += 1
                    r_path = os.path.join(obj_build_ext.build_lib,
                                          basename + '.reload%s'%count)
                    try:
                        import shutil # late import / reload_support is: debugging
                        try:
                            # Try to unlink first --- if the .so file
                            # is mmapped by another process,
                            # overwriting its contents corrupts the
                            # loaded image (on Linux) and crashes the
                            # other process. On Windows, unlinking an
                            # open file just fails.
                            if os.path.isfile(r_path):
                                os.unlink(r_path)
                        except OSError:
                            continue
                        shutil.copy2(org_path, r_path)
                        so_path = r_path
                    except IOError:
                        continue
                    break
                else:
                    # used up all 100 slots
                    raise ImportError("reload count for %s reached maximum"%org_path)
                _reloads[org_path]=(timestamp, so_path, count)
        return so_path
    except KeyboardInterrupt:
        sys.exit(1)
    except (IOError, os.error):
        exc = sys.exc_info()[1]
        error = grok_environment_error(exc)
        if DEBUG:
            sys.stderr.write(error + "\n")
        raise
import distutils
distutils.log.set_verbosity(1)
import os
from distutils.sysconfig import get_config_vars

# Strip -Wstrict-prototypes from the OPT config var: it is a C-only flag
# and causes warnings when compiling C++ sources.
remove_flag = ['-Wstrict-prototypes']
os.environ['OPT'] = " ".join(
    flag for flag in get_config_vars('OPT')[0].split()
    if flag not in remove_flag
)

VERSION = '0.1'

worker = Extension('worker',
                   define_macros = [('PYTHON', '1')],
                   include_dirs = ['/usr/include/cppconn', '/usr/include/boost'],
                   libraries = ['mysqlcppconn', 'boost_python'],
                   sources = ['worker.cpp', 'worker_extra.cpp'],
                   extra_compile_args = ['-std=c++11'])

local_libs = True
if local_libs:
    # NOTE(review): these assignments REPLACE (not extend) the values set
    # above, including downgrading -std=c++11 to -std=c++0x -- confirm intended.
    worker.include_dirs = ['/home/scolvin/cppconn/include', '/home/scolvin/boost']
    worker.library_dirs = ['/home/scolvin/cppconn/lib', '/home/scolvin/boost/lib']
    worker.extra_compile_args = ['-std=c++0x']

# NOTE(review): chunk truncated -- long_description's string continues
# beyond this point.
setup (name = 'worker',
       version = VERSION,
       description = 'Performs business grunt work in c++ interacting directly with the db.',
       author = 'Samuel Colvin',
       author_email = '*****@*****.**',
       long_description = '''
# Native PolyPartition sources and include path.
c_polypart_src = join(c_polypart_root, 'src')
c_polypart_incs = [c_polypart_src]
c_polypart_files = [join(c_polypart_src, x) for x in [
    'polypartition.cpp']]

# Prefer the Cython source when Cython is available; otherwise fall back
# to the pre-generated C file and the stock build_ext command.
if have_cython:
    pypolypart_files = [
        'pypolypart/python/pypolypart.pyx'
    ]
    cmdclass = {'build_ext': build_ext}
else:
    pypolypart_files = ['pypolypart/python/pypolypart.c']
    cmdclass = {}

ext = Extension('pypolypart',
                pypolypart_files + c_polypart_files,
                include_dirs=c_polypart_incs,
                language="c++")

# On Read the Docs builds, embed call signatures in docstrings.
if environ.get('READTHEDOCS', None) == 'True':
    ext.pyrex_directives = {'embedsignature': True}

setup(
    name='pypolypart',
    description='Cython bindings for Polygon Partition',
    author='Alex Chozabu PB - refrencing cymunk',
    author_email='*****@*****.**',
    cmdclass=cmdclass,
    ext_modules=[ext])
def platform():
    """Return a short platform tag, e.g. 'win32'/'win64', 'linux32'/'linux64', or 'ios' for darwin."""
    map1 = {'win32':'win', 'linux2':'linux', 'linux':'linux'}
    map2 = {'darwin':'ios'}
    os = sys.platform   # NOTE: shadows the os module, but only inside this function
    if os in map1:
        # struct.calcsize('P') is the pointer size -> 32 or 64 bit suffix.
        return '%s%d' % (map1[os], struct.calcsize('P')*8)
    return map2.get(os, os)
platform = platform()   # replace the helper with its computed result

api_dir = PREFIX+'api/%s'%platform
include_dirs = [PREFIX+'ctp', api_dir]
library_dirs = [api_dir]
ext_modules = []; package_data = []
# One C++ extension per (module, vendor-library) pair in BUILD.
for k,v in BUILD:
    extm = Extension(name='ctp._'+k,
                     language='c++',
                     include_dirs=include_dirs,
                     library_dirs=library_dirs,
                     libraries=[v],
                     sources=['ctp/%s.cpp'%k],
                     )
    ext_modules.append(extm)
    if platform.startswith('win'):
        k = '%s.dll'%v
    else:
        # Let the extension find the bundled .so next to itself at runtime.
        extm.extra_link_args = ['-Wl,-rpath,$ORIGIN']
        k = 'lib%s.so'%v
    package_data.append(k)
    v = 'ctp/' + k
    if not os.path.exists(v):
        # Copy the vendor shared library into the package directory.
        shutil.copy2('%s/%s'%(api_dir,k), v)
# NOTE(review): chunk truncated -- the setup() call continues beyond here.
setup(
    name='ctp',
    version=__version__,
    author=__author__,
    cmdclass={},
    ext_modules=ext_modules,
def __init__(self, *args, **kwargs):
    """Extension constructor that also accepts an ``init_func`` keyword.

    ``init_func`` is removed from ``kwargs`` before delegating, so the
    base class never sees it; it is kept on the instance as
    ``_init_func`` (``None`` when not supplied).
    """
    init_func = kwargs.pop("init_func", None)
    self._init_func = init_func
    Extension.__init__(self, *args, **kwargs)
"""distutils.command.build_ext
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy as np

# /opt/anaconda3/bin/python3 setup.py build_ext --inplace

# Locate the NumPy C headers; very old releases spelled the accessor
# get_numpy_include() instead of get_include().
try:
    numpy_include = np.get_include()
except AttributeError:
    numpy_include = np.get_numpy_include()

nms_extension = Extension(
    "cython_nms",
    ["cython_nms.pyx"],
    extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
    include_dirs=[numpy_include],
)

setup(name='mask_rcnn',
      cmdclass={"build_ext": build_ext},
      ext_modules=cythonize([nms_extension]))
# NOTE(review): chunk begins mid-try/except (the 'try:' is out of view)
# and the setup() call at the end is truncated.
except:
    # Cython missing: declare it via setup_requires so setuptools fetches it.
    from setuptools.dist import Distribution
    Distribution(dict(setup_requires=CYTHON_VERSION))
finally:
    from Cython.Build import cythonize

# The wrapper plus the vendored tinydtls C sources, compiled as one module.
ext_modules = cythonize([
    Extension(
        "DTLSSocket.dtls",
        [
            "DTLSSocket/dtls.pyx",
            "DTLSSocket/tinydtls/ccm.c",
            "DTLSSocket/tinydtls/crypto.c",
            "DTLSSocket/tinydtls/dtls.c",
            "DTLSSocket/tinydtls/dtls_debug.c",
            "DTLSSocket/tinydtls/dtls_time.c",
            "DTLSSocket/tinydtls/hmac.c",
            "DTLSSocket/tinydtls/netq.c",
            "DTLSSocket/tinydtls/peer.c",
            "DTLSSocket/tinydtls/session.c",
            "DTLSSocket/tinydtls/aes/rijndael.c",
            "DTLSSocket/tinydtls/sha2/sha2.c"
        ],
        include_dirs=['DTLSSocket/tinydtls'],
        define_macros=[('DTLSv12', '1'),
                       ('WITH_SHA256', '1'),
                       ('DTLS_CHECK_CONTENTTYPE', '1'),
                       ('_GNU_SOURCE', '1')],
        undef_macros=["NDEBUG"],
    )
])

setup(
    name="DTLSSocket",
    version='0.1.4',
    description= "DTLSSocket is a cython wrapper for tinydtls with a Socket like interface",
def run(self):
    # NOTE(review): this is a method of a class defined before this chunk
    # (its header is out of view). Run 'configure' first so generated
    # sources exist, then delegate to the stock build.
    self.distribution.run_command('configure')
    return build_ext.run(self)

cmdclass['cython'] = CythonCommand
cmdclass['build_ext'] = zbuild_ext

# One extension per (submodule, package) entry; 'suffix' selects between
# Cython (.pyx) and pre-generated C sources.
extensions = []
for submod, packages in submodules.items():
    for pkg in sorted(packages):
        sources = [pjoin('zmq', submod, pkg+suffix)]
        if suffix == '.pyx':
            sources.extend(packages[pkg])
        ext = Extension(
            'zmq.%s.%s'%(submod, pkg),
            sources = sources,
            include_dirs=[pjoin('zmq', sub) for sub in ('utils','core','devices')],
        )
        if suffix == '.pyx' and ext.sources[0].endswith('.c'):
            # undo setuptools stupidly clobbering cython sources:
            ext.sources = sources
        extensions.append(ext)

# No compiled extensions on PyPy.
if 'PyPy' in sys.version:
    extensions = []

package_data = {'zmq':['*.pxd'],
                'zmq.core':['*.pxd'],
                'zmq.devices':['*.pxd'],
                'zmq.utils':['*.pxd', '*.h'],
}
__author__ = 'quentin'

from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

# Single Cython extension compiled against NumPy's C headers.
ext = Extension(
    "cpython_proto",
    ["cpython_proto.pyx"],
    include_dirs=[numpy.get_include()],
)

setup(
    cmdclass={'build_ext': build_ext},
    ext_modules=[ext],
)
class custom_build_ext(build_ext):
    """build_ext that customizes the compiler for nvcc before building."""

    def build_extensions(self):
        customize_compiler_for_nvcc(self.compiler)
        build_ext.build_extensions(self)


# CUDA NMS kernel plus two pure-Cython NMS variants.
ext_modules = [
    Extension(
        'gpu_nms',
        ['nms_kernel.cu', 'gpu_nms.pyx'],
        library_dirs=[CUDA['lib64']],
        libraries=['cudart'],
        language='c++',
        runtime_library_dirs=[CUDA['lib64']],
        # this syntax is specific to this build system
        # we're only going to use certain compiler args with nvcc and not with
        # gcc the implementation of this trick is in customize_compiler() below
        extra_compile_args={
            'gcc': ["-Wno-unused-function"],
            'nvcc': [
                '-arch=sm_35', '--ptxas-options=-v', '-c', '--compiler-options',
                "'-fPIC'"
            ]
        },
        include_dirs=[numpy_include, CUDA['include']]),
    Extension("soft_cpu_nms",
              ["soft_cpu_nms.pyx"],
              extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]},
              include_dirs=[numpy_include]),
    Extension("cpu_nms_cython",
              ["cpu_nms_cython.pyx"],
              extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]},
              include_dirs=[numpy_include]),
]
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

# BUG FIX: numpy.get_numpy_include() was removed from NumPy long ago;
# the supported accessor is numpy.get_include(). Prefer it, but keep a
# fallback for ancient NumPy releases (same idiom as the other setup
# scripts in this repository).
try:
    numpy_include = numpy.get_include()
except AttributeError:
    numpy_include = numpy.get_numpy_include()

setup(name="CPWOPTcritical",
      cmdclass={"build_ext": build_ext},
      ext_modules=[
          Extension("CPWOPTcritical",
                    ["CPWOPTcritical.pyx"],
                    include_dirs=[numpy_include])
      ])
# C++ translation units that make up the 'conedy' extension module.
files = ["neuroPython.cpp", "neuroPythonNodes.cpp", "command.cpp",
         "instruction.cpp", "stdOdeIntegrator.cpp", "lyapunov.cpp",
         "fullNetworkWithNodes.cpp", "generatedNodes.cpp", "globals.cpp",
         "sdeNode.cpp", "stdSdeIntegrator.cpp", "odeNode.cpp",
         "gslOdeNode.cpp", "ioNode.cpp", "pulseCoupledPhaseOscillator.cpp",
         "network.cpp", "dynNetwork.cpp", "createNetwork.cpp",
         "spatialNetwork.cpp", "mapNode.cpp", "eventHandler.cpp",
         "node.cpp", "priorityQueue.cpp", "dynNode.cpp", "gslNoise.cpp",
         "statisticsNetwork.cpp", "commandLineArguments.cpp", "params.cpp"]

# Platform-specific search paths. The Extension construction itself is
# identical on both platforms, so it is done once below instead of being
# duplicated in each branch (the original repeated the same call twice).
if sys.platform == "win32":
    include_dirs = ["C:\\Program Files (x86)\\boost\\boost_1_46_1"]
    libraries = ["gsl", "gslcblas"]
    library_dirs = ['C:\\Program Files (x86)\\boost\\boost_1_46_1\\lib']
else:
    include_dirs = ["/usr/include/boost", "."]
    libraries = ["boost_python", "gsl", "gslcblas", "boost_iostreams", "z"]
    library_dirs = ['/usr//lib']

module = Extension("conedy", files,
                   library_dirs=library_dirs,
                   libraries=libraries,
                   include_dirs=include_dirs,
                   depends=[])

if sys.platform == "win32":
    # MSVC: /GX enables C++ exception handling; pass the build-time defines.
    module.extra_compile_args = ['/GX', '-DPYTHON', '-DSVN_REV=0.262']
def __init__(self, names, sources, openmp=False, **kw):
    """Create the extension and record the ``openmp`` build flag.

    The flag is simply stored on the instance; it is not interpreted
    here (presumably the build command reads it later -- confirm against
    the rest of the file).
    """
    _Extension.__init__(self, names, sources, **kw)
    self.openmp = openmp
def check_extensions_list(self, extensions):
    """Ensure that the list of extensions (presumably provided as a
    command option 'extensions') is valid, i.e. it is a list of
    Extension objects.  We also support the old-style list of 2-tuples,
    where the tuples are (ext_name, build_info), which are converted to
    Extension instances here.

    Raise DistutilsSetupError if the structure is invalid anywhere;
    just returns otherwise.
    """
    # FIX: the original used the Python-2-only statement form
    # ``raise Exc, "msg"`` (a SyntaxError on Python 3); the call form
    # used below is equivalent and valid on both Python 2 and 3.
    if not isinstance(extensions, list):
        raise DistutilsSetupError(
            "'ext_modules' option must be a list of Extension instances")

    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            continue                # OK! (assume type-checking done
                                    # by Extension constructor)

        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError(
                "each element of 'ext_modules' option must be an "
                "Extension instance or 2-tuple")

        ext_name, build_info = ext

        log.warn(("old-style (ext_name, build_info) tuple found in "
                  "ext_modules for extension '%s'"
                  "-- please convert to Extension instance" % ext_name))

        if not (isinstance(ext_name, str) and
                extension_name_re.match(ext_name)):
            raise DistutilsSetupError(
                "first element of each tuple in 'ext_modules' "
                "must be the extension name (a string)")

        if not isinstance(build_info, dict):
            raise DistutilsSetupError(
                "second element of each tuple in 'ext_modules' "
                "must be a dictionary (build info)")

        # OK, the (ext_name, build_info) dict is type-safe: convert it
        # to an Extension instance.
        ext = Extension(ext_name, build_info['sources'])

        # Easy stuff: one-to-one mapping from dict elements to
        # instance attributes.
        for key in ('include_dirs', 'library_dirs', 'libraries',
                    'extra_objects', 'extra_compile_args',
                    'extra_link_args'):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)

        # Medium-easy stuff: same syntax/semantics, different names.
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warn("'def_file' element of build info dict "
                     "no longer supported")

        # Non-trivial stuff: 'macros' split into 'define_macros'
        # and 'undef_macros'.
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError(
                        "'macros' element of build info dict "
                        "must be 1- or 2-tuple")
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                elif len(macro) == 2:
                    ext.define_macros.append(macro)

        extensions[i] = ext
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

# Build the multithreaded integrator Cython module against NumPy's headers.
extensions = [
    Extension(
        "multithread_integrated",
        ["multithread_integrated.pyx"],
        include_dirs=[numpy.get_include()],
    ),
]

setup(
    cmdclass={'build_ext': build_ext},
    ext_modules=extensions,
)