def __init__(self, name, sources, **kw):
    """Build the extension, merging FLANN build settings into *kw*.

    For every keyword list reported by ``get_flann_info()`` (e.g.
    include dirs, libraries), the reported values are appended to the
    caller-supplied list, skipping entries already present.
    """
    for key, extra_values in get_flann_info().items():
        merged = kw.get(key) or []
        kw[key] = merged
        for value in extra_values:
            if value not in merged:
                merged.append(value)
    Extension.__init__(self, name, sources, **kw)
def __init__(self, name, sources, **kw):
    """Set up an extension linked against FLANN.

    The FLANN libraries, include directory, library directory and
    runtime library directory are merged into the corresponding keyword
    lists without duplicating entries the caller already supplied.
    """
    def extend_unique(arg, values):
        # Append *values* to kw[arg], creating the list if absent and
        # skipping duplicates.
        target = kw.get(arg) or []
        kw[arg] = target
        for value in values:
            if value not in target:
                target.append(value)

    libdir = os.path.dirname(get_flann_lib())
    extend_unique('libraries', ['flann', 'flann_cpp'])
    extend_unique('include_dirs', [get_flann_include()])
    extend_unique('library_dirs', [libdir])
    extend_unique('runtime_library_dirs', [libdir])
    Extension.__init__(self, name, sources, **kw)
def __init__(self, name, sources, libraries=()):
    """Create an OpenGL-aware extension with per-platform build flags.

    On win32, GL/GLU are renamed to the native import libraries and
    ``m`` (libm) is dropped; on darwin, the OpenGL framework's headers
    and libraries are wired in explicitly when GL is requested.
    """
    rename = {}
    drop = []
    compile_args = ["-O3"]
    link_args = []
    if sys.platform == "win32":
        rename = {'GL': 'opengl32', 'GLU': 'glu32'}
        drop = ['m']
        compile_args.append("-fno-strict-aliasing")
    libraries = [rename.get(lib, lib) for lib in libraries if lib not in drop]
    if sys.platform == "darwin" and "GL" in libraries:
        compile_args.extend([
            '-fno-common',
            '-I', '/System/Library/Frameworks/OpenGL.framework/Headers',
        ])
        link_args.extend([
            '-dynamic',
            '-L/System/Library/Frameworks/OpenGL.framework/Libraries',
        ])
    BaseExtension.__init__(self, name, sources, libraries=libraries,
                           extra_compile_args=compile_args,
                           extra_link_args=link_args)
here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() if '--use-cython' in sys.argv: USE_CYTHON = True sys.argv.remove('--use-cython') else: USE_CYTHON = False ext = '.pyx' if USE_CYTHON else '.c' extensions = [ Extension('adeft.score._score', ['adeft/score/_score' + ext]), ] if USE_CYTHON: from Cython.Build import cythonize extensions = cythonize(extensions, compiler_directives={'language_level': 3}) setup(name='adeft', version='0.10.0', description=('Acromine based Disambiguation of Entities From' ' Text'), long_description=long_description, long_description_content_type='text/markdown', url='https://github.com/indralab/adeft', download_url='https://github.com/indralab/adeft/archive/0.10.0.tar.gz',
def __init__(self, name, sources, *args, **kwargs):
    """Initialize the base Extension, then keep the original sources list.

    The base constructor may normalise or replace ``sources``; re-assigning
    afterwards preserves the caller's original list (e.g. the .pyx files)
    on the instance.  NOTE(review): presumably so a custom build step can
    see the pre-translation sources -- confirm against the build command.
    """
    _Extension.__init__(self, name, sources, *args, **kwargs)
    self.sources = sources
import commands get_output = commands.getoutput except ImportError: import subprocess def _get_output(*args, **kwargs): res = subprocess.check_output(*args, shell=True, **kwargs) decoded = res.decode('utf-8') return decoded.strip() get_output = _get_output ext_module_misc = Extension( 'gssapi.base.misc', extra_link_args=get_output('krb5-config --libs gssapi').split(), extra_compile_args=get_output('krb5-config --cflags gssapi').split(), sources=[ 'gssapi/base/misc.pyx', # 'gssapi/base/cython_converters.pyx' ]) ext_module_creds = Extension( 'gssapi.base.creds', extra_link_args=get_output('krb5-config --libs gssapi').split(), extra_compile_args=get_output('krb5-config --cflags gssapi').split(), sources=[ 'gssapi/base/creds.pyx', # 'gssapi/base/cython_converters.pyx' ]) ext_module_names = Extension( 'gssapi.base.names',
libs.append('CUDA') runtime_lib_dirs.append(cuda_lib_dir) include_dirs.append(cuda_include_dir) exc_list = [] else: exc_list = ['AlgoLibR/**/*gpu.pyx'] cython_files = ['AlgoLibR/**/**.pyx'] extensions = [ Extension( "*", sources=cython_files, include_dirs=include_dirs, library_dirs=[get_python_lib()], libraries=libs, language='c++', runtime_library_dirs=runtime_lib_dirs, extra_compile_args=['-std=c++11'], #,'-fopenmp'], #extra_link_args=['-lgomp'] ) ] shutil.rmtree('build', ignore_errors=True) setup( name=name, description='AlgoLibR - Algorithms Lib R', #long_description=open('README.md', encoding='UTF-8').read(), #long_description_content_type='text/markdown', #url='https://github.com/raoqiyu',
url = "https://github.com/KlugerLab/FIt-SNE" download_url = "https://github.com/KlugerLab/pyFIt-SNE/archive/%s.tar.gz" % __version__ keywords = ["tSNE", "embedding"] description = "Fast Fourier Transform-accelerated Interpolation-based t-SNE (FIt-SNE)" license = "BSD3" #Try...except because for some OS X setups, the compilation fails without -stdlib=libc++ try: if platform == "darwin": extensions = [ Extension("fitsne.cppwrap", [ "fitsne/cppwrap.pyx", "fitsne/src/nbodyfft.cpp", "fitsne/src/sptree.cpp", "fitsne/src/tsne.cpp" ], language="c++", extra_compile_args=[ "-std=c++11", "-O3", '-pthread', "-lfftw3", "-lm" ], extra_link_args=[ '-lfftw3', '-lm', "-mmacosx-version-min=10.9" ]) ] else: extensions = [ Extension("fitsne.cppwrap", [ "fitsne/cppwrap.pyx", "fitsne/src/nbodyfft.cpp", "fitsne/src/sptree.cpp", "fitsne/src/tsne.cpp" ], language="c++", extra_compile_args=[ "-std=c++11", "-O3", '-pthread', "-lfftw3", "-lm"
try: import numpy except ImportError: build_requires = ['numpy>=1.9.0'] hgversion = 'unknown' clstm = Extension('_clstm', libraries=['png', 'protobuf'], include_dirs=[ '/usr/include/eigen3', '/usr/local/include/eigen3', '/usr/local/include', '/usr/include/hdf5/serial' ], swig_opts=['-c++', '-I/usr/local/include/eigen3'], extra_compile_args=[ '-std=c++11', '-w', '-Dadd_raw=add', '-DNODISPLAY=1', '-DTHROW=throw', '-DHGVERSION="\\"' + hgversion + '\\""' ], sources=[ 'clstm.i', 'clstm.cc', 'clstm_prefab.cc', 'extras.cc', 'ctc.cc', 'clstm_proto.cc', 'clstm.pb.cc' ]) print("making proto file") os.system("protoc clstm.proto --cpp_out=.") setup( name='clstm', version='0.0.5', cmdclass=custom_cmd_class,
"deps/gsl/sys/infnan.c", "deps/gsl/sys/fdiv.c", "deps/gsl/sys/coerce.c", "deps/gsl/err/stream.c" ] # Create the extensions. Manually enumerate the required extensions = [] # PyPolyaGamma and GSL source files extensions.append( Extension( 'pypolyagamma.pypolyagamma', depends=headers, extra_compile_args=["-w", "-DHAVE_INLINE"], extra_link_args=[], include_dirs=include_dirs, language="c++", sources=["pypolyagamma/pypolyagamma" + ext] + sources, )) # If OpenMP is requested, compile the parallel extension if USE_OPENMP: extensions.append( Extension( 'pypolyagamma.parallel', depends=headers, extra_compile_args=["-w", "-fopenmp", "-DHAVE_INLINE"], extra_link_args=["-fopenmp"], include_dirs=include_dirs, language="c++",
def __init__(self, name, sources, **kwargs):
    """Initialize the extension with parameters.

    External package extensions (mostly coming from pkg-config) add a few
    parameters to the standard arguments of the constructor:

    packages : [string]
      Names of the bob (pkg-config) modules to link against
      **additionally** to ``bob-python``.  Candidates are module names
      like "bob-machine" or "bob-math".  For convenience, other
      pkg-config registered packages (e.g. "opencv") may also be given.

    boost_modules : [string]
      Boost modules that we need to link against.

    bob_packages : [string]
      Bob libraries (such as ``'bob.core'``) containing C++ code that
      should be included and linked.

    system_include_dirs : [string]
      Include directories outside our packages, to be passed with the
      -isystem compiler option.
    """
    # Pop the 'packages' keyword; accept a single string or a list.
    packages = []
    if 'packages' in kwargs:
        if isinstance(kwargs['packages'], str):
            packages.append(kwargs['packages'])
        else:
            packages.extend(kwargs['packages'])
        del kwargs['packages']

    # uniformize packages
    packages = normalize_requirements([k.strip().lower() for k in packages])

    # check if we have bob libraries to link against
    if 'bob_packages' in kwargs:
        self.bob_packages = kwargs['bob_packages']
        del kwargs['bob_packages']
    else:
        self.bob_packages = None

    bob_includes, bob_libraries, bob_library_dirs = get_bob_libraries(self.bob_packages)

    # system include directories
    if 'system_include_dirs' in kwargs:
        system_includes = kwargs['system_include_dirs']
        del kwargs['system_include_dirs']
    else:
        system_includes = []

    # Boost requires a special treatment
    # NOTE(review): this deletes from `packages` while enumerate() is
    # iterating it -- safe only if at most one 'boost*' entry exists.
    boost_req = ''
    for i, pkg in enumerate(packages):
        if pkg.startswith('boost'):
            boost_req = pkg
            del packages[i]

    # We still look for the keyword 'boost_modules'
    boost_modules = []
    if 'boost_modules' in kwargs:
        if isinstance(kwargs['boost_modules'], str):
            boost_modules.append(kwargs['boost_modules'])
        else:
            boost_modules.extend(kwargs['boost_modules'])
        del kwargs['boost_modules']

    # Requesting boost modules implies a boost requirement.
    if boost_modules and not boost_req:
        boost_req = 'boost >= 1.0'

    # Was a version parameter given?
    version = None
    if 'version' in kwargs:
        version = kwargs['version']
        del kwargs['version']

    # Mixing: our default build parameters; caller values are appended later.
    parameters = {
        'define_macros': generate_self_macros(name, version),
        'extra_compile_args': ['-std=c++0x'],  #synonym for c++11?
        'extra_link_args': [],
        'library_dirs': [],
        'libraries': bob_libraries,
    }

    # Compilation options
    if platform.system() == 'Darwin':
        parameters['extra_compile_args'] += ['-Wno-#warnings']

    user_includes = kwargs.get('include_dirs', [])
    # Bookkeeping of what pkg-config/boost contributed, kept on the instance.
    self.pkg_includes = []
    self.pkg_libraries = []
    self.pkg_library_directories = []
    self.pkg_macros = []

    # Updates for boost
    if boost_req:
        boost_pkg = boost(boost_req.replace('boost', '').strip())

        # Adds macros
        parameters['define_macros'] += boost_pkg.macros()

        # Adds the include directory (enough for using just the template library)
        if boost_pkg.include_directory not in user_includes:
            system_includes.append(boost_pkg.include_directory)
            self.pkg_includes.append(boost_pkg.include_directory)

        # Adds specific boost libraries requested by the user
        if boost_modules:
            boost_libdirs, boost_libraries = boost_pkg.libconfig(boost_modules)
            parameters['library_dirs'].extend(boost_libdirs)
            self.pkg_library_directories.extend(boost_libdirs)
            parameters['libraries'].extend(boost_libraries)
            self.pkg_libraries.extend(boost_libraries)

    # Checks all other pkg-config requirements
    pkgs = check_packages(packages)

    for pkg in pkgs:

        # Adds parameters for each package, in order
        parameters['define_macros'] += pkg.package_macros()
        self.pkg_macros += pkg.package_macros()

        # Include directories are added with a special path (-isystem)
        for k in pkg.include_directories():
            if k in user_includes or k in self.pkg_includes:
                continue
            system_includes.append(k)
            self.pkg_includes.append(k)

        parameters['library_dirs'] += pkg.library_directories()
        self.pkg_library_directories += pkg.library_directories()

        if pkg.name.find('bob-') == 0:  # one of bob's packages

            # make-up the names of versioned Bob libraries we must link against
            if platform.system() == 'Darwin':
                libs = ['%s.%s' % (k, pkg.version) for k in pkg.libraries()]
            elif platform.system() == 'Linux':
                libs = [':lib%s.so.%s' % (k, pkg.version) for k in pkg.libraries()]
            else:
                raise RuntimeError("supports only MacOSX and Linux builds")

        else:

            libs = pkg.libraries()

        parameters['libraries'] += libs
        self.pkg_libraries += libs

        parameters['extra_link_args'] += pkg.other_libraries()

    # add the -isystem to all system include dirs
    for k in system_includes:
        parameters['extra_compile_args'].extend(['-isystem', k])

    # Filter and make unique
    for key in parameters.keys():

        # Tune input parameters if they were set, but assure that our parameters come first
        if key in kwargs:
            kwargs[key] = parameters[key] + kwargs[key]
        else:
            kwargs[key] = parameters[key]

        # NOTE(review): ('extra_compile_args') is a plain string, not a
        # tuple, so this is a substring test; it matches only the intended
        # key with the current parameter names, but is fragile.
        if key in ('extra_compile_args'):
            continue

        kwargs[key] = uniq(kwargs[key])

    # add our include dir by default
    self_include_dir = resource_filename(__name__, 'include')
    kwargs.setdefault('include_dirs', []).append(self_include_dir)
    # user includes first, then bob's, then everything else
    kwargs['include_dirs'] = user_includes + bob_includes + kwargs['include_dirs']

    # Uniq'fy parameters that are not on our parameter list
    kwargs['include_dirs'] = uniq_paths(kwargs['include_dirs'])

    # Stream-line '-isystem' includes
    kwargs['extra_compile_args'] = reorganize_isystem(kwargs['extra_compile_args'])

    # Make sure the language is correctly set to C++
    kwargs['language'] = 'c++'

    # On Linux, set the runtime path
    if platform.system() == 'Linux':
        kwargs.setdefault('runtime_library_dirs', [])
        kwargs['runtime_library_dirs'] += kwargs['library_dirs']
        kwargs['runtime_library_dirs'] = uniq_paths(kwargs['runtime_library_dirs'])

    # .. except for the bob libraries
    kwargs['library_dirs'] += bob_library_dirs

    # Uniq'fy library directories
    kwargs['library_dirs'] = uniq_paths(kwargs['library_dirs'])

    # Run the constructor for the base class
    DistutilsExtension.__init__(self, name, sources, **kwargs)
from Cython.Distutils import build_ext except ImportError: use_cython = False else: use_cython = True cmdclass = { } ext_modules = [ ] #For this to work the .c files are not include in GIT except in the release #release branch (the c files would be created using python setup.py sdist) if use_cython: ext_modules += [ Extension("pygom.model._tau_leap", ["pygom/model/_tau_leap.pyx"], include_dirs=[numpy.get_include()], # extra_compile_args=['-fopenmp'], # extra_link_args=['-fopenmp']), ) ] cmdclass.update({ 'build_ext': build_ext }) else: # raise ImportError('You will need Cython installed to create' # 'the c extensions. Try installing with' # '"pip install cython" before installing PyGOM.') ext_modules += [ Extension("pygom.model._tau_leap", [ "pygom/model/_tau_leap.c" ], include_dirs=[numpy.get_include()], # extra_compile_args=['-fopenmp'], # extra_link_args=['-fopenmp']), )
cddlib_pxi.close() cddlib_f_pxi = open("cddlib_f.pxi", "w") cddlib_f_pxi.write( cddlib_pxi_in.replace("@cddhdr@", "cdd_f.h").replace("@dd@", "ddf").replace( "@mytype@", "myfloat")) cddlib_f_pxi.close() setup( name="pycddlib", version=version, ext_modules=[ Extension( "cdd", ["cdd.pyx"] + cddgmp_sources, include_dirs=[cdd_dir], depends=cddgmp_headers, define_macros=define_macros, libraries=libraries, ), ], author="Matthias Troffaes", author_email="*****@*****.**", license="GPL", keywords= "convex, polyhedron, linear programming, double description method", platforms="any", description=doclines[0], long_description="\n".join(doclines[2:]), url="http://pypi.python.org/pypi/pycddlib", classifiers=classifiers.split('\n'), setup_requires=[
def __init__(self, *args, **kwargs):
    """Initialize the base Extension, then post-process export_symbols.

    NOTE(review): ``finallist`` is not visible in this chunk; it appears
    to normalise the export-symbol list -- confirm its definition.
    """
    Extension.__init__(self, *args, **kwargs)
    self.export_symbols = finallist(self.export_symbols)
def initialize_options(self, *args, **kwargs): return self._command.initialize_options(*args, **kwargs) def finalize_options(self, *args, **kwargs): ret = self._command.finalize_options(*args, **kwargs) import numpy self.include_dirs.append(numpy.get_include()) return ret def run(self, *args, **kwargs): return self._command.run(*args, **kwargs) extensions = [ Extension('keras_retinanet.utils.compute_overlap', ['keras_retinanet/utils/compute_overlap.pyx']), ] setuptools.setup( name='keras-retinanet', version='0.5.1', description='Keras implementation of RetinaNet object detection.', url='https://github.com/fizyr/keras-retinanet', author='Hans Gaiser', author_email='*****@*****.**', maintainer='Hans Gaiser', maintainer_email='*****@*****.**', cmdclass={'build_ext': BuildExtension}, packages=setuptools.find_packages(), install_requires=[ 'keras-resnet==0.2.0', 'six', 'scipy', 'cython', 'Pillow',
def get_long_description(): return _read('README.md') install_requires = [ 'numpy', 'cython', 'click', 'cooler>=0.6', ] extensions = [ Extension( "cooltools.io.fastsavetxt", ["cooltools/io/fastsavetxt.pyx"], ), Extension( "cooltools.num.kernels", ["cooltools/num/kernels.pyx"], ) ] packages = find_packages() setup(name='cooltools', author='Mirny Lab', author_email='*****@*****.**', version=get_version(), license='BSD3', description= 'Analysis tools for genomic interaction data stored in .cool format',
from setuptools import setup
from Cython.Build import cythonize
from setuptools.extension import Extension

# Single Cython extension wrapping the C LoRaWAN helpers; links against
# OpenSSL's libcrypto for the packet crypto routines.
lorawan_ext = Extension(
    'tlwpy.liblorawan',
    ['liblorawan/lorawan.pyx', 'liblorawan/crypto.c', 'liblorawan/packet.c'],
    libraries=['crypto'],
    extra_compile_args=['-std=gnu99'],
)

setup(
    name='tlwpy',
    version='0.1',
    author='Daniel Palmer',
    author_email='*****@*****.**',
    license='GPLv3',
    packages=['tlwpy'],
    zip_safe=False,
    ext_modules=cythonize([lorawan_ext]),
)
if os.path.exists('MANIFEST'): os.remove('MANIFEST') if check_for_openmp() is True: omp_args = ['-fopenmp'] else: omp_args = None if os.name == "nt": std_libs = [] else: std_libs = ["m"] cython_extensions = [ Extension("yt_astro_analysis.ppv_cube.ppv_utils", ["yt_astro_analysis/ppv_cube/ppv_utils.pyx"], libraries=std_libs), ] extensions = [ Extension("yt_astro_analysis.halo_finding.fof.EnzoFOF", [ "yt_astro_analysis/halo_finding/fof/EnzoFOF.c", "yt_astro_analysis/halo_finding/fof/kd.c" ], libraries=std_libs), Extension("yt_astro_analysis.halo_finding.hop.EnzoHop", glob.glob("yt_astro_analysis/halo_finding/hop/*.c")), ] # ROCKSTAR if os.path.exists("rockstar.cfg"):
def __init__(self, *args, **kwargs):
    """Initialize the base Extension, then restore the class property.

    The base constructor stores ``include_dirs`` as a plain instance
    attribute, which shadows a property defined on this class.  The
    value is stashed under ``_include_dirs`` and the instance attribute
    deleted so the property takes over again.
    """
    Extension.__init__(self, *args, **kwargs)
    self._include_dirs = self.include_dirs
    del self.include_dirs  # restore overwritten property
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Installer and testing script for fanotify."""

from setuptools import setup
from setuptools.extension import Extension

# The whole package is a single C extension exposing the fanotify API.
EXT_MODULES = [
    Extension('fanotify', sources=['fanotify.c']),
]

setup(
    name='fanotify',
    version='0.1',
    author='Mike Gerow',
    author_email='*****@*****.**',
    description=('Library to interface with linux fanotify features.'),
    license='Apache 2.0',
    test_suite='nose.collector',
    ext_modules=EXT_MODULES,
)
with open('openpifpaf/__init__.py', 'r') as f: VERSION_LINE = [l for l in f if l.startswith('__version__')][0] VERSION = VERSION_LINE.split('=')[1].strip()[1:-1] class NumpyIncludePath(object): """Lazy import of numpy to get include path.""" @staticmethod def __str__(): import numpy return numpy.get_include() if cythonize is not None and numpy is not None: EXTENSIONS = cythonize([ Extension('openpifpaf.functional', ['openpifpaf/functional.pyx'], include_dirs=[numpy.get_include()]), ], annotate=True, compiler_directives={'language_level': 3}) else: EXTENSIONS = [ Extension('openpifpaf.functional', ['openpifpaf/functional.pyx'], include_dirs=[NumpyIncludePath()]) ] setup( name='openpifpaf', version=VERSION, packages=[ 'openpifpaf', 'openpifpaf.decoder',
def _remove_prefix(string, prefix='hadoopy/'): if string.startswith(prefix): return string[len(prefix):] glibc_version = get_glibc_version() tb_extra_args = [] if sys.byteorder != 'little': tb_extra_args.append('-D BYTECONVERSION_ISBIGENDIAN') if glibc_version and (glibc_version[0] == 2 and glibc_version[1] >= 9): tb_extra_args.append('-D BYTECONVERSION_HASENDIAN_H') # Since package_data doesn't handle directories, we find all of the files thirdparty_paths = map(_remove_prefix, _glob_recursive('hadoopy/thirdparty/*')) ext_modules = [Extension("_hadoopy_main", ["hadoopy/_hadoopy_main" + source_ext, "hadoopy/getdelim.c"]), Extension("_hadoopy_typedbytes", ["hadoopy/_hadoopy_typedbytes" + source_ext], extra_compile_args=tb_extra_args)] setup(name='hadoopy', cmdclass=cmdclass, version='0.6.0', packages=find_packages(), package_data={'hadoopy': thirdparty_paths}, author='Brandyn A. White', author_email='*****@*****.**', license='GPLv3', url='https://github.com/bwhite/hadoopy', ext_modules=ext_modules)
from setuptools import setup
from setuptools.extension import Extension
from Cython.Build import cythonize
import numpy

# Cython compiler directives kept for reference; enable to tune codegen.
#import Cython.Compiler.Options
#Cython.Compiler.Options.get_directive_defaults()['cdivision'] = True
#Cython.Compiler.Options.get_directive_defaults()['boundscheck'] = False
#Cython.Compiler.Options.get_directive_defaults()['wraparound'] = False
#Cython.Compiler.Options.get_directive_defaults()['profile'] = True

# The two Cython-accelerated kernels of slowquant.
cython_extensions = [
    Extension('slowquant.molecularintegrals.runMIcython',
              ['slowquant/molecularintegrals/runMIcython.pyx']),
    Extension('slowquant.coupledcluster.CythonCC',
              ['slowquant/coupledcluster/CythonCC.pyx']),
]

setup(ext_modules=cythonize(cython_extensions),
      include_dirs=[numpy.get_include()])
version=__version__, platforms='Windows, Linux, Darwin', author=__author__, author_email='*****@*****.**', maintainer='Brockmann Consult GmbH', maintainer_email='*****@*****.**', license=__license__, url='https://github.com/bcdev/jpy', download_url='https://pypi.python.org/pypi/jpy/' + __version__, py_modules=['jpyutil'], ext_modules=[ Extension('jpy', sources=sources, depends=headers, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args, extra_compile_args=extra_compile_args, define_macros=define_macros), Extension('jdl', sources=[os.path.join(src_main_c_dir, 'jni/org_jpy_DL.c')], depends=[os.path.join(src_main_c_dir, 'jni/org_jpy_DL.h')], include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args, extra_compile_args=extra_compile_args, define_macros=define_macros), ], test_suite='setup.test_suite',
try: if (force_cython or not os.path.exists(c_file) or os.path.getmtime(pyx_file) > os.path.getmtime(c_file)): log.info("Updating C extension with Cython.") subprocess.check_call(["cython", "shapely/speedups/_speedups.pyx"]) except (subprocess.CalledProcessError, OSError): log.warn("Could not (re)create C extension with Cython.") if force_cython: raise if not os.path.exists(c_file): log.warn("speedup extension not found") ext_modules = [ Extension("shapely.speedups._speedups", ["shapely/speedups/_speedups.c"], include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_link_args=extra_link_args) ] cmd_classes = setup_args.setdefault('cmdclass', {}) try: import numpy from Cython.Distutils import build_ext as cython_build_ext from distutils.extension import Extension as DistutilsExtension if 'build_ext' in setup_args['cmdclass']: raise ValueError('We need to put the Cython build_ext in ' 'cmd_classes, but it is already defined.') setup_args['cmdclass']['build_ext'] = cython_build_ext
"../cpp/src/twisterx/util", arrow_library_directory, arrow_lib_include_dir, pyarrow_include_dir, np.get_include(), ] # Adopted the Cudf Python Build format # https://github.com/rapidsai/cudf extensions = [ Extension( "*", sources=cython_files, include_dirs=_include_dirs, language='c++', extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, libraries=libraries, library_dirs=library_directories, ) ] compiler_directives = {"language_level": 3, "embedsignature": True} packages = find_packages(include=["pytwisterx", "pytwisterx.*"]) setup( name="pytwisterx", packages=packages, version='0.0.1', setup_requires=[ "cython",
include_package_data=True, zip_safe=False, setup_requires=['numpy>=1.5', 'cython>=0.26'], install_requires=['numpy>=1.5', 'nose>=0.11', 'cython>=0.26', 'matplotlib>1.0.0', 'h5py>=2.0.0', 'molmod>=1.4.1', 'scipy>=0.17.1'], ext_modules=[ Extension("yaff.pes.ext", sources=['yaff/pes/ext.pyx', 'yaff/pes/nlist.c', 'yaff/pes/pair_pot.c', 'yaff/pes/ewald.c', 'yaff/pes/comlist.c', 'yaff/pes/dlist.c', 'yaff/pes/grid.c', 'yaff/pes/iclist.c', 'yaff/pes/vlist.c', 'yaff/pes/cell.c', 'yaff/pes/truncation.c', 'yaff/pes/slater.c', 'yaff/pes/tailcorr.c'], depends=['yaff/pes/nlist.h', 'yaff/pes/nlist.pxd', 'yaff/pes/pair_pot.h', 'yaff/pes/pair_pot.pxd', 'yaff/pes/ewald.h', 'yaff/pes/ewald.pxd', 'yaff/pes/comlist.h', 'yaff/pes/comlist.pxd', 'yaff/pes/dlist.h', 'yaff/pes/dlist.pxd', 'yaff/pes/grid.h', 'yaff/pes/grid.pxd', 'yaff/pes/iclist.h', 'yaff/pes/iclist.pxd', 'yaff/pes/vlist.h', 'yaff/pes/vlist.pxd', 'yaff/pes/cell.h', 'yaff/pes/cell.pxd', 'yaff/pes/truncation.h', 'yaff/pes/truncation.pxd', 'yaff/pes/slater.h', 'yaff/pes/slater.pxd', 'yaff/pes/constants.h', 'yaff/pes/tailcorr.h'], include_dirs=[np.get_include()], ), ], classifiers=[ 'Environment :: Console', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python :: 2.7',
data_files = [('share/caiman', ['LICENSE.txt', 'README.md', 'test_demos.sh']), ('share/caiman/example_movies', ['example_movies/data_endoscope.tif', 'example_movies/demoMovie.tif']), ('share/caiman/testdata', ['testdata/groundtruth.npz', 'testdata/example.npz']) ] for part in extra_dirs: newpart = [("share/caiman/" + d, [os.path.join(d,f) for f in files]) for d, folders, files in os.walk(part)] for newcomponent in newpart: data_files.append(newcomponent) data_files.append(['bin', binaries]) ############ # compile with: python setup.py build_ext -i # clean up with: python setup.py clean --all ext_modules = [Extension("caiman.source_extraction.cnmf.oasis", sources=["caiman/source_extraction/cnmf/oasis.pyx"], include_dirs=[np.get_include()], language="c++")] setup( name='caiman', version='1.0', author='Andrea Giovannucci, Eftychios Pnevmatikakis, Johannes Friedrich, Valentina Staneva, Ben Deverett, Erick Cobos, Jeremie Kalfon', author_email='*****@*****.**', url='https://github.com/simonsfoundation/CaImAn', license='GPL-2', description='Advanced algorithms for ROI detection and deconvolution of Calcium Imaging datasets.', long_description=readme, # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha
import numpy
import sys
from setuptools import setup
from setuptools.extension import Extension
from Cython.Build import cythonize

# Package holding the Cython helpers ported from darkflow.
pkg = 'tensornets.references.darkflow_utils'
pkg_dir = pkg.replace('.', '/')

ext_modules = []
for module in ['nms', 'get_boxes']:
    ext_modules.append(
        Extension("%s.%s" % (pkg, module),
                  sources=["%s/%s.pyx" % (pkg_dir, module)],
                  # libm is implicit on Windows toolchains.
                  libraries=([] if sys.platform.startswith("win") else ['m']),
                  include_dirs=[numpy.get_include()]))

setup(name='tensornets',
      version='0.3.5',
      description='high level network definitions in tensorflow',
      author='Taehoon Lee',
      author_email='*****@*****.**',
      url='https://github.com/taehoonlee/tensornets',
      download_url='https://github.com/taehoonlee/tensornets/tarball/0.3.5',
      license='MIT',
      packages=['tensornets',
                'tensornets.datasets',
                'tensornets.references',
                pkg],
      include_package_data=True,
      ext_modules=cythonize(ext_modules))
#!/usr/bin/env python if __name__ == '__main__': from setuptools import setup from setuptools.extension import Extension from Cython.Build import cythonize import numpy as np ext = [] ext += [ Extension(name='rbf.halton', sources=['rbf/halton.pyx'], include_dirs=[np.get_include()]) ] ext += [ Extension(name='rbf.misc.bspline', sources=['rbf/misc/bspline.pyx'], include_dirs=[np.get_include()]) ] ext += [ Extension(name='rbf.geometry', sources=['rbf/geometry.pyx'], include_dirs=[np.get_include()]) ] ext += [ Extension(name='rbf.poly', sources=['rbf/poly.pyx'], include_dirs=[np.get_include()]) ] setup( name='RBF', version='2018.10.31', description=
cmdclass = {} ext_modules = [] # pypy detection PYPY = "__pypy__" in sys.modules UNIX = platform.system() in ("Linux", "Darwin") # only build ext in CPython with UNIX platform if UNIX and not PYPY: # rebuild .c files if cython available if CYTHON: cythonize("thriftpy/transport/cybase.pyx") cythonize("thriftpy/transport/**/*.pyx") cythonize("thriftpy/protocol/cybin/cybin.pyx") ext_modules.append(Extension("thriftpy.transport.cybase", ["thriftpy/transport/cybase.c"])) ext_modules.append(Extension("thriftpy.transport.buffered.cybuffered", ["thriftpy/transport/buffered/cybuffered.c"])) ext_modules.append(Extension("thriftpy.transport.memory.cymemory", ["thriftpy/transport/memory/cymemory.c"])) ext_modules.append(Extension("thriftpy.transport.framed.cyframed", ["thriftpy/transport/framed/cyframed.c"])) ext_modules.append(Extension("thriftpy.protocol.cybin", ["thriftpy/protocol/cybin/cybin.c"])) setup(name="thriftpy", version=version, description="Pure python implementation of Apache Thrift.", keywords="thrift python thriftpy", author="Lx Yu", author_email="*****@*****.**",
author_email="*****@*****.**", license="MIT", classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering :: Mathematics", ], setup_requires=["pytest-runner"], install_requires=["pybind11>=2.2"], tests_require=["pytest"], packages=["pysarplus"], ext_modules=[ Extension( "pysarplus_cpp", ["src/pysarplus.cpp"], include_dirs=[get_pybind_include(), get_pybind_include(user=True)], extra_compile_args=sysconfig.get_config_var("CFLAGS").split() + ["-std=c++11", "-Wall", "-Wextra"], libraries=["stdc++"], language="c++11", ) ], zip_safe=False, )
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
from numpy import get_include

cy_sphere = Extension('pypuffersphere.sphere.sphere_cy',
                      sources=['pypuffersphere/sphere/sphere_cy.pyx'])
# doesn't work on OSX unless you do this explicitly for some reason...
cy_sphere.include_dirs = [get_include()]

setup(
    # basic info
    name='pypuffersphere',
    version='0.0.1',
    packages=find_packages(),
    # also pygame, but can't easily install that like this
    install_requires=['numpy', 'pyopengl', 'pyglet', 'Cython', 'pyosc'],
    # files
    ext_modules=cythonize([cy_sphere]),
    include_package_data=True,
    # metadata
    description='Python/Pyglet code for rendering on the PufferSphere',
    author='John Williamson',
    author_email='*****@*****.**',
    url='https://github.com/johnhw/pypypuffersphere',
    keywords=['pypuffersphere', 'sphere', 'touch', 'spherical'],
    classifiers=[],
)
#!/usr/bin/env python import os import sys from setuptools import setup, find_packages from setuptools.extension import Extension try: from Cython.Build import cythonize USE_CYTHON = True except ImportError: USE_CYTHON = False ext = 'pyx' if USE_CYTHON else 'c' extensions = [ Extension('blitzloop._audio', ['blitzloop/_audio.%s' % ext], libraries=['jack']), ] if USE_CYTHON: extensions = cythonize(extensions) # res_files = [] # for dirpath, dirname, files in os.walk('blitzloop/res'): # for fn in files: # res_files.append(os.path.join(dirpath, fn)) # print res_files if sys.version_info[0] >= 3: extra_requires = [] else: extra_requires = ['3to2']
def __init__(self, names, sources, openmp=False, **kw):
    """Extension variant that records an ``openmp`` build flag.

    The flag is only stored on the instance -- presumably for a build
    command to inspect later; everything else is forwarded unchanged to
    the base class.
    """
    self.openmp = openmp
    _Extension.__init__(self, names, sources, **kw)
# look for libraries in _PREFIX library_dirs = [os.path.join(_PREFIX, "lib")] include_dirs = [os.path.join(_PREFIX, "include")] # also look in LIBRARY_PATH, CPATH (needed for macports etc.) if "LIBRARY_PATH" in os.environ: library_dirs += os.environ["LIBRARY_PATH"].rstrip(os.pathsep).split(os.pathsep) if "CPATH" in os.environ: include_dirs += os.environ["CPATH"].rstrip(os.pathsep).split(os.pathsep) # define each extension ext_Emhaplofreq = Extension("_Emhaplofreqmodule", ["emhaplofreq/emhaplofreq_wrap.i", "emhaplofreq/emhaplofreq.c"], swig_opts = ["-ISWIG"], include_dirs=include_dirs + ["emhaplofreq"], define_macros=[('__SWIG__', '1'), ('DEBUG', '0'), ('EXTERNAL_MODE', '1'), ('XML_OUTPUT', '1')] ) ext_EWSlatkinExact = Extension("_EWSlatkinExactmodule", ["slatkin-exact/monte-carlo_wrap.i", "slatkin-exact/monte-carlo.c"], swig_opts = ["-ISWIG"], ) ext_Pvalue = Extension("_Pvaluemodule", ["pval/pval_wrap.i", "pval/pval.c", "pval/pchisq.c", "pval/chebyshev.c",
if proc.returncode != 0: print "WARN: fail to build Google url code from SVN, error code: ", proc.returncode def run(self): self.checkout_googleurl() self.patch_googleurl() self.build_googleurl() _build.run(self) gurl_module = Extension( name="_gurl", sources=[os.path.join("src", file) for file in source_files], define_macros=macros, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, ) setup( name="python-google-url", version="0.5", cmdclass={'build': build}, ext_package="gurl", ext_modules=[gurl_module], packages=["gurl"], package_dir={"gurl": os.path.join("src", "gurl")}, package_data={"gurl": ["*.dat", "*.dll"]},
# TODO: # - Wrap learning. # - Make LabelCompatibility, UnaryEnergy, PairwisePotential extensible? (Maybe overkill?) # If Cython is available, build using Cython. # Otherwise, use the pre-built (by someone who has Cython, i.e. me) wrapper `.cpp` files. try: from Cython.Build import cythonize ext_modules = cythonize( ['pydensecrf/eigen.pyx', 'pydensecrf/densecrf.pyx']) except ImportError: from setuptools.extension import Extension ext_modules = [ Extension("pydensecrf/eigen", ["pydensecrf/eigen.cpp", "pydensecrf/eigen_impl.cpp"], language="c++", include_dirs=["pydensecrf/densecrf/include"]), Extension("pydensecrf/densecrf", [ "pydensecrf/densecrf.cpp", "pydensecrf/densecrf/src/densecrf.cpp", "pydensecrf/densecrf/src/unary.cpp", "pydensecrf/densecrf/src/pairwise.cpp", "pydensecrf/densecrf/src/permutohedral.cpp", "pydensecrf/densecrf/src/optimization.cpp", "pydensecrf/densecrf/src/objective.cpp", "pydensecrf/densecrf/src/labelcompatibility.cpp", "pydensecrf/densecrf/src/util.cpp", "pydensecrf/densecrf/external/liblbfgs/lib/lbfgs.c" ], language="c++", include_dirs=[ "pydensecrf/densecrf/include",
EXTRA_LINK_ARGS.append("-Wl,-rpath," + LIBS_INSTALL_DIR) else: EXTRA_LINK_ARGS.append("-Wl,-rpath=%r" % LIBS_INSTALL_DIR + ",--no-as-needed") LIBRARY_DIRS.append(DYNET_LIB_DIR) INCLUDE_DIRS[:] = filter(None, [PROJECT_SOURCE_DIR, EIGEN3_INCLUDE_DIR]) TARGET = [ Extension( "_dynet", # name of extension ["_dynet.pyx"], # filename of our Pyrex/Cython source language="c++", # this causes Pyrex/Cython to create C++ source include_dirs=INCLUDE_DIRS, libraries=LIBRARIES, library_dirs=LIBRARY_DIRS, extra_link_args=EXTRA_LINK_ARGS, extra_compile_args=COMPILER_ARGS, runtime_library_dirs=RUNTIME_LIB_DIRS, ) ] class build(_build): user_options = [ ("build-dir=", None, "New or existing DyNet build directory."), ("skip-build", None, "Assume DyNet C++ library is already built."), ] def __init__(self, *args, **kwargs):