def __init__(self, *args, **kwargs):
    """Extension preconfigured with numpy's npymath build info and headers."""
    from numpy import get_include
    from numpy.distutils.misc_util import get_info

    # Merge the npymath library/include settings into the caller's kwargs.
    kwargs.update(get_info('npymath'))
    # Append (in place) the numpy header directory to the include path.
    kwargs['include_dirs'].append(get_include())
    Extension.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """A placeholder extension that records why it cannot be built."""
    # Capture the caller's error object, if any, without leaking the
    # 'error' key through to the base Extension constructor.
    _missing = object()
    err = kwargs.pop('error', _missing)
    if err is not _missing:
        self.error = err
    # Replace name/sources with stub values: this extension never compiles.
    args = ('Cannot be built', [])
    _Extension.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """Extension that appends bundled EPR C sources when the caller lists none."""
    self._include_dirs = []
    # Optional location of the EPR C sources; consumed before the base init.
    eprsrcdir = kwargs.pop('eprsrcdir', None)
    Extension.__init__(self, *args, **kwargs)
    # If no 'epr_*' file appears among the sources, pull in the bundled ones.
    if all('epr_' not in src for src in self.sources):
        self.sources.extend(self._extra_sources(eprsrcdir))
    self.setup_requires_cython = False
def __init__(self, *args, **kwargs):
    """Extension that pins its Cython directives and re-asserts its sources."""
    Extension.__init__(self, *args, **kwargs)
    # XXX with pip, setuptools is imported before distutils and rewrites our
    # .pyx sources to .c, after which cythonize never runs.  Re-assign the
    # source list the caller passed so cythonization happens again.
    self.sources = args[1]
    directives = dict(
        c_string_encoding='utf-8',
        profile='USE_PROFILE' in environ,
        embedsignature='USE_EMBEDSIGNATURE' in environ,
    )
    self.cython_directives = directives
def __init__(self, key, **opts):
    """Describe the cboodle extension for one audio device driver *key*."""
    self.boodler_key = key
    module_name = 'boodle.cboodle_' + key
    # Each driver builds from its own audev/cboodle pair plus shared sources.
    stems = ['audev-' + key, 'cboodle-' + key, 'noteq', 'sample']
    sources = ['src/cboodle/%s.c' % stem for stem in stems]
    # Optional availability predicate, consumed before the base init.
    avail = opts.pop('available', None)
    if avail:
        self.ext_available = avail
    Extension.__init__(self, module_name, sources, **opts)
def __init__(self, name):
    """Configure the readline extension: sources, search paths, build tweaks."""
    sources = [
        'rl/readline.c',
        'rl/stringarray.c',
        'rl/unicode.c',
        'rl/iterator.c',
        'rl/modulestate.c',
    ]
    Extension.__init__(self, name, sources)
    # Inherit include and library directories from the running Python build.
    self.use_include_dirs()
    self.use_library_dirs()
    # Framework builds of Mac Python keep libpython under /Library/Frameworks.
    if sys.platform == 'darwin' and sys_path_contains('/Library/Frameworks/Python.framework'):
        framework_lib = ('/Library/Frameworks/Python.framework/Versions/%d.%d/lib'
                         % sys.version_info[:2])
        self.library_dirs.append(framework_lib)
    self.use_static_readline()
    self.suppress_warnings()
    self.strip_debug_symbols()
def setup_package():
    """Entry point: read package metadata, build extension specs, run setup().

    Supports 'python setup.py clean' as a short-circuit that only cleans
    build artifacts.
    """
    root = os.path.abspath(os.path.dirname(__file__))
    # 'clean' bypasses the whole build/metadata machinery.
    if len(sys.argv) > 1 and sys.argv[1] == 'clean':
        return clean(root)
    with chdir(root):
        # about.py holds __title__/__version__/... as plain assignments;
        # exec-ing it avoids importing the (possibly unbuilt) package.
        with open(os.path.join(root, 'spacy', 'about.py')) as f:
            about = {}
            exec(f.read(), about)
        with open(os.path.join(root, 'README.rst')) as f:
            readme = f.read()
        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, 'include')]
        ext_modules = []
        for mod_name in MOD_NAMES:
            # Cython modules are compiled from their pre-generated .cpp files.
            mod_path = mod_name.replace('.', '/') + '.cpp'
            ext_modules.append(
                Extension(mod_name, [mod_path], language='c++',
                          include_dirs=include_dirs))
        # Source releases ship the generated .cpp files; only regenerate
        # from .pyx when building from a development checkout.
        if not is_source_release(root):
            generate_cython(root, 'spacy')
        setup(
            name=about['__title__'],
            zip_safe=False,
            packages=PACKAGES,
            package_data={'': ['*.pyx', '*.pxd', '*.txt', '*.tokens']},
            description=about['__summary__'],
            long_description=readme,
            author=about['__author__'],
            author_email=about['__email__'],
            version=about['__version__'],
            url=about['__uri__'],
            license=about['__license__'],
            ext_modules=ext_modules,
            install_requires=[
                'numpy',
                'murmurhash>=0.26,<0.27',
                'cymem>=1.30,<1.32.0',
                'preshed>=0.46.1,<0.47',
                'thinc>=5.0.0,<5.1.0',
                'plac',
                'six',
                'ujson',
                'cloudpickle',
                'sputnik>=0.9.2,<0.10.0'],
            classifiers=[
                'Development Status :: 5 - Production/Stable',
                'Environment :: Console',
                'Intended Audience :: Developers',
                'Intended Audience :: Science/Research',
                'License :: OSI Approved :: MIT License',
                'Operating System :: POSIX :: Linux',
                'Operating System :: MacOS :: MacOS X',
                'Operating System :: Microsoft :: Windows',
                'Programming Language :: Cython',
                'Programming Language :: Python :: 2.6',
                'Programming Language :: Python :: 2.7',
                'Programming Language :: Python :: 3.3',
                'Programming Language :: Python :: 3.4',
                'Programming Language :: Python :: 3.5',
                'Topic :: Scientific/Engineering'],
            cmdclass={'build_ext': build_ext_subclass},
        )
print("Done!") if not os.path.isdir(SQLPARSER_DIR): download_library() # check again (the user might have downloaded the library) if os.path.isdir(SQLPARSER_DIR): parsebridge = Extension('sqlparser', sources = ['Parser.c', 'Statement.c', 'Node.c', 'ENodeType.c', 'parsebridgemodule.c', SQLPARSER_DIR + 'ext/node_visitor/node_visitor.c', SQLPARSER_DIR + 'ext/expr_traverse/expr_traverse.c', SQLPARSER_DIR + 'ext/modifysql/modifysql.c' ], include_dirs = [ SQLPARSER_DIR + 'core/', SQLPARSER_DIR + 'ext/collection/includes/', SQLPARSER_DIR + 'ext/expr_traverse/', SQLPARSER_DIR + 'ext/modifysql/', SQLPARSER_DIR + 'ext/node_visitor/' ], library_dirs = [ SQLPARSER_DIR + '/lib/' ], libraries = [ 'gspcollection', 'gspcore' ], define_macros = [ ('_CRT_SECURE_NO_WARNINGS', None), ('DONT_FIX_FRAGMENTS', None), ], extra_compile_args = ['-Wno-strict-prototypes'], ) if sys.platform == 'win32' or sys.platform == 'win64': parsebridge.extra_link_args = [ '/MANIFEST', '/DEBUG' ] parsebridge.extra_compile_args = [ '/Zi' ] setup (name = 'sqlparser', version = '1.0', description = 'A package for parsing SQL queries',
return filenames kinectsdk_dir = os.environ.get('KINECTSDK10_DIR', '') if kinectsdk_dir: kinectsdk_inc = os.path.join(kinectsdk_dir, 'inc') kinectsdk_lib = os.path.join(kinectsdk_dir, 'lib', distutils.msvc9compiler.PLAT_TO_VCVARS[get_platform()]) else: warn("Cannot find KINECTSDK10_DIR environment variable. You will need to install the Kinect for Windows SDK if building.") pykinectaudio_ext = Extension( 'pykinect.audio.PyKinectAudio', include_dirs=filter(None, ['src', kinectsdk_inc]), libraries=['Msdmo', 'dmoguids', 'mf', 'mfuuid', 'mfplat', 'avrt', 'Kinect10'], library_dirs=filter(None, [kinectsdk_lib]), sources=[ 'src\\stdafx.cpp', 'src\\PyKinectAudio.cpp', 'src\\AudioStream.cpp', 'src\\MediaBuffer.cpp', ], ) pykinectaudio_ext.headers=[ 'src\\AudioStream.h', 'src\\MediaBuffer.h', 'src\\PyKinectAudio.h', 'src\\stdafx.h', 'src\\targetver.h', ]
def __init__(self, *args, **kwargs):
    """Extension that additionally carries AVX2-specific macro definitions."""
    # Extract our custom keyword before distutils sees (and rejects) it.
    try:
        self.avx2_defs = kwargs.pop("avx2_defs")
    except KeyError:
        self.avx2_defs = {}
    Extension.__init__(self, *args, **kwargs)
author='OED/SSB, etc', author_email='*****@*****.**', description='JWST', url='http://ssb.stsci.edu', license='BSD', classifiers=[ 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], scripts=SCRIPTS, packages=find_packages(), package_data=PACKAGE_DATA, ext_modules=[ Extension('jwst.tweakreg.chelp', glob('src/tweakreg/*.c'), include_dirs=[np_include()], define_macros=[('NUMPY', '1')]), ], tests_require=[ 'backports.tempfile', 'pytest', 'requests_mock', 'pytest-catchlog' ], cmdclass={ 'test': PyTest }, )
source_extension('ring'), os.path.join(here, 'src', 'hash_ring.c'), os.path.join(here, 'src', 'md5.c'), os.path.join(here, 'src', 'sha1.c'), os.path.join(here, 'src', 'sort.c'), ], extra_compile_args=['-std=c99'], ), } # collect extensions for module, kwargs in modules.items(): kwargs = dict(extension_kwargs, **kwargs) kwargs.setdefault('sources', [source_extension(module)]) kwargs['sources'] = prepare_sources(kwargs['sources']) ext = Extension('{0}.{1}'.format(PACKAGE, module), **kwargs) if suffix == '.pyx' and ext.sources[0].endswith('.c'): # undo setuptools stupidly clobbering cython sources: ext.sources = kwargs['sources'] extensions.append(ext) #----------------------------------------------------------------------------- # Description, version and other meta information. #----------------------------------------------------------------------------- re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)') re_vers = re.compile(r'VERSION\s*=\s*\((.*?)\)') re_doc = re.compile(r'^"""(.+?)"""') rq = lambda s: s.strip("\"'")
import warnings try: from Cython.Distutils import build_ext from setuptools import setup, Extension HAVE_CYTHON = True except ImportError as e: warnings.warn(e.message) from setuptools import setup, Extension from setuptools.command.build_ext import build_ext HAVE_CYTHON = False import numpy _hdbscan_tree = Extension('hdbscan._hdbscan_tree', sources=['hdbscan/_hdbscan_tree.pyx'], include_dirs=[numpy.get_include()]) _hdbscan_linkage = Extension('hdbscan._hdbscan_linkage', sources=['hdbscan/_hdbscan_linkage.pyx'], include_dirs=['hdbscan', numpy.get_include()]) _hdbscan_boruvka = Extension('hdbscan._hdbscan_boruvka', sources=['hdbscan/_hdbscan_boruvka.pyx'], include_dirs=['hdbscan', numpy.get_include()]) _hdbscan_reachability = Extension('hdbscan._hdbscan_reachability', sources=['hdbscan/_hdbscan_reachability.pyx'], include_dirs=[numpy.get_include()]) dist_metrics = Extension('hdbscan.dist_metrics', sources=['hdbscan/dist_metrics.pyx'], include_dirs=[numpy.get_include()]) def readme():
# README.md doubles as the PyPI long description.
with open("README.md", "r") as f:
    long_description = f.read()

setup(
    name="robustats",
    version="0.1.7",
    description="Robustats is a Python library for high-performance computation"
                " of robust statistical estimators.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Programming Language :: Python :: 3",
    ],
    url="https://github.com/FilippoBovo/robustats",
    # Keep the tarball link in sync with `version` above (it previously
    # pointed at the stale v0.1.5 tag).
    download_url="https://github.com/FilippoBovo/robustats/archive/"
                 "v0.1.7.tar.gz",
    author="Filippo Bovo",
    author_email="*****@*****.**",
    license="MIT",
    packages=["robustats"],
    install_requires=["numpy"],
    ext_modules=[
        Extension(
            name="_robustats",
            # C implementation plus the thin Python-facing wrapper module.
            sources=["c/_robustats.c", "c/robustats.c", "c/base.c"],
            extra_compile_args=["-std=c99"],
            include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(),
        )
    ],
)
ext_modules = [ # Extension( # 'python_example', # ['src/main.cpp'], # include_dirs=[ # # Path to pybind11 headers # get_pybind_include(), # get_pybind_include(user=True) # ], # language='c++' # ), Extension( 'python_example', ['src/stsp.cpp'], include_dirs=[ # Path to pybind11 headers get_pybind_include(), get_pybind_include(user=True) ], language='c++'), ] # As of Python 3.6, CCompiler has a `has_flag` method. # cf http://bugs.python.org/issue26689 def has_flag(compiler, flagname): """Return a boolean indicating whether a flag name is supported on the specified compiler. """ import tempfile with tempfile.NamedTemporaryFile('w', suffix='.cpp') as f:
extra_files = ['sources.list', 'includes.list'] + original_sources else: # if amalgamation does not exist, we are in a package distribution # read the include files, source list and include files from the supplied lists with open_utf8('sources.list', 'r') as f: duckdb_sources = [x for x in f.read().split('\n') if len(x) > 0] with open_utf8('includes.list', 'r') as f: duckdb_includes = [x for x in f.read().split('\n') if len(x) > 0] source_files += duckdb_sources include_directories = duckdb_includes + include_directories libduckdb = Extension('duckdb', include_dirs=include_directories, sources=source_files, extra_compile_args=toolchain_args, extra_link_args=toolchain_args, language='c++') else: sys.path.append(os.path.join(script_path, '..', '..', 'scripts')) import package_build toolchain_args += ['-I' + x for x in package_build.includes(extensions)] result_libraries = package_build.get_libraries(existing_duckdb_dir, libraries, extensions) library_dirs = [x[0] for x in result_libraries if x[0] is not None] libnames = [x[1] for x in result_libraries if x[1] is not None] libduckdb = Extension('duckdb', include_dirs=include_directories,
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", ], keywords="skycoin crypto coin currency blockchain", # Optional py_modules=["skycoin"], packages=find_packages(exclude=["contrib", "docs", "tests"]), # Required install_requires=[], extras_require={ "dev": ["check-manifest"], "test": ["coverage"] }, # Optional package_data={}, entry_points={"console_scripts": []}, cmdclass={"build_ext": skycoin_build_ext}, ext_modules=[ Extension( "_skycoin", ["swig/pyskycoin_wrap.c"], include_dirs=["swig/include", path.join(skypath, "include")], extra_link_args=extra_link_args, depends=[], ) ], )
] ######################################################################################## # Extension specs # TODO: Cython recommends to include the generated cpp files in the source distribution # and try to build from those first, only regenerating the cpp files from cython as a # fallback. We don't do this currently, but since we are going to ship wheels, it won't # be so bad since most users can just install the wheels. Read about this here: # https://cython.readthedocs.io/en/latest/src/userguide/source_files_and_compilation.html#distributing-cython-modules # Also: Does that mean we have to store the generated cpp files in git? extensions = [ Extension( name="pupil_detectors.detector_base", sources=[f"{package_dir}/pupil_detectors/detector_base.pyx"], language="c++", extra_compile_args=extra_compile_args, ), Extension( name="pupil_detectors.detector_2d.detector_2d", sources=[ f"{package_dir}/pupil_detectors/detector_2d/detector_2d.pyx", f"{package_dir}/singleeyefitter/ImageProcessing/cvx.cpp", f"{package_dir}/singleeyefitter/utils.cpp", f"{package_dir}/singleeyefitter/detectorUtils.cpp", ], language="c++", include_dirs=include_dirs, libraries=libraries, library_dirs=library_dirs, extra_compile_args=extra_compile_args,
if not os.path.exists(eigenpath): print('Downloading Eigen...') urlretrieve(eigenurl, eigentarpath) with tarfile.open(eigentarpath, 'r') as tar: tar.extractall('deps') thedir = glob(os.path.join('deps', 'eigen-eigen-*'))[0] shutil.move(os.path.join(thedir, 'Eigen'), eigenpath) print('...done!') # make a list of extension modules extension_pathspec = os.path.join('pyhsmm','**','*.pyx') # not recursive before Python 3.5 paths = [os.path.splitext(fp)[0] for fp in glob(extension_pathspec)] names = ['.'.join(os.path.split(p)) for p in paths] ext_modules = [ Extension( name, sources=[path + '.cpp'], include_dirs=['deps'], extra_compile_args=['-O3','-std=c++11','-DNDEBUG','-w','-DHMM_TEMPS_ON_HEAP']) for name, path in zip(names,paths)] # if using cython, rebuild the extension files from the .pyx sources if use_cython: from Cython.Build import cythonize try: ext_modules = cythonize(extension_pathspec) except: warn('Failed to generate extension module code from Cython files') # put it all together with a call to setup() setup(name='pyhsmm', version='0.1.6', description="Bayesian inference in HSMMs and HMMs",
from _setupares import ARES CORE = cythonize1(build_libev_extension()) # Modules that we cythonize for performance. # Be careful not to use simple names for these modules, # as the non-static symbols cython generates do not include # the module name. Thus an extension of 'gevent._queue' # results in symbols like 'PyInit__queue', which is the same # symbol used by the standard library _queue accelerator module. # The name of the .pxd file must match the local name of the accelerator # extension; however, sadly, the generated .c and .html files will # still use the same name as the .py source. SEMAPHORE = Extension(name="gevent._gevent_c_semaphore", sources=["src/gevent/_semaphore.py"], depends=['src/gevent/_gevent_c_semaphore.pxd'], include_dirs=get_include_dirs()) LOCAL = Extension(name="gevent._gevent_clocal", sources=["src/gevent/local.py"], depends=['src/gevent/_gevent_clocal.pxd'], include_dirs=get_include_dirs()) GREENLET = Extension(name="gevent._gevent_cgreenlet", sources=[ "src/gevent/greenlet.py", ], depends=[ 'src/gevent/_gevent_cgreenlet.pxd', 'src/gevent/_gevent_c_ident.pxd', 'src/gevent/_ident.py'
def create_exension():
    """Assemble the CaChannel._ca Extension with platform-specific EPICS settings.

    Returns a tuple ``([ca_module], dlls)``: a one-element list holding the
    configured Extension, plus the EPICS DLLs that must ship alongside it on
    Windows shared builds.
    """
    global EPICSBASE, HOSTARCH
    umacros = []        # macros to undefine
    macros = []         # (name, value) macros to define
    cflags = []         # extra compile args
    lflags = []         # extra link args
    dlls = []           # EPICS runtime DLLs to bundle (Windows, shared)
    extra_objects = []  # static archives linked directly (static builds)
    libraries = ["ca", "Com"]
    CMPL = 'gcc'
    UNAME = platform.system()
    ARCH = platform.architecture()[0]
    # platform dependent libraries and macros
    if UNAME.lower() == "windows":
        UNAME = "WIN32"
        static = False
        if HOSTARCH in ['win32-x86', 'windows-x64', 'win32-x86-debug', 'windows-x64-debug']:
            if not SHARED:
                dlls = ['Com.dll', 'ca.dll']
                for dll in dlls:
                    dllpath = os.path.join(EPICSBASE, 'bin', HOSTARCH, dll)
                    # Missing DLL -> fall back to a static link.
                    if not os.path.exists(dllpath):
                        static = True
                        break
                    # Ship the DLL next to the package sources.
                    shutil.copy(dllpath,
                                os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                             'src', 'CaChannel'))
            macros += [('_CRT_SECURE_NO_WARNINGS', 'None'), ('EPICS_CALL_DLL', '')]
            cflags += ['/Z7']
            CMPL = 'msvc'
        if HOSTARCH in ['win32-x86-static', 'windows-x64-static'] or static:
            libraries += ['ws2_32', 'user32', 'advapi32']
            macros += [('_CRT_SECURE_NO_WARNINGS', 'None'), ('EPICS_DLL_NO', '')]
            umacros += ['_DLL']
            cflags += ['/EHsc', '/Z7']
            lflags += ['/LTCG']
            # Debug hosts link against the debug CRT.
            if HOSTARCH[-5:] == 'debug':
                libraries += ['msvcrtd']
                lflags += ['/NODEFAULTLIB:libcmtd.lib']
            else:
                libraries += ['msvcrt']
                lflags += ['/NODEFAULTLIB:libcmt.lib']
            CMPL = 'msvc'
        # GCC compiler
        if HOSTARCH in ['win32-x86-mingw', 'windows-x64-mingw']:
            macros += [('_MINGW', ''), ('EPICS_DLL_NO', '')]
            lflags += ['-static']
            CMPL = 'gcc'
        if HOSTARCH == 'windows-x64-mingw':
            macros += [('MS_WIN64', '')]
            CMPL = 'gcc'
    elif UNAME.lower() == "darwin":
        CMPL = 'clang'
        HOSTARCH = 'darwin-x86'
        if not SHARED:
            # Link the EPICS static archives directly instead of via -l flags.
            extra_objects = [os.path.join(EPICSBASE, 'lib', HOSTARCH, 'lib%s.a' % lib)
                             for lib in libraries]
            libraries = []
    elif UNAME.lower() == "linux":
        CMPL = 'gcc'
        if not SHARED:
            extra_objects = [os.path.join(EPICSBASE, 'lib', HOSTARCH, 'lib%s.a' % lib)
                             for lib in libraries]
            libraries = ['rt']
            # If libCom.a was built against readline, link readline too.
            if subprocess.call('nm %s | grep -q rl_'
                               % os.path.join(EPICSBASE, 'lib', HOSTARCH, 'libCom.a'),
                               shell=True) == 0:
                libraries += ['readline']
    else:
        print("Platform", UNAME, ARCH, " Not Supported")
        sys.exit(1)
    include_dirs = [os.path.join(EPICSBASE, "include"),
                    os.path.join(EPICSBASE, "include", "os", UNAME),
                    os.path.join(EPICSBASE, "include", "compiler", CMPL),
                    ]
    ca_module = Extension('CaChannel._ca',
                          sources=['src/CaChannel/_ca.cpp'],
                          extra_compile_args=cflags,
                          include_dirs=include_dirs,
                          define_macros=macros,
                          undef_macros=umacros,
                          extra_link_args=lflags,
                          extra_objects=extra_objects,
                          libraries=libraries,
                          library_dirs=[os.path.join(EPICSBASE, "lib", HOSTARCH)])
    # Shared Linux builds need an rpath so the EPICS libs resolve at runtime.
    if UNAME == "Linux" and SHARED:
        ca_module.runtime_library_dirs = [os.path.join(EPICSBASE, "lib", HOSTARCH)]
    return [ca_module], dlls
setup( name='imgui', version=VERSION, packages=find_packages('.'), author=u'Michał Jaworski', author_email='*****@*****.**', description="Cython-based Python bindings for dear imgui", long_description=read_md(README), url="https://github.com/swistakm/pyimgui", ext_modules=cythonize( [ Extension( "imgui.core", ["imgui/core.pyx"], extra_compile_args=os_specific_flags, define_macros=[ # note: for raising custom exceptions directly in ImGui code ('PYIMGUI_CUSTOM_EXCEPTION', None) ] + os_specific_macros + general_macros, include_dirs=['imgui', 'config-cpp'], ), ], compiler_directives=compiler_directives, **cythonize_opts), setup_requires=['cython'], include_package_data=True, license='BSD', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Cython', 'Programming Language :: Python :: 2',
import subprocess import sys from distutils.command import build_ext from setuptools import Extension, setup VERSION = re.search("__version__\s*=\s*\"(.*)\"", open('ssdeep.pyx').read(), re.M).group(1) if sys.version_info[0] == 3: CYTHON_OPTS = "-3 -f" else: CYTHON_OPTS = "-2 -f" ssdeep_extension = Extension( include_dirs=["ssdeep"], name="ssdeep", sources=["ssdeep.c"] ) class BuildExtension(build_ext.build_ext): def build_extension(self, ext): self.compile_cython() self.build_ssdeep() return build_ext.build_ext.build_extension(self, ext) def build_ssdeep(self): if len(get_objects()) == 0: try: os.chmod( "ssdeep/configure", stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
] #CLFFT_LIB_DIRS = [] #CLFFT_INCL_DIRS = [] CL_INCL_DIRS = [] EXTRA_COMPILE_ARGS = ['-stdlib=libc++'] EXTRA_LINK_ARGS = ['-stdlib=libc++'] import Cython.Compiler.Options Cython.Compiler.Options.generate_cleanup_code = 2 extensions = [ Extension( "gpyfft.gpyfftlib", [os.path.join('gpyfft', 'gpyfftlib.pyx')], include_dirs=CLFFT_INCL_DIRS + CL_INCL_DIRS, extra_compile_args=EXTRA_COMPILE_ARGS, extra_link_args=EXTRA_LINK_ARGS, libraries=['clFFT'], library_dirs=CLFFT_LIB_DIRS, language='c++', ) ] def copy_clfftdll_to_package(): import shutil shutil.copy(os.path.join(CLFFT_DIR, 'bin', 'clFFT.dll'), 'gpyfft') shutil.copy(os.path.join(CLFFT_DIR, 'bin', 'StatTimer.dll'), 'gpyfft') print("copied clFFT.dll, StatTimer.dll")
def __init__(self, *args, **kwargs):
    """Extension gated by a predicate that decides whether to build it."""
    # The predicate receives the builder; absent a caller-supplied one,
    # default to "always build".
    try:
        self.condition = kwargs.pop("condition")
    except KeyError:
        self.condition = lambda builder: True
    Extension.__init__(self, *args, **kwargs)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Unable to find version string.") # Extensions Compilation arguments # extra_compile_args = ['-std=c++11', "-O3"] # Extensions info # ext_modules = [ Extension( name = "pysurvival.utils._functions", sources = ["pysurvival/cpp_extensions/_functions.cpp", "pysurvival/cpp_extensions/functions.cpp" , ], extra_compile_args = extra_compile_args, language="c++", ), Extension( name = "pysurvival.utils._metrics", sources = ["pysurvival/cpp_extensions/_metrics.cpp", "pysurvival/cpp_extensions/non_parametric.cpp", "pysurvival/cpp_extensions/metrics.cpp", "pysurvival/cpp_extensions/functions.cpp", ], extra_compile_args = extra_compile_args, language="c++", ),
# run the customize_compiler class custom_build_ext(build_ext): def build_extensions(self): customize_compiler_for_nvcc(self.compiler) build_ext.build_extensions(self) cmdclass = {} if sys.platform.startswith('win') and os.path.exists(win_cuda_dir): arch = int(platform.architecture()[0][0:2]) ts_module = Extension('_trottersuzuki_wrap', sources=['trottersuzuki/trottersuzuki_wrap.cxx'], extra_objects=['trottersuzuki/src/common.obj', 'trottersuzuki/src/cpublock.obj', 'trottersuzuki/src/model.obj', 'trottersuzuki/src/solver.obj', 'trottersuzuki/src/hybrid.cu.obj', 'trottersuzuki/src/cc2kernel.cu.obj'], define_macros=[('CUDA', None)], library_dirs=[win_cuda_dir+"/lib/x"+str(arch)], libraries=['cudart', 'cublas'], include_dirs=[numpy_include]) else: if sys.platform.startswith('win'): extra_compile_args = ['-openmp', '-DWIN32'] libraries = None elif sys.platform.startswith('darwin') and 'CC' not in os.environ: extra_compile_args = {'cc': []} libraries = None else: extra_compile_args = {'cc': ['-fopenmp']} if 'CC' in os.environ and 'clang-omp' in os.environ['CC']:
import sys

# The wrapper targets Python 3 only; fail fast on Python 2.
if sys.version_info[0] == 2:
    sys.exit("Python 2 is not supported.")

# Defer importing numpy so `pip install` can resolve install_requires first.
# https://stackoverflow.com/questions/54117786/add-numpy-get-include-argument-to-setuptools-without-preinstalled-numpy
class get_numpy_include:
    # Lazy proxy: resolves numpy's include directory only when stringified
    # (i.e. at build time, after numpy has been installed).
    def __str__(self):
        import numpy
        return numpy.get_include()

module = Extension('k4a_module',
                   sources=['pyk4a/pyk4a.cpp'],
                   include_dirs=[get_numpy_include()],
                   libraries=['k4a'])

# NOTE(review): download_url points at the 0.2 tarball while version is
# '0.3' — confirm whether it should track the release tag.
setup(name='pyk4a',
      version='0.3',
      description='Python wrapper over Azure Kinect SDK',
      license='GPL-3.0',
      author='Etienne Dubeau',
      install_requires=['numpy'],
      python_requires='>=3.4',
      author_email='*****@*****.**',
      url='https://github.com/etiennedub/pyk4a/',
      download_url='https://github.com/etiennedub/pyk4a/archive/0.2.tar.gz',
      packages=['pyk4a'],
      ext_modules=[module])
def __init__(self, *args, **kwargs):
    """Initialize the extension while preserving the caller's source list.

    setuptools' Extension rewrites ``.pyx`` entries to ``.c`` during
    ``__init__``; restoring the original list afterwards keeps the Cython
    inputs intact for a later cythonize step.
    """
    # Sources may arrive positionally (name, sources) or as a keyword; the
    # original only checked kwargs and silently set self.sources to None
    # for positional callers.
    if 'sources' in kwargs:
        save_sources = kwargs['sources']
    elif len(args) > 1:
        save_sources = args[1]
    else:
        save_sources = None
    _Extension.__init__(self, *args, **kwargs)
    # Only restore when we actually captured a list; otherwise keep
    # whatever the base class initialized.
    if save_sources is not None:
        self.sources = save_sources
src_dir = 'src/'
inc_dir = 'src/'
# abPOA core C sources compiled into the Python extension.
src = [module_src,
       src_dir + 'abpoa_align.c',
       src_dir + 'abpoa_graph.c',
       src_dir + 'simd_abpoa_align.c',
       src_dir + 'utils.c',
       src_dir + 'simd_check.c',
       src_dir + 'abpoa_plot.c']

# The Python-binding README (not the repo root one) is the PyPI description.
long_description = open('python/README.md').read()

setup(
    # Information
    name="pyabpoa",
    description="pyabpoa: SIMD-based partial order alignment using adaptive band",
    long_description=long_description,
    long_description_content_type="text/markdown",
    version="1.0.5",
    url="https://github.com/yangao07/abPOA",
    author="Yan Gao",
    author_email="*****@*****.**",
    # Fixed typo: was declared as the nonexistent "GLP" license.
    license="GPL",
    keywords="multiple-sequence-alignment partial-order-graph-alignment",
    # Build instructions
    ext_modules=[Extension("pyabpoa",
                           sources=src,
                           include_dirs=[inc_dir],
                           depends=[src_dir + 'abpoa.h',
                                    src_dir + 'abpoa_align.h',
                                    src_dir + 'abpoa_graph.h',
                                    src_dir + 'kdq.h',
                                    src_dir + 'kseq.h',
                                    src_dir + 'simd_abpoa_align.h',
                                    src_dir + 'simd_instruction.h',
                                    src_dir + 'utils.h',
                                    'python/cabpoa.pxd'],
                           libraries=['z', 'm', 'pthread'],
                           extra_compile_args=['-O3',
                                               '-Wno-error=declaration-after-statement',
                                               simd_flag])],
    install_requires=['cython'],
    cmdclass=cmdclass
)
import warnings try: from Cython.Distutils import build_ext from setuptools import setup, Extension HAVE_CYTHON = True except ImportError as e: warnings.warn(e.message) from setuptools import setup, Extension from setuptools.command.build_ext import build_ext HAVE_CYTHON = False import numpy _utils = Extension('sstsne._utils', sources=['sstsne/_utils.pyx'], include_dirs=[numpy.get_include()]) _barnes_hut_tsne = Extension('sstsne._barnes_hut_tsne', sources=['sstsne/_barnes_hut_tsne.pyx'], include_dirs=[numpy.get_include(), '/System/Library/Frameworks/Accelerate.framework/Versions/A/Frameworks/vecLib.framework/Versions/A/Headers/']) def readme(): with open('README.md') as readme_file: return readme_file.read() configuration = { 'name' : 'sstsne', 'version' : '0.1', 'description' : 'Semi-Supervised t-SNE using a Bayesian prior based on partial labelling', 'long_description' : readme(),
'License :: OSI Approved :: Python Software Foundation License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Topic :: Scientific/Engineering :: Visualization', ], package_dir={"": "lib"}, packages=find_packages("lib"), namespace_packages=["mpl_toolkits"], py_modules=["pylab"], # Dummy extension to trigger build_ext, which will swap it out with # real extensions that can depend on numpy for the build. ext_modules=[Extension("", [])], package_data=package_data, python_requires='>={}'.format('.'.join(str(n) for n in min_version)), setup_requires=[ "certifi>=2020.06.20", "numpy>=1.15", ], install_requires=[ "certifi>=2020.06.20", "cycler>=0.10", "kiwisolver>=1.0.1", "numpy>=1.16", "pillow>=6.2.0", "pyparsing>=2.2.1", "python-dateutil>=2.7",
#### PRE-CODE (to figure out, whether Cython is installed or not) try: from Cython.Distutils import build_ext CYTHON_FOUND = True except ImportError as e: from distutils.command.build_ext import build_ext warnings.warn(WARN_CYTHON_NOT_FOUND) CYTHON_FOUND = False #### FUNCTIONS def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() ### MAIN SETUP logdet = Extension('logdet', sources = ['logdet/logdet.pyx', 'logdet/logdet.pxd'], libraries=['lapack', 'blas'], extra_compile_args=["-O3"]) configuration = { 'name' : 'logdet', 'version' : '0.1.0', # major.minor.micro 'author' : 'Oskar Maier', 'author_email' : '*****@*****.**', 'url' : 'https://github.com/loli/logdet', 'license' : 'LICENSE.txt', 'keywords' : 'lapack log-determinant computation', 'long_description' : read('README.rst'), 'classifiers' : [ 'Development Status :: 5 - Production/Stable', 'Environment :: Console',
# Find pybind11 header files class get_pybind_include(object): def __init__(self, user=False): self.user = user def __str__(self): import pybind11 return pybind11.get_include(self.user) ext_modules = [ Extension( '_matrix_hal', sources=getCppFiles(['hal_wrapper', 'hal_wrapper/drivers']), include_dirs=[get_pybind_include(), get_pybind_include(user=True)], libraries=['matrix_creator_hal'], extra_compile_args=['-O3'], language='c++'), ] class BuildExt(build_ext): """A custom build extension for adding compiler-specific options.""" c_opts = { 'unix': [], } l_opts = { 'unix': [], }
def __init__(self, *args, **kwargs):
    # Resolve pkg-config metadata for each requested package before
    # initializing the underlying extension.
    packages = kwargs.pop('pkg_config', [])
    for package in packages:
        # NOTE(review): the return value is discarded, and only the last
        # iteration's result would survive anyway — presumably
        # __options_for_package mutates *kwargs* in place; confirm against
        # its definition elsewhere in this file.
        options = self.__options_for_package(package, kwargs)
    _Extension.__init__(self, *args, **kwargs)
except ImportError: can_build_ext = False else: can_build_ext = True # If this is incremented, also increment in __init__.py VERSION = '0.6.9' cmdclass = {} ext_modules = [] include_dirs = [] if can_build_ext: cmdclass['build_ext'] = build_ext ext_modules.append(Extension('cutils', ['KDEpy/cutils.pyx'])) include_dirs.append(np.get_include()) else: # Build extension with previously Cython generated source. ext_modules.append(Extension('cutils', ['KDEpy/cutils.c'])) setup( name='KDEpy', version=VERSION, description='Kernel Density Estimation in Python.', long_description='Kernel Density Estimation in Python.', url='https://github.com/tommyod/KDEpy', author='tommyod',
def __init__(self, name, cmd, cwd=".", output_dir=".", env=None):
    """An Extension with no sources; its artifact is produced by running *cmd*."""
    # No compilable sources — the build step is delegated to an external command.
    Extension.__init__(self, name, sources=[])
    normalized_cwd = path.normpath(cwd)
    normalized_out = path.normpath(output_dir)
    self.cmd = cmd
    self.cwd = normalized_cwd
    self.output_dir = normalized_out
    # An explicit (non-empty) env mapping wins; otherwise snapshot os.environ.
    self.env = env or dict(os.environ)
""" CLASSIFIERS = filter(None, map(str.strip, """ Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python Topic :: Software Development :: Libraries :: Python Modules """.splitlines())) speedups = Feature( "options C speed-enhancement modules", standard=True, ext_modules = [ Extension("simplejson._speedups", ["simplejson/_speedups.c"]), ], ) class BuildFailed(Exception): pass class ve_build_ext(build_ext): # This class allows C extension building to fail. def run(self): try: build_ext.run(self) except DistutilsPlatformError, x: raise BuildFailed()
"-DBUILD_STATIC_LIBS=OFF", ] else: build_command = ['make'] + LIB_OBJECTS p = Popen(build_command, cwd=DIR) p.wait() if p.returncode != 0: raise Exception("Could not build C/C++ code") BuildExt.run(self) jsonnet_ext = Extension( '_jsonnet', sources=MODULE_SOURCES, extra_objects=LIB_OBJECTS, include_dirs=['include', 'third_party/md5', 'third_party/json'], language='c++' ) setup(name='jsonnet', url='https://jsonnet.org', description='Python bindings for Jsonnet - The data templating language ', author='David Cunningham', author_email='*****@*****.**', version=get_version(), cmdclass={ 'build_ext': BuildJsonnetExt, }, ext_modules=[jsonnet_ext], test_suite="python._jsonnet_test",
cythonize = False ext = '.pyx' if cythonize else '.c' # set runtime libraries runtime_library_dirs = [] extra_link_args = [] if platform.system() == 'Linux': runtime_library_dirs.append(libdir) elif platform.system() == 'Darwin': extra_link_args.append('-Wl,-rpath,' + libdir) extensions = [ Extension('pyscipopt.scip', [os.path.join(packagedir, 'scip' + ext)], include_dirs=[includedir], library_dirs=[libdir], libraries=[libname], runtime_library_dirs=runtime_library_dirs, extra_link_args=extra_link_args) ] if cythonize: extensions = cythonize(extensions) setup(name='PySCIPOpt', version='1.1.0', description='Python interface and modeling environment for SCIP', url='https://github.com/SCIP-Interfaces/PySCIPOpt', author='Zuse Institute Berlin', author_email='*****@*****.**', license='MIT', classifiers=[
# --- Dependency selection --------------------------------------------------
# Prefer whichever TensorFlow variant (CPU or GPU) is already installed so we
# do not force a second flavour onto the user's environment.
from setup_utils import custom_build_ext

REQUIRED_PACKAGES = ['wrapt']

for tf_package_name in ['tensorflow', 'tensorflow-gpu']:
    tf_loader = pkgutil.find_loader(tf_package_name)
    if tf_loader is not None:
        REQUIRED_PACKAGES.append(tf_package_name)
        break
else:
    # in case no available package is found, default to 'tensorflow'
    REQUIRED_PACKAGES.append('tensorflow')

# C++ extension exposing NVIDIA NVTX range ops/kernels to TensorFlow.
tensorflow_nvtx_lib = Extension('nvtx.plugins.tf.lib.nvtx_ops',
                                sources=[
                                    'nvtx_plugins/cc/nvtx_ops.cc',
                                    'nvtx_plugins/cc/nvtx_kernels.cc',
                                ],
                                undef_macros=["NDEBUG"],
                                # NOTE(review): '-lnvToolsExt' is a linker
                                # flag; passing it in extra_compile_args is
                                # presumably a harmless no-op — confirm.
                                extra_compile_args=['-lnvToolsExt'],
                                extra_link_args=['-lnvToolsExt'])

# =================== Reading Readme file as TXT files ===================

if os.path.exists('README.rst'):
    # codec is used for consistent encoding.  BUG FIX: the original called
    # .read() on the open file object and leaked the handle; use a context
    # manager so it is closed deterministically.
    readme_path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'README.rst')
    with codecs.open(readme_path, 'r', 'utf-8') as readme_file:
        long_description = readme_file.read()
    # Rewrite relative image links so they render on PyPI.
    long_description = long_description.replace(
        "docs/images/",
        "https://github.com/NVIDIA/nvtx-plugins/raw/master/docs/images/")
''', author='Casey Duncan', author_email='*****@*****.**', url='https://github.com/caseman/noise', classifiers=[ 'Development Status :: 4 - Beta', 'Topic :: Multimedia :: Graphics', 'License :: OSI Approved :: MIT License', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Programming Language :: C', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ], package_dir={'noise': ''}, packages=['noise'], ext_modules=[ Extension( 'noise._simplex', ['_simplex.c'], extra_compile_args=compile_args, ), Extension( 'noise._perlin', ['_perlin.c'], extra_compile_args=compile_args, ) ], )
return return loggingDir, importLibraryDir commandClasses = dict( build_ext = build_ext, bdist_rpm = bdist_rpm) if sys.platform == "win32": commandClasses["bdist_msi"] = bdist_msi # build utility module if sys.platform == "win32": libraries = ["imagehlp", "Shlwapi"] else: libraries = [] utilModule = Extension("cx_Freeze.util", ["source/util.c"], libraries = libraries) # build base executables docFiles = "README.txt" scripts = ["cxfreeze", "cxfreeze-quickstart"] options = dict(bdist_rpm = dict(doc_files = docFiles), install = dict(optimize = 1)) depends = ["source/bases/Common.c"] console = Extension("cx_Freeze.bases.Console", ["source/bases/Console.c"], depends = depends, libraries = libraries) extensions = [utilModule, console] if sys.platform == "win32": scripts.append("cxfreeze-postinstall") options["bdist_msi"] = dict(install_script = "cxfreeze-postinstall") gui = Extension("cx_Freeze.bases.Win32GUI", ["source/bases/Win32GUI.c"], depends = depends, libraries = libraries + ["user32"])
def __init__(self, *args, **kwargs):
    """Extension subclass that records an extra ``export_include`` list.

    The ``export_include`` keyword (default: empty list) is consumed here
    so it never reaches ``Extension.__init__``, which would not accept it.
    """
    self.export_include = kwargs.pop('export_include', [])
    Extension.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    # Let the base class populate all standard Extension attributes first.
    Extension.__init__(self, *args, **kwargs)
    # Stash the instance attribute the base initializer created, then delete
    # it so attribute lookups fall back to the class-level ``include_dirs``
    # property this subclass presumably defines — the delete is what makes
    # that property visible again.
    self._include_dirs = self.include_dirs
    del self.include_dirs  # restore overwritten property
def build_extensions(self):
    """Discover system headers/libraries, then build the _ImgWorker extension.

    Extends the compiler's search paths from the Python installation,
    environment variables (CFLAGS/LDFLAGS, C_INCLUDE_PATH/CPATH/INCLUDE,
    LD_RUN_PATH/LIBRARY_PATH/LIB), the interpreter prefix, and
    platform-specific locations (fink/MacPorts/Homebrew on macOS, multiarch
    directories on Linux).  Verifies that libjpeg and the boost headers and
    libraries are available, then replaces ``self.extensions`` with a single
    Extension wired to the discovered paths and delegates to the base class.

    Raises:
        ValueError: if jpeglib.h/libjpeg or boost headers/libraries cannot
            be found, or the Linux platform cannot be identified.
    """
    pyincdir = dsc.get_python_inc(plat_specific=1)
    pylibdir = os.path.join('/', *pyincdir.split('/')[:-2] + ['lib'])
    # Include directories needed by .cpp files in extension
    try:
        import numpy as np
        npyincdir = np.get_include()
    except ImportError:
        # Best-guess fallback location for the numpy headers.
        npyincdir = os.path.join(
            pylibdir.replace('lib/python', 'local/lib/python'),
            'numpy', 'core', 'include')
        # BUG FIX: the original wrote "...header %s".format(npyincdir),
        # mixing %-style with str.format, so it printed a literal "%s".
        print("Unable to import numpy, trying header {}".format(npyincdir))
    library_dirs = [pylibdir, '/usr/local/lib']
    include_dirs = ['/usr/include', '/usr/local/include', pyincdir, npyincdir]
    # Harvest -I/-L switches from the compile/link environment variables.
    for k in ('CFLAGS', 'LDFLAGS'):
        if k in os.environ:
            for match in re.finditer(r'-I([^\s]+)', os.environ[k]):
                _add_directory(include_dirs, match.group(1))
            for match in re.finditer(r'-L([^\s]+)', os.environ[k]):
                _add_directory(library_dirs, match.group(1))
    # include, rpath, if set as environment variables:
    for k in ('C_INCLUDE_PATH', 'CPATH', 'INCLUDE'):
        if k in os.environ:
            for d in os.environ[k].split(os.path.pathsep):
                _add_directory(include_dirs, d)
    for k in ('LD_RUN_PATH', 'LIBRARY_PATH', 'LIB'):
        if k in os.environ:
            for d in os.environ[k].split(os.path.pathsep):
                _add_directory(library_dirs, d)
    prefix = dsc.get_config_var("prefix")
    if prefix:
        _add_directory(library_dirs, os.path.join(prefix, "lib"))
        _add_directory(include_dirs, os.path.join(prefix, "include"))
    if sys.platform == "darwin":
        # fink installation directories
        _add_directory(library_dirs, "/sw/lib")
        _add_directory(include_dirs, "/sw/include")
        # darwin ports installation directories
        _add_directory(library_dirs, "/opt/local/lib")
        _add_directory(include_dirs, "/opt/local/include")
        # if Homebrew is installed, use its lib and include directories
        import subprocess
        try:
            prefix = subprocess.check_output(
                ['brew', '--prefix']
            ).strip().decode('latin1')
        except Exception:
            # Homebrew not installed (narrowed from a bare `except:` so
            # KeyboardInterrupt/SystemExit are no longer swallowed).
            prefix = None
        if prefix:
            # add Homebrew's include and lib directories
            _add_directory(library_dirs, os.path.join(prefix, 'lib'))
            _add_directory(include_dirs, os.path.join(prefix, 'include'))
    elif sys.platform.startswith("linux"):
        arch_tp = (plat.processor(), plat.architecture()[0])
        if arch_tp == ("x86_64", "32bit"):
            # 32 bit build on 64 bit machine.
            _add_directory(library_dirs, "/usr/lib/i386-linux-gnu")
        else:
            for platform_ in arch_tp:
                if not platform_:
                    continue
                if platform_ in ["x86_64", "64bit"]:
                    _add_directory(library_dirs, "/lib64")
                    _add_directory(library_dirs, "/usr/lib64")
                    _add_directory(
                        library_dirs, "/usr/lib/x86_64-linux-gnu")
                    break
                elif platform_ in ["i386", "i686", "32bit"]:
                    _add_directory(
                        library_dirs, "/usr/lib/i386-linux-gnu")
                    break
                elif platform_ in ["aarch64"]:
                    _add_directory(library_dirs, "/usr/lib64")
                    _add_directory(
                        library_dirs, "/usr/lib/aarch64-linux-gnu")
                    break
                else:
                    raise ValueError(
                        "Unable to identify Linux platform: `%s`" % platform_)
    self.compiler.library_dirs = library_dirs + self.compiler.library_dirs
    self.compiler.include_dirs = include_dirs + self.compiler.include_dirs
    # NOTE(review): sys.version[:3] yields "3.1" on Python 3.10+; kept as-is
    # because this file clearly targets older interpreters — confirm.
    pylib = "python{}".format(sys.version[:3])
    if sys.version[:3] == '3.4':
        pylib += 'm'
    libs = [pylib]
    if _find_include_file(self, "jpeglib.h"):
        if _find_library_file(self, "jpeg"):
            libs.append('jpeg')
        else:
            raise ValueError("Unable to find libjpeg")
    else:
        raise ValueError("Unable to find jpeglib.h")
    if _find_include_file(self, "boost/thread.hpp"):
        if _find_library_file(self, "boost_thread"):
            libs.append("boost_thread")
        elif _find_library_file(self, "boost_thread-mt"):
            libs.append("boost_thread-mt")
        else:
            raise ValueError("Unable to find libboost_thread")
        if _find_library_file(self, "boost_system"):
            libs.append("boost_system")
        elif _find_library_file(self, "boost_system-mt"):
            libs.append("boost_system-mt")
        else:
            raise ValueError("Unable to find libboost_system")
    else:
        raise ValueError("Unable to find boost headers")
    print(libs)
    iwt = Extension('_ImgWorker',
                    sources=['imgworker.cpp'],
                    include_dirs=self.compiler.include_dirs,
                    library_dirs=self.compiler.library_dirs,
                    libraries=libs)
    iwt._needs_stub = False
    exts = [iwt]
    self.extensions[:] = exts
    build_ext.build_extensions(self)
# Build-log diagnostic; ``rtd`` is computed earlier in this file.
print('Readthedocs environment: %s' % (rtd,))
if 'VSC_SCRATCH' in os.environ.keys():
    # we are running on the VSC cluster
    zlibdir = os.environ.get('EBROOTZLIB')  # SOFTROOTZLIB
    libraries = ['z']
    library_dirs = [zlibdir + '/lib']
    include_dirs = ['.', 'src', npinclude, zlibdir + '/include']
else:
    # Regular build: no cluster-provided zlib to link against here.
    libraries = []
    library_dirs = []
    include_dirs = ['.', 'src', npinclude]

# SWIG-wrapped Orthogonal Array library extension module.
oalib_module = Extension('_oalib',
                         sources=sources,
                         include_dirs=include_dirs,
                         library_dirs=library_dirs,
                         libraries=libraries,
                         swig_opts=swig_opts
                         )

# Disable OpenMP for both the C++ compilation and the SWIG preprocessor run.
compile_options += ['-DNOOMP']
swig_opts += ['-DNOOMP']
oalib_module.extra_compile_args = compile_options

if checkZlib(verbose=1):
    if platform.system() == 'Windows':
        # NOTE(review): zlib support is deliberately skipped on Windows even
        # when detected — confirm this is intentional.
        pass
    else:
        # Enable zlib support in the sources and link against libz.
        zlibflag = '-DUSEZLIB'
        oalib_module.extra_compile_args += [zlibflag]
        swig_opts += [zlibflag]
        oalib_module.extra_link_args += ['-lz']
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Development Status :: 4 - Beta", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Scientific/Engineering :: Information Analysis", "Topic :: Scientific/Engineering :: Physics", "Topic :: Scientific/Engineering :: Chemistry", "Topic :: Software Development :: Libraries :: Python Modules" ], ext_modules=[ Extension("pymatgen.optimization.linear_assignment", ["pymatgen/optimization/linear_assignment.c"], extra_link_args=extra_link_args), Extension("pymatgen.util.coord_cython", ["pymatgen/util/coord_cython.c"], extra_link_args=extra_link_args) ], entry_points={ 'console_scripts': [ 'pmg = pymatgen.cli.pmg:main', 'feff_input_generation = pymatgen.cli.feff_input_generation:main', 'feff_plot_cross_section = pymatgen.cli.feff_plot_cross_section:main', 'feff_plot_dos = pymatgen.cli.feff_plot_dos:main', 'gaussian_analyzer = pymatgen.cli.gaussian_analyzer:main', 'get_environment = pymatgen.cli.get_environment:main', ] })
if sys.platform == 'win32': libraries += ['ws2_32'] define_macros += [('FD_SETSIZE', '1024'), ('_WIN32', '1')] def expand(*lst): result = [] for item in lst: for name in sorted(glob(item)): result.append(name) return result CORE = Extension(name='gevent.core', sources=['gevent/gevent.corecext.c'], include_dirs=['libev'] if LIBEV_EMBED else [], libraries=libraries, define_macros=define_macros, depends=expand('gevent/callbacks.*', 'gevent/stathelper.c', 'gevent/libev*.h', 'libev/*.*')) # QQQ libev can also use -lm, however it seems to be added implicitly ARES = Extension(name='gevent.ares', sources=['gevent/gevent.ares.c'], include_dirs=['c-ares'] if CARES_EMBED else [], libraries=libraries, define_macros=define_macros, depends=expand('gevent/dnshelper.c', 'gevent/cares_*.*')) ARES.optional = True def make_universal_header(filename, *defines): defines = [('#define %s ' % define, define) for define in defines]
def __init__(self, *args, **kwargs):
    # Pre-seed these attributes before the base initializer runs.
    # NOTE(review): _Extension.__init__ normally reassigns both from kwargs;
    # presumably this guards code that touches them before/without init
    # completing — confirm against the base class used here.
    self.libraries = []
    self.define_macros = []
    # Python 2 has this as an old-style class for some reason
    # so super() doesn't work.
    _Extension.__init__(self, *args, **kwargs)  # pylint:disable=no-member,non-parent-init-called
base_path = os.path.dirname(os.path.abspath(__file__)) # A sdist will have C files, use those: if os.path.exists(os.path.join(base_path, 'pconsc4/parsing/_load_data.c')): use_cython = False else: # It appears we are on git, go ahead and cythonice everything use_cython = True flags = "-O2 -march=native -pipe -mtune=native".split() if use_cython: extensions = [ Extension('pconsc4.parsing._load_data', ['pconsc4/parsing/_load_data.pyx'], include_dirs=[np.get_include()], extra_compile_args=flags, extra_link_args=flags), Extension('pconsc4.parsing._mi_info', ['pconsc4/parsing/_mi_info.pyx'], include_dirs=[np.get_include()], extra_compile_args=flags, extra_link_args=flags) ] else: extensions = [ Extension('pconsc4.parsing._load_data', ['pconsc4/parsing/_load_data.c'], include_dirs=[np.get_include()], extra_compile_args=flags, extra_link_args=flags), Extension('pconsc4.parsing._mi_info', ['pconsc4/parsing/_mi_info.c'],
from os import path
from io import open

# Directory containing this setup script.
here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

# Package metadata; the C extension wraps sources living one level up.
setup(name="miceapi",
      author="Amélia O. F. da S.",
      author_email="*****@*****.**",
      description="An API for managing multiple simultaneous mice input on Linux",
      long_description=long_description,
      long_description_content_type='text/markdown',
      url="https://github.com/m3101/miceapi",
      version="1.0.3.dev4",
      license="MIT",
      classifiers=[
          "Development Status :: 2 - Pre-Alpha",
          "Programming Language :: C",
          "License :: OSI Approved :: MIT License",
          "Operating System :: POSIX :: Linux",
          "Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator",
          "Topic :: Software Development :: User Interfaces"
      ],
      keywords="mouse mice touchpad multiple simultaneous",
      python_requires='>=3',
      py_modules=["miceapi_async"],
      ext_modules=[Extension("miceapi",
                             ["wrapper.c",
                              "../src/miceapi_main.c",
                              "../src/miceapi_events.c"])
                   ])
from distutils.extension import Extension

extra = dict()

from distutils import sysconfig

# Link against libm only when this Python build itself was linked with it.
if sysconfig.get_config_var("LIBM") == "-lm":
    libraries = ["m"]
else:
    libraries = []

# Bundled timelib C sources plus the Cython-generated wrapper (timelib.c).
sources = ["ext-date-lib/astro.c", "ext-date-lib/dow.c",
           "ext-date-lib/parse_date.c", "ext-date-lib/parse_tz.c",
           "ext-date-lib/timelib.c", "ext-date-lib/tm2unixtime.c",
           "ext-date-lib/unixtime2tm.c", "timelib.c"]

# Regenerate the wrapper from timelib.pyx when building from a checkout
# (an sdist ships the pre-generated timelib.c).
if not os.path.exists("timelib.c"):
    os.system("cython timelib.pyx")

setup(name="timelib",
      version="0.2.4",
      description="parse english textual date descriptions",
      author="Ralf Schmitt",
      author_email="*****@*****.**",
      url="https://github.com/pediapress/timelib/",
      ext_modules=[Extension("timelib", sources=sources,
                             libraries=libraries,
                             define_macros=[("HAVE_STRING_H", 1)])],
      include_dirs=[".", "ext-date-lib"],
      long_description=open("README.rst").read(),
      license="zlib/php",
      **extra)
def __init__(self, *args, **kwargs):
    """Describe a D-language extension module.

    D has no C preprocessor macros, so the distutils ``define_macros``
    channel is repurposed here to smuggle D-specific settings — version and
    debug flags, the extension name, the Tango switch, and the Pyd feature
    toggles — through to the D compiler class, which unpacks them later.

    Keyword arguments consumed here (not forwarded to std_Extension):
    ``version_flags``/``debug_flags``, ``tango``, ``raw_only``,
    ``with_pyd``, ``with_st``, ``with_meta``, ``with_main``.

    Raises DistutilsOptionError if C-style macro arguments are supplied or
    an inconsistent feature combination is requested.
    """
    if 'define_macros' in kwargs or 'undef_macros' in kwargs:
        raise DistutilsOptionError('D does not support macros, so the'
            ' "define_macros" and "undef_macros" arguments are not'
            ' supported. Instead, consider using the "Version Condition"'
            ' and "Debug Condition" conditional compilation features'
            ' documented at http://www.digitalmars.com/d/version.html'
            '\n Version flags can be passed to the compiler via the'
            ' "version_flags" keyword argument to DExtension; debug flags'
            ' via the "debug_flags" keyword argument. For example, when'
            ' used with the DMD compiler,'
            '\n DExtension(..., version_flags=["a", "b"])'
            '\nwill cause'
            '\n -version=a -version=b'
            '\nto be passed to the compiler.'
        )

    # If the user has requested any version_flags or debug_flags, we use
    # the distutils 'define_macros' keyword argument to carry them (they're
    # later unpacked in the dcompiler module).
    define_macros = []
    if 'version_flags' in kwargs or 'debug_flags' in kwargs:
        if 'version_flags' in kwargs:
            for flag in kwargs['version_flags']:
                define_macros.append((flag, 'version'))
            del kwargs['version_flags']
        if 'debug_flags' in kwargs:
            for flag in kwargs['debug_flags']:
                define_macros.append((flag, 'debug'))
            del kwargs['debug_flags']

    # Pass in the extension name so the compiler class can know it
    if 'name' in kwargs:
        define_macros.append((kwargs['name'], 'name'))
    elif len(args) > 0:
        define_macros.append((args[0], 'name'))

    # Pass in the 'tango' flag, also
    with_tango = kwargs.pop('tango', False)
    if with_tango:
        define_macros.append(('Pyd_with_Tango', 'version'))

    kwargs['define_macros'] = define_macros

    # Similarly, pass in with_pyd, &c, via define_macros.
    if 'raw_only' in kwargs:
        # raw_only is shorthand for turning every Pyd feature off.
        kwargs['with_pyd'] = False
        kwargs['with_st'] = False
        kwargs['with_meta'] = False
        kwargs['with_main'] = False
        del kwargs['raw_only']
    with_pyd = kwargs.pop('with_pyd', True)
    with_st = kwargs.pop('with_st', False)  # 5/23/07 st off by default.
    # StackThreads doesn't work with Tango at the moment.
    if with_tango:
        with_st = False
    with_meta = kwargs.pop('with_meta', True)
    with_main = kwargs.pop('with_main', True)
    if with_pyd and not with_meta:
        raise DistutilsOptionError(
            'Cannot specify with_meta=False while using Pyd. Specify'
            ' raw_only=True or with_pyd=False if you want to compile a raw Python/C'
            ' extension.'
        )
    if with_main and not with_pyd:
        # The special PydMain function should only be used when using Pyd
        with_main = False
    # Feature tuple travels through define_macros under the 'aux' tag.
    define_macros.append(((with_pyd, with_st, with_meta, with_main), 'aux'))
    std_Extension.__init__(self, *args, **kwargs)
# # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from setuptools import find_packages, setup, Extension import sys assert sys.version_info >= (3,), 'Python 3 is required' VERSION = '1.6.0' autopilot_tracepoint = Extension( 'autopilot.tracepoint', libraries=['lttng-ust'], include_dirs=['lttng_module'], sources=['lttng_module/autopilot_tracepoint.c'] ) setup( name='autopilot', version=VERSION, description='Functional testing tool for Ubuntu.', author='Thomi Richards', author_email='*****@*****.**', url='https://launchpad.net/autopilot', license='GPLv3', packages=find_packages(), test_suite='autopilot.tests', scripts=['bin/autopilot3-sandbox-run'], ext_modules=[autopilot_tracepoint],
def __init__(self, name):
    """Register a source-less placeholder extension.

    With no sources listed, the standard compile step has nothing to do;
    the real build for this extension is driven elsewhere.
    """
    # don't invoke the original build_ext for this special extension
    Extension.__init__(self, name, sources=[])
try: from setuptools import setup, Extension except ImportError: from distutils.core import setup, Extension import sys, imp, os, glob, io def version(): module = imp.load_source("hiredis.version", "hiredis/version.py") return module.__version__ ext = Extension( "hiredis.hiredis", sources=sorted( glob.glob("src/*.c") + ["vendor/hiredis/%s.c" % src for src in ("alloc", "read", "sds")]), include_dirs=["vendor"]) setup( name="hiredis", version=version(), description="Python wrapper for hiredis", long_description=io.open('README.md', 'rt', encoding='utf-8').read(), long_description_content_type='text/markdown', url="https://github.com/redis/hiredis-py", author="Jan-Erik Rediger, Pieter Noordhuis", author_email="[email protected], [email protected]", keywords=["Redis"], license="BSD", packages=["hiredis"],
def __init__(self, name, sourcedir=''):
    """Declare an out-of-tree extension rooted at *sourcedir*.

    No sources are listed — a custom build step is expected to consume
    ``self.sourcedir``, which is normalised to an absolute path here so it
    stays valid regardless of the build's working directory.
    """
    Extension.__init__(self, name, sources=[])
    self.sourcedir = os.path.abspath(sourcedir)
from setuptools import find_packages, Extension, setup extensions = [ Extension('offline_judge._checker', sources=['offline_judge/_checker.c']) ] with open('README.md') as f: readme = f.read() setup( name='offline_judge', version='1.0.4', entry_points={ 'console_scripts': [ 'judge = offline_judge.judge:main', ], }, ext_modules=extensions, author='Evan Zhang, jw4js', install_requires=['termcolor'], description='An offline equivalent of an online judge.', long_description=readme, long_description_content_type="text/markdown", url='https://github.com/Ninjaclasher/offline_judge', packages=find_packages(), classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Operating System :: POSIX :: Linux',