def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the 'spatial_016' subpackage.

    Builds only the C++ cKDTree extension; include paths cover the
    Python headers, the numpy headers and the ckdtree sources.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info
    from distutils.sysconfig import get_python_inc
    config = Configuration('spatial_016', parent_package, top_path)
    # Python include dirs: add the platform-specific one only if distinct.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    # cKDTree C++ sources and headers live under ckdtree/src.
    ckdtree_src = ['ckdtree_query.cxx',
                   'ckdtree_globals.cxx',
                   'ckdtree_cpp_exc.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]
    ckdtree_headers = ['ckdtree_decl.h',
                       'ckdtree_exc.h',
                       'ckdtree_methods.h',
                       'ckdtree_utils.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]
    # Rebuild whenever the generated wrapper, the headers or the sources change.
    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=[join('ckdtree', 'ckdtree.cxx')] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])
    return config
def compile_src(self, extra_cflags=None):
    """Compile self.srcfile into the shared module self.modfile.

    Builds a compiler command line from $CC (default gcc), distutils'
    CFLAGS and a few extra flags, runs it with LC_ALL=C, and stores the
    process plus its decoded stdout/stderr on self.

    Raises CompilationError if the compiler exits non-zero.
    """
    self.args = [os.environ.get("CC", "gcc")]
    self.args += ["-o", self.modfile]
    # Fixed: the original used Python 2 print statements (`print x`)
    # alongside print() calls; normalised to the function form used by
    # the rest of this file (which already checks six.PY3).
    print(self.modfile)
    self.args += ["-I" + sc.get_python_inc(),
                  "-I" + sc.get_python_inc(plat_specific=True)]
    self.args += sc.get_config_var("CFLAGS").split()
    self.args += ["-Wall", "-Werror"]  # during testing
    # on some builds of Python, CFLAGS does not contain -fPIC, but it
    # appears to still be necessary:
    self.args += ["-fPIC"]
    self.args += ["-shared"]  # not sure why this is necessary
    if extra_cflags:
        self.args += extra_cflags
    self.args += [self.srcfile]
    print(self.args)
    # Force a predictable locale so compiler messages are stable.
    env = dict(os.environ)
    env["LC_ALL"] = "C"
    # Invoke the compiler:
    self.p = Popen(self.args, env=env, stdout=PIPE, stderr=PIPE)
    self.out, self.err = self.p.communicate()
    if six.PY3:
        self.out = self.out.decode()
        self.err = self.err.decode()
    c = self.p.wait()
    print("c", c)
    if c != 0:
        raise CompilationError(self)
    assert os.path.exists(self.modfile)
    print(self.modfile)
def compile_src(self, extra_cflags=None):
    """Build self.srcfile into the shared object self.modfile using $CC
    (default gcc); raises CompilationError on a non-zero exit."""
    compiler = os.environ.get('CC', 'gcc')
    cmd = [compiler, '-o', self.modfile]
    cmd += ['-I' + sc.get_python_inc(),
            '-I' + sc.get_python_inc(plat_specific=True)]
    cmd += sc.get_config_var('CFLAGS').split()
    cmd += ['-Wall', '-Werror']  # during testing
    # on some builds of Python, CFLAGS does not contain -fPIC, but it
    # appears to still be necessary:
    cmd += ['-fPIC']
    cmd += ['-shared']  # not sure why this is necessary
    if extra_cflags:
        cmd += extra_cflags
    cmd += [self.srcfile]
    self.args = cmd
    # print(self.args)
    child_env = dict(os.environ)
    # Pin the locale so compiler diagnostics are in a known language.
    child_env['LC_ALL'] = 'C'
    # Invoke the compiler:
    self.p = Popen(self.args, env=child_env, stdout=PIPE, stderr=PIPE)
    self.out, self.err = self.p.communicate()
    if six.PY3:
        self.out = self.out.decode()
        self.err = self.err.decode()
    if self.p.wait() != 0:
        raise CompilationError(self)
    assert os.path.exists(self.modfile)
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the 'spatial' subpackage.

    Registers the qhull, ckdtree and _distance_wrap extensions and the
    tests data directory.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info
    from distutils.sysconfig import get_python_inc
    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')
    # Vendored qhull C sources.
    qhull_src = ['geom2.c', 'geom.c', 'global.c', 'io.c', 'libqhull.c',
                 'mem.c', 'merge.c', 'poly2.c', 'poly.c', 'qset.c',
                 'random.c', 'rboxlib.c', 'stat.c', 'user.c', 'usermem.c',
                 'userprintf.c', 'userprintf_rbox.c']
    qhull_src = [join('qhull', 'src', x) for x in qhull_src]
    # Python include dirs: add the platform-specific one only if distinct.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    # qhull builds against the optimized LAPACK found on this system.
    cfg = dict(get_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)
    cfg.setdefault('define_macros', []).append(('qh_QHpointer', '1'))
    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src,
                         **cfg)
    config.add_extension('ckdtree', sources=['ckdtree.c'])  # FIXME: cython
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()])
    return config
def init_env():
    """Return an Environment namedtuple (cc, cxx, cflags, ldflags, linker,
    make) describing how to build native extensions on this platform.

    Unix-likes use $CC/$CXX (default gcc/g++) with OVERRIDE_CFLAGS /
    OVERRIDE_LDFLAGS plus user CFLAGS/LDFLAGS; Windows uses the MSVC
    toolchain imported from setup.build_environment.
    """
    from setup.build_environment import msvc, is64bit, win_inc, win_lib, NMAKE
    from distutils import sysconfig
    linker = None
    if isunix:
        cc = os.environ.get('CC', 'gcc')
        cxx = os.environ.get('CXX', 'g++')
        debug = ''
        # debug = '-ggdb'
        cflags = os.environ.get('OVERRIDE_CFLAGS',
            '-Wall -DNDEBUG %s -fno-strict-aliasing -pipe' % debug)
        cflags = shlex.split(cflags) + ['-fPIC']
        ldflags = os.environ.get('OVERRIDE_LDFLAGS', '-Wall')
        ldflags = shlex.split(ldflags)
        # Honour user-supplied flags on top of the (overridable) defaults.
        cflags += shlex.split(os.environ.get('CFLAGS', ''))
        ldflags += shlex.split(os.environ.get('LDFLAGS', ''))
        cflags += ['-fvisibility=hidden']
    if islinux:
        cflags.append('-pthread')
        ldflags.append('-shared')
        cflags.append('-I'+sysconfig.get_python_inc())
        ldflags.append('-lpython'+sysconfig.get_python_version())
    if isbsd:
        cflags.append('-pthread')
        ldflags.append('-shared')
        cflags.append('-I'+sysconfig.get_python_inc())
        ldflags.append('-lpython'+sysconfig.get_python_version())
    if ishaiku:
        # NOTE(review): '-lpthread' in cflags (not ldflags) looks odd but is
        # kept as-is — confirm against the Haiku build.
        cflags.append('-lpthread')
        ldflags.append('-shared')
        cflags.append('-I'+sysconfig.get_python_inc())
        ldflags.append('-lpython'+sysconfig.get_python_version())
    if isosx:
        cflags.append('-D_OSX')
        # macOS extension modules are bundles; symbols resolve at load time.
        ldflags.extend('-bundle -undefined dynamic_lookup'.split())
        cflags.extend(['-fno-common', '-dynamic'])
        cflags.append('-I'+sysconfig.get_python_inc())
    if iswindows:
        cc = cxx = msvc.cc
        cflags = '/c /nologo /MD /W3 /EHsc /DNDEBUG'.split()
        ldflags = '/DLL /nologo /INCREMENTAL:NO /NODEFAULTLIB:libcmt.lib'.split()
        # cflags = '/c /nologo /Ox /MD /W3 /EHsc /Zi'.split()
        # ldflags = '/DLL /nologo /INCREMENTAL:NO /DEBUG'.split()
        if is64bit:
            cflags.append('/GS-')
        for p in win_inc:
            cflags.append('-I'+p)
        for p in win_lib:
            ldflags.append('/LIBPATH:'+p)
        cflags.append('-I%s'%sysconfig.get_python_inc())
        ldflags.append('/LIBPATH:'+os.path.join(sysconfig.PREFIX, 'libs'))
        linker = msvc.linker
    return namedtuple('Environment', 'cc cxx cflags ldflags linker make')(
        cc=cc, cxx=cxx, cflags=cflags, ldflags=ldflags, linker=linker,
        make=NMAKE if iswindows else 'make')
def get_gamera_include_dirs():
    """Return the candidate directories that may contain gamera headers."""
    # The "/usr/local" prefix is for recent Ubuntu versions, which
    # install addon modules no longer along with the python core.
    std_inc = get_python_inc()
    candidates = [
        os.path.join(std_inc, "gamera"),
        os.path.join(get_python_inc(prefix="/usr/local"), "gamera"),
        os.path.join(std_inc, "../gamera"),
        "/usr/include/gamera",
    ]
    return candidates
def build(extensions, arch='x86_64', global_macros=None, global_includes=None, global_lib_dirs=None):
    """extensions should be an array of dicts containing:
    {
      'name' : 'mylib.mymodule',
      'sources' : ['path/to/source1.cpp', 'path/to/source2.f90',
                   'path/to/source3.pyx', 'path/to/source4.c'],
      # optional:
      'include_dirs' : ['paths'],
      'define_macros' : [("MACRO_NAME", "VALUE")],  # or just ("MACRO_NAME",) but remember the ,!
      'link_to' : ['gmp']  # passes linker the -lgmp argument.
    }

    if global_macros is provided, and 'define_macros' and 'include_dirs' is
    missing for all extensions, common sources will only be built once, and
    linked multiple times. note, you may still declare global_macros and
    global_includes. note also, that the arch argument is now only used to
    determine temp directory names.
    """
    # Fixed: None comparisons now use `is None` (PEP 8) rather than `== None`.
    # The Python headers always go first on the include path.
    if global_includes is None:
        global_includes = [get_python_inc()]
    else:
        global_includes = [get_python_inc()] + global_includes
    if global_lib_dirs is None:
        global_lib_dirs = []
    # If no extension carries per-extension macros/includes/libs, the shared
    # sources can be compiled once and linked into every module.
    if (len(extensions) > 1 and
            all('define_macros' not in e and
                'include_dirs' not in e and
                'link_to' not in e for e in extensions)):
        _common_build(extensions, global_macros, global_includes,
                      global_lib_dirs, arch)
    else:
        for e in extensions:
            e['arch'] = arch
            _seperate_build(e, global_macros, global_includes, global_lib_dirs)
def test_get_python_inc(self):
    """get_python_inc() must point at an existing directory containing
    Python.h; when building from source, check Include/ under srcdir."""
    # The check for srcdir is copied from Python's setup.py,
    # and is necessary to make this test pass when building
    # Python in a directory other than the source directory.
    (srcdir,) = sysconfig.get_config_vars('srcdir')
    if not srcdir:
        inc_dir = sysconfig.get_python_inc()
    else:
        # This test is not really a proper test: when building
        # Python from source, even in the same directory,
        # we won't be testing the same thing as when running
        # distutils' tests on an installed Python. Nevertheless,
        # let's try to do our best: if we are running Python's
        # unittests from a build directory that is not the source
        # directory, the normal inc_dir will exist, it will just not
        # contain anything of interest.
        # Fixed: assertTrue replaces the assert_ alias, deprecated since
        # Python 2.7/3.2 and removed from unittest in Python 3.12.
        inc_dir = sysconfig.get_python_inc()
        self.assertTrue(os.path.isdir(inc_dir))
        # Now test the source location, to make sure Python.h does
        # exist.
        inc_dir = os.path.join(os.getcwd(), srcdir, 'Include')
        inc_dir = os.path.normpath(inc_dir)
    self.assertTrue(os.path.isdir(inc_dir), inc_dir)
    python_h = os.path.join(inc_dir, "Python.h")
    self.assertTrue(os.path.isfile(python_h), python_h)
def set_platform_directories():
    """Initialise the global variables relating to platform specific
    directories.
    """
    global plat_py_site_dir, plat_py_inc_dir, plat_py_conf_inc_dir
    global plat_bin_dir, plat_py_lib_dir, plat_sip_dir
    # We trust distutils for some stuff.
    plat_py_site_dir = sysconfig.get_python_lib(plat_specific=1)
    plat_py_inc_dir = sysconfig.get_python_inc()
    plat_py_conf_inc_dir = os.path.dirname(sysconfig.get_config_h_filename())
    if sys.platform == "win32":
        if sysconfig.python_build:
            # An uninstalled Python built from source keeps its import
            # library in the build tree (PCBuild), not under sys.prefix.
            try:
                plat_py_lib_dir = sysconfig.project_base + '\\PCBuild'
            except AttributeError:
                # Older distutils without project_base: fall back to the
                # directory layout relative to the include dir.
                plat_py_lib_dir = sysconfig.get_python_inc() + '\\..\\pcbuild'
        else:
            plat_py_lib_dir = sys.prefix + "\\libs"
        plat_bin_dir = sys.exec_prefix
        plat_sip_dir = sys.prefix + "\\sip"
    else:
        lib_dir = sysconfig.get_python_lib(plat_specific=1, standard_lib=1)
        plat_py_lib_dir = lib_dir + "/config"
        plat_bin_dir = sys.exec_prefix + "/bin"
        plat_sip_dir = sys.prefix + "/share/sip"
def verifyPythonDevel():
    """Ensure the Python C headers (Python.h) are installed.

    Best-effort: if the headers are missing, try installing the
    python-devel/python-dev package via the system package manager, then
    re-check and log an error on failure. Never raises.
    """
    import distutils.sysconfig as c
    if not os.path.exists(os.path.join(c.get_python_inc(), 'Python.h')):
        try:
            installPackage(yum_pkg_name="python-devel", apt_pkg_name="python-dev")
        # Fixed: narrowed from a bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt). Still deliberately best-effort:
        # failure is surfaced by the re-check below.
        except Exception:
            pass
    if not os.path.exists(os.path.join(c.get_python_inc(), 'Python.h')):
        log.error("Python development not installed, nor can we use the local package manager to do so")
def finalize_options (self):
    """Resolve all build_ext options (Python 2-era distutils).

    Pulls defaults from the 'build' command, appends the Python include
    directories, normalises string options to lists, and applies the
    Windows Debug/Release temp-dir split.
    """
    from distutils import sysconfig
    self.set_undefined_options ('build',
                                ('build_lib', 'build_lib'),
                                ('build_temp', 'build_temp'),
                                ('compiler', 'compiler'),
                                ('debug', 'debug'),
                                ('force', 'force'))
    if self.package is None:
        self.package = self.distribution.ext_package
    self.extensions = self.distribution.ext_modules
    # Make sure Python's include directories (for Python.h, config.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if type (self.include_dirs) is StringType:
        self.include_dirs = string.split (self.include_dirs, os.pathsep)
    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.append (py_include)
    if plat_py_include != py_include:
        self.include_dirs.append (plat_py_include)
    if type (self.libraries) is StringType:
        self.libraries = [self.libraries]
    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    if self.rpath is None:
        self.rpath = []
    # for extensions under windows use different directories
    # for Release and Debug builds.
    # also Python's library directory must be appended to library_dirs
    if os.name == 'nt':
        self.library_dirs.append (os.path.join(sys.exec_prefix, 'libs'))
        self.implib_dir = self.build_temp
        if self.debug:
            self.build_temp = os.path.join (self.build_temp, "Debug")
        else:
            self.build_temp = os.path.join (self.build_temp, "Release")
def test_embeddable(self):
    """Smoke-test embedding: compile a tiny C program that initialises the
    interpreter twice, importing PIL.Image each time, then run it and check
    the exit code.

    Windows-oriented as written: uses ctypes.windll, a ';' PATH separator
    and an .exe name.
    """
    import subprocess
    import ctypes
    from distutils import ccompiler, sysconfig
    with open('embed_pil.c', 'w') as fh:
        fh.write("""
#include "Python.h"

int main(int argc, char* argv[])
{
    char *home = "%s";
#if PY_MAJOR_VERSION >= 3
    wchar_t *whome = Py_DecodeLocale(home, NULL);
    Py_SetPythonHome(whome);
#else
    Py_SetPythonHome(home);
#endif

    Py_InitializeEx(0);
    Py_DECREF(PyImport_ImportModule("PIL.Image"));
    Py_Finalize();

    Py_InitializeEx(0);
    Py_DECREF(PyImport_ImportModule("PIL.Image"));
    Py_Finalize();

#if PY_MAJOR_VERSION >= 3
    PyMem_RawFree(whome);
#endif

    return 0;
}
""" % sys.prefix.replace('\\', '\\\\'))
    compiler = ccompiler.new_compiler()
    compiler.add_include_dir(sysconfig.get_python_inc())
    # LIBDIR is unset on Windows; derive the libs dir from the include dir.
    libdir = (sysconfig.get_config_var('LIBDIR') or
              sysconfig.get_python_inc().replace('include', 'libs'))
    print(libdir)
    compiler.add_library_dir(libdir)
    objects = compiler.compile(['embed_pil.c'])
    compiler.link_executable(objects, 'embed_pil')
    env = os.environ.copy()
    env["PATH"] = sys.prefix + ';' + env["PATH"]
    # do not display the Windows Error Reporting dialog
    ctypes.windll.kernel32.SetErrorMode(0x0002)
    process = subprocess.Popen(['embed_pil.exe'], env=env)
    process.communicate()
    self.assertEqual(process.returncode, 0)
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the 'spatial' subpackage.

    Registers the qhull, C++ cKDTree and _distance_wrap extensions and
    the tests data directory.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from numpy.distutils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc
    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')
    # qhull
    qhull_src = ['geom2.c', 'geom.c', 'global.c', 'io.c', 'libqhull.c',
                 'mem.c', 'merge.c', 'poly2.c', 'poly.c', 'qset.c',
                 'random.c', 'rboxlib.c', 'stat.c', 'user.c', 'usermem.c',
                 'userprintf.c', 'userprintf_rbox.c']
    qhull_src = [join('qhull', 'src', x) for x in qhull_src]
    # Python include dirs: add the platform-specific one only if distinct.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    # qhull builds against the optimized LAPACK found on this system.
    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)
    cfg.setdefault('define_macros', []).append(('qh_QHpointer', '1'))
    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src,
                         **cfg)
    # cKDTree
    ckdtree_src = ['ckdtree_query.cxx',
                   'ckdtree_globals.cxx',
                   'ckdtree_cpp_exc.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]
    ckdtree_headers = ['ckdtree_decl.h',
                       'ckdtree_exc.h',
                       'ckdtree_methods.h',
                       'ckdtree_utils.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]
    # Rebuild whenever the generated wrapper, the headers or the sources change.
    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=[join('ckdtree', 'ckdtree.cxx')] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])
    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))
    return config
def finalize_options(self):
    """Resolve all options for the q-extension build command.

    Pulls defaults from the 'build' command, appends the Python include
    directories, normalises string-valued options to lists, and parses
    the -D/-U style define/undef options.
    """
    from distutils import sysconfig
    self.set_undefined_options('build',
                               ('build_qext', 'build_lib'),
                               ('build_temp', 'build_temp'),
                               ('compiler', 'compiler'),
                               ('debug', 'debug'),
                               ('force', 'force'),
                               ('plat_name', 'plat_name'),
                               )
    self.extensions = self.distribution.qext_modules
    # TODO: Don't add python stuff to q extentions that don't need it
    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if isinstance(self.include_dirs, str):
        self.include_dirs = self.include_dirs.split(os.pathsep)
    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.append(py_include)
    if plat_py_include != py_include:
        self.include_dirs.append(plat_py_include)
    self.ensure_string_list('libraries')
    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    elif isinstance(self.library_dirs, str):
        self.library_dirs = self.library_dirs.split(os.pathsep)
    if self.rpath is None:
        self.rpath = []
    elif isinstance(self.rpath, str):
        self.rpath = self.rpath.split(os.pathsep)
    # self.define is "NAME:VALUE,NAME2" -> [('NAME', 'VALUE'), ('NAME2', '1')]
    if self.define:
        defines = [dfn.split(':') for dfn in self.define.split(',')]
        self.define = [(dfn if len(dfn) == 2 else dfn + ['1']) for dfn in defines]
    else:
        self.define = []
    if self.undef:
        self.undef = self.undef.split(',')
def __init__(self):
    """Record the platform, interpreter version and install-layout
    directories (includes, site-packages, data and libs)."""
    self.platform = sys.platform
    # Drop the release-level/serial byte from sys.hexversion.
    self.version = sys.hexversion >> 8
    self.inc_dir = sysconfig.get_python_inc()
    self.venv_inc_dir = sysconfig.get_python_inc(prefix=sys.prefix)
    self.module_dir = sysconfig.get_python_lib(plat_specific=1)
    if sys.platform == 'win32':
        self.data_dir, self.lib_dir = sys.prefix, sys.prefix + '\\libs'
    else:
        self.data_dir, self.lib_dir = sys.prefix + '/share', sys.prefix + '/lib'
def _find_python(self):
    "Find the appropriate options for creating SWIG-based Python modules"
    self.python_includes = []
    self.python_libdir = ""
    try:
        from distutils import sysconfig
        generic_inc = sysconfig.get_python_inc()
        platform_inc = sysconfig.get_python_inc(plat_specific=1)
        self.python_includes.append(generic_inc)
        # Only list the platform-specific dir when it differs.
        if generic_inc != platform_inc:
            self.python_includes.append(platform_inc)
        self.python_libdir = self.apath(sysconfig.PREFIX, "libs")
    except ImportError:
        # distutils unavailable: keep the empty defaults set above.
        pass
def cmake_build(self):
    """Configure, build and install metapy with CMake, then stage the
    Python package (dummy __init__.py, extra DLLs on Windows) under
    dist/metapy.
    """
    src_dir = os.path.dirname(os.path.realpath(__file__))
    cmake_exe = find_executable("cmake")
    if not cmake_exe:
        raise EnvironmentError("Could not find cmake executable")
    # NOTE(review): py_version is computed but not used below — the library
    # name is derived from sys.version_info directly. Confirm it is dead.
    py_version = "{}.{}".format(sys.version_info[0], sys.version_info[1])
    cmake_cmd = [cmake_exe, src_dir,
                 "-DCMAKE_BUILD_TYPE=Release",
                 "-DMETA_STATIC_UTF=On",
                 "-DBUILD_STATIC_ICU=On"]
    cmake_cmd.append("-DPYTHON_INCLUDE_DIRS={}".format(sysconfig.get_python_inc()))
    if platform.system() == 'Windows':
        # CMake cannot always locate the import library on Windows; derive
        # its path from the include-directory layout instead.
        libpython = "libpython{}{}.a".format(sys.version_info[0], sys.version_info[1])
        libpython_path = os.path.join(sysconfig.get_python_inc(), '..', 'libs', libpython)
        cmake_cmd.append("-DPYTHON_LIBRARY={}".format(libpython_path))
    if self.icu_root:
        cmake_cmd.extend(["-DICU_ROOT={}".format(self.icu_root)])
    if self.generator:
        cmake_cmd.extend(["-G{}".format(self.generator)])
    with tempdir() as dirpath:
        # NOTE(review): presumably tempdir() also chdirs into dirpath, since
        # the build directory is reported via os.getcwd() — confirm.
        print("Build directory: {}".format(os.getcwd()))
        if subprocess.call(cmake_cmd) != 0:
            raise EnvironmentError("CMake invocation failed")
        if subprocess.call([cmake_exe, "--build", "."]) != 0:
            raise EnvironmentError("CMake build failed")
        if subprocess.call([cmake_exe, "--build", ".", "--target", "install"]) != 0:
            raise EnvironmentError("CMake install failed")
    # Make dummy __init__.py
    initpy = os.path.join(src_dir, "dist", "metapy", "__init__.py")
    with open(initpy, "w") as f:
        f.write("from .metapy import *\n")
        f.write('__version__ = "{}"\n'.format(VERSION))
    # Copy over extra DLLs on Windows
    if platform.system() == 'Windows':
        dlls = ['libwinpthread-1.dll', 'libgcc_s_seh-1.dll',
                'libstdc++-6.dll', 'zlib1.dll']
        for dll in dlls:
            shutil.copyfile(os.path.join("c:", os.sep, "msys64", "mingw64", "bin", dll),
                            os.path.join(src_dir, "dist", "metapy", dll))
def configuration(parent_package="", top_path=None):
    """numpy.distutils configuration for the 'spatial' subpackage.

    Registers the qhull, ckdtree and _distance_wrap extensions plus the
    tests and benchmarks data directories.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.system_info import get_info
    from distutils.sysconfig import get_python_inc

    config = Configuration("spatial", parent_package, top_path)

    config.add_data_dir("tests")
    config.add_data_dir("benchmarks")

    # Vendored qhull C sources.
    qhull_src = [
        "geom2.c",
        "geom.c",
        "global.c",
        "io.c",
        "libqhull.c",
        "mem.c",
        "merge.c",
        "poly2.c",
        "poly.c",
        "qset.c",
        "random.c",
        "rboxlib.c",
        "stat.c",
        "user.c",
        "usermem.c",
        "userprintf.c",
        "userprintf_rbox.c",
    ]
    qhull_src = [join("qhull", "src", x) for x in qhull_src]

    # Python include dirs: add the platform-specific one only if distinct.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())

    # qhull builds against the optimized LAPACK found on this system.
    cfg = dict(get_info("lapack_opt"))
    cfg.setdefault("include_dirs", []).extend(inc_dirs)
    cfg.setdefault("define_macros", []).append(("qh_QHpointer", "1"))
    config.add_extension("qhull", sources=["qhull.c"] + qhull_src, **cfg)

    config.add_extension("ckdtree", sources=["ckdtree.c"])  # FIXME: cython

    config.add_extension(
        "_distance_wrap", sources=[join("src", "distance_wrap.c")], include_dirs=[get_numpy_include_dirs()]
    )

    return config
def make_includes():
    """Populate any unset entries of the global ``include`` dict with the
    default Python, Cython and numpy header locations.

    Entries that already hold a value are left untouched.
    """
    # Fixed: use truthiness instead of `== ''` so an entry left as None
    # (not just '') also receives its default, per the function's intent.
    if not include['python']:
        include['python'] = get_python_inc()
    if not include['cython']:
        # Cython ships its .pxd includes inside site-packages.
        include['cython'] = get_python_lib() + '/Cython/Includes/'
    if not include['numpy']:
        include['numpy'] = np.get_include()
def load(self):
    """Load the arch-specific JitCore shared library and hand TCC the
    include and library search paths it needs for emulation helpers.
    """
    # os.path.join(os.path.dirname(os.path.realpath(__file__)), "jitter")
    lib_dir = os.path.dirname(os.path.realpath(__file__))
    libs = []
    libs.append(os.path.join(lib_dir, 'arch/JitCore_%s.so' % (self.ir_arch.arch.name)))
    libs = ';'.join(libs)
    jittcc_path = Jittcc.__file__
    include_dir = os.path.dirname(jittcc_path)
    include_dir += ";" + os.path.join(include_dir, "arch")
    # print include_dir
    # XXX HACK
    # As debian/ubuntu have moved some include files using arch directory,
    # TCC doesn't know them, so we get the info from GCC
    # For example /usr/include/x86_64-linux-gnu which contains limits.h
    p = Popen(["cc", "-Wp,-v", "-E", "-"],
              stdout=PIPE, stderr=PIPE, stdin=PIPE)
    p.stdin.close()
    # NOTE(review): Python 2 style — stderr.read() is assumed to return str;
    # on Python 3 it returns bytes and the '\n' split would fail. Confirm
    # the targeted interpreter before porting.
    include_files = p.stderr.read().split('\n')
    # cc lists its search dirs one per line, indented with a single space.
    include_files = [x[1:] for x in include_files if x.startswith(' /usr/include')]
    include_files += [include_dir, get_python_inc()]
    include_files = ";".join(include_files)
    Jittcc.tcc_set_emul_lib_path(include_files, libs)
def set_platform_directories():
    """Initialise the global variables relating to platform specific
    directories.
    """
    global plat_py_site_dir, plat_py_inc_dir, plat_py_conf_inc_dir
    global plat_bin_dir, plat_py_lib_dir, plat_sip_dir
    # We trust distutils for some stuff.
    if os.environ.get("CROSS_COMPILE") == "yes":
        # Cross-compiling: every path comes from the KETLAER sysroot
        # instead of the running interpreter (hard-wired to python2.6).
        ketlaer = os.environ.get("KETLAER")
        plat_py_site_dir = ketlaer + "/lib/python2.6/site-packages"
        plat_py_inc_dir = ketlaer + "/include/python2.6"
        plat_py_conf_inc_dir = ketlaer + "/include/python2.6"
    else:
        plat_py_site_dir = sysconfig.get_python_lib(plat_specific=1)
        plat_py_inc_dir = sysconfig.get_python_inc()
        plat_py_conf_inc_dir = os.path.dirname(sysconfig.get_config_h_filename())
    if sys.platform == "win32":
        plat_py_lib_dir = sys.prefix + "\\libs"
        plat_bin_dir = sys.exec_prefix
        plat_sip_dir = sys.prefix + "\\sip"
    else:
        if os.environ.get("CROSS_COMPILE") == "yes":
            # 'ketlaer' is bound above because the same env check matched.
            lib_dir = ketlaer + "/python/lib/python2.6"
        else:
            lib_dir = sysconfig.get_python_lib(plat_specific=1, standard_lib=1)
        plat_py_lib_dir = lib_dir + "/config"
        plat_bin_dir = sys.exec_prefix + "/bin"
        plat_sip_dir = sys.prefix + "/share/sip"
def set_configuration(env, use_distutils): """Set construction variables which are platform dependants. If use_distutils == True, use distutils configuration. Otherwise, use 'sensible' default. Any variable already defined is untouched.""" # We define commands as strings so that we can either execute them using # eval (same python for scons and distutils) or by executing them through # the shell. dist_cfg = {'PYEXTCC': ("sysconfig.get_config_var('CC')", False), 'PYEXTCFLAGS': ("sysconfig.get_config_var('CFLAGS')", True), 'PYEXTCCSHARED': ("sysconfig.get_config_var('CCSHARED')", False), 'PYEXTLINKFLAGS': ("sysconfig.get_config_var('LDFLAGS')", True), 'PYEXTLINK': ("sysconfig.get_config_var('LDSHARED')", False), 'PYEXTINCPATH': ("sysconfig.get_python_inc()", False), 'PYEXTSUFFIX': ("sysconfig.get_config_var('SO')", False)} from distutils import sysconfig # We set the python path even when not using distutils, because we rarely # want to change this, even if not using distutils ifnotset(env, 'PYEXTINCPATH', sysconfig.get_python_inc()) if use_distutils: for k, (v, should_split) in dist_cfg.items(): val = eval(v) if should_split: val = val.split() ifnotset(env, k, val) else: _set_configuration_nodistutils(env)
def _compile(self, code, f_code, kind):
    """Compile the generated C file for (code, f_code, kind) into a shared
    object with gcc and return a callable loaded from it.

    Intermediate .c / .so files are deleted unless self._keep_c /
    self._keep_so request otherwise. Raises CompilationError when gcc's
    stderr matches self._error_pattern; other diagnostics become
    CompilationWarning.
    """
    cname = self._cname(code, f_code, kind)
    soname = self._soname(code, f_code, kind)
    os.stat(cname)  # raises FileNotFoundError if doesn't exist
    # -O3, Python headers on the include path, -fPIC, C11 (GNU dialect),
    # linked as a shared object named `soname`.
    _, err, status = gcc(
        Flag.O(3),
        Flag.I(get_python_inc()),
        Flag.f('PIC'),
        Flag.std('gnu11'),
        Flag.shared,
        Flag.o(soname),
        *(cname,) + self._extra_compile_args
    )
    if not self._keep_c:
        os.remove(cname)
    if err:
        # Only treat recognised error lines as fatal; anything else on
        # stderr is surfaced as a warning.
        if self._error_pattern.findall(err):
            raise CompilationError(err)
        else:
            warn(CompilationWarning(err))
    f = create_callable(soname)
    if not self._keep_so:
        os.remove(soname)
    return f
def build_dlib():
    """use cmake to build and install the extension

    Configures a fresh build directory under tools/python/build, invokes
    cmake to configure and then build+install, and raises
    DistutilsSetupError on any failure.
    """
    if cmake_path is None:
        raise DistutilsSetupError("Cannot find cmake in the path. Please specify its path with --cmake parameter.")
    platform_arch = platform.architecture()[0]
    log.info("Detected Python architecture: %s" % platform_arch)
    # make sure build artifacts are generated for the version of Python currently running
    cmake_extra_arch = []
    if sys.version_info >= (3, 0):
        cmake_extra_arch += ["-DPYTHON3=yes"]
    if platform_arch == "64bit" and sys.platform == "win32":
        # 64bit build on Windows
        if not generator_set:
            # see if we can deduce the 64bit default generator
            cmake_extra_arch += get_msvc_win64_generator()
        # help cmake to find Python library in 64bit Python in Windows
        # because cmake is 32bit and cannot find PYTHON_LIBRARY from registry.
        inc_dir = get_python_inc()
        cmake_extra_arch += ["-DPYTHON_INCLUDE_DIR={inc}".format(inc=inc_dir)]
        # this imitates cmake in path resolution
        py_ver = get_python_version()
        for ext in [py_ver.replace(".", "") + ".lib",
                    py_ver + "mu.lib",
                    py_ver + "m.lib",
                    py_ver + "u.lib"]:
            py_lib = os.path.abspath(os.path.join(inc_dir, "../libs/", "python" + ext))
            if os.path.exists(py_lib):
                cmake_extra_arch += ["-DPYTHON_LIBRARY={lib}".format(lib=py_lib)]
                break
    build_dir = os.path.join(script_dir, "./tools/python/build")
    if os.path.exists(build_dir):
        log.info("Removing build directory %s" % build_dir)
        rmtree(build_dir)
    try:
        os.makedirs(build_dir)
    except OSError:
        pass
    # cd build
    os.chdir(build_dir)
    log.info("Configuring cmake ...")
    cmake_cmd = [cmake_path, ".."] + cmake_extra + cmake_extra_arch
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake configuration failed!")
    log.info("Build using cmake ...")
    cmake_cmd = [cmake_path, "--build", ".", "--config", cmake_config, "--target", "install"]
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake build failed!")
    # cd back where setup awaits
    os.chdir(script_dir)
def run(self):
    """Probe for unicodeobject.h and, when found, define
    HAVE_UNICODEOBJECT_H on the build_ext command."""
    # work around bug in Python 2.2-supplied check_header, fixed
    # in Python 2.3; body needs to be a valid, non-zero-length string
    have_header = self.try_cpp(
        body="/* body */",
        headers=['unicodeobject.h'],
        include_dirs=[sysconfig.get_python_inc()],
    )
    if not have_header:
        return
    build_cmd = self.distribution.reinitialize_command('build_ext')
    build_cmd.define = 'HAVE_UNICODEOBJECT_H'
def configure():
    """
    Look up the dependencies
    - Python.h
    - numpy headers
    - boost headers (from configuration flag)
    meanwhile, pass through any config-looking arguments (ex: --boost=...)

    Returns the space-joined configure argument string, or "--version"
    when that flag is present.
    """
    # Python.h
    python_inc = sysconfig.get_python_inc()
    # Libpython
    python_libdir = join(sysconfig.get_python_lib(standard_lib=True), "config")
    python_lib = "python" + sysconfig.get_python_version()
    if '--version' in sys.argv:
        return "--version"
    # numpy headers
    try:
        import numpy
        numpy_inc = numpy.get_include()
    except ImportError:
        die("Requires numpy >= 1.6.0")
    configArgs = []
    # Pass config-style flags through, removing them from sys.argv so
    # later option parsing does not see them (iterate over a copy).
    for arg in sys.argv[:]:
        if arg.startswith("-") and arg not in ["--help", "--version"]:
            configArgs.append(arg)
            sys.argv.remove(arg)
    # Fixed: the format strings previously ended with a stray space, which
    # produced doubled spaces after the final " ".join.
    configArgs.append("--python-include=%s" % python_inc)
    configArgs.append("--numpy-include=%s" % numpy_inc)
    return " ".join(configArgs)
def compile(self):
    """Unpack the Boolector archive and build it (with its lingeling and
    btor2tools dependencies) plus the Python bindings."""
    # Unpack
    SolverInstaller.untar(os.path.join(self.base_dir, self.archive_name), self.extract_path)
    # Build lingeling
    SolverInstaller.run("bash ./contrib/setup-lingeling.sh", directory=self.extract_path)
    # Build Btor
    SolverInstaller.run("bash ./contrib/setup-btor2tools.sh", directory=self.extract_path)
    # Inject Python library and include paths into CMake because Boolector
    # search system can be fooled in some systems
    import distutils.sysconfig as sysconfig
    PYTHON_LIBRARY = sysconfig.get_config_var('LIBDIR')
    PYTHON_INCLUDE_DIR = sysconfig.get_python_inc()
    CMAKE_OPTS = ' -DPYTHON_LIBRARY=' + PYTHON_LIBRARY
    CMAKE_OPTS += ' -DPYTHON_INCLUDE_DIR=' + PYTHON_INCLUDE_DIR
    # Build Boolector Solver
    SolverInstaller.run("bash ./configure.sh --python",
                        directory=self.extract_path,
                        env_variables={"CMAKE_OPTS": CMAKE_OPTS})
    SolverInstaller.run("make -j2",
                        directory=os.path.join(self.extract_path, "build"))
def generate(env):
    """Install SWIG/Python construction-variable defaults on *env* and
    attach the SwigPyModule builder method."""
    from distutils import sysconfig
    defaults = {
        'SWIGPY_APPEND_CPPPATH': [sysconfig.get_python_inc()],
        'SWIGPY_SHLIBPREFIX': '_',
        'SWIGPY_APPEND_SWIGFLAGS': ['-python', '-builtin'],
    }
    # SetDefault leaves any variable the user already defined untouched.
    env.SetDefault(**defaults)
    env.AddMethod(_SwigPyModule, 'SwigPyModule')
def build_extension(self, ext):
    """Configure and build *ext* with CMake inside self.build_temp.

    The shared-library output directory is pointed directly at the final
    extension location, so no post-build copy step is needed.
    """
    extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
    # Fixed: removed the unused local `python_library`
    # (str(get_config_var('LIBDIR'))) — its value was never passed to CMake.
    python_include_directory = str(get_python_inc())
    cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
                  '-DPYTHON_EXECUTABLE=' + sys.executable,
                  '-DPYTHON_INCLUDE_DIR=' + python_include_directory,
                  ]
    cfg = 'Debug' if self.debug else 'Release'
    build_args = ['--config', cfg]
    if platform.system() == "Windows":
        # Multi-config generators ignore the plain output-directory variable;
        # set the per-configuration variant as well.
        cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
        if sys.maxsize > 2**32:
            cmake_args += ['-A', 'x64']
        build_args += ['--', '/m']
    else:
        cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
        build_args += ['--', '-j2']
    cmake_args += CMAKE_ADDITIONAL_OPT
    env = os.environ.copy()
    # Embed the package version into the compiled module.
    env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\"'.format(env.get('CXXFLAGS', ''),
                                                          self.distribution.get_version())
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)
    subprocess.check_call(['cmake', ext.sourcedir] + cmake_args,
                          cwd=self.build_temp, env=env)
    subprocess.check_call(['cmake', '--build', '.'] + build_args,
                          cwd=self.build_temp)
def run(self):
    """Run the CMake configure + build once per process.

    A class-level `built` flag makes repeated invocations no-ops.
    Honours the CMAKE_ARGS environment variable and an optional
    Ninja generator.
    """
    if cmake_build.built:
        return
    cmake_build.built = True
    if not os.path.exists(CMAKE_BUILD_DIR):
        os.makedirs(CMAKE_BUILD_DIR)
    with cd(CMAKE_BUILD_DIR):
        # configure
        cmake_args = [
            find_executable('cmake'),
            '-DBUILD_SHARED_LIBS=OFF',
            '-DPYTHON_EXECUTABLE:FILEPATH={}'.format(sys.executable),
            '-DPYTHON_INCLUDE_DIR={}'.format(sysconfig.get_python_inc()),
            '-DBUILD_TEST=OFF',
            '-DBUILD_BENCHMARK=OFF',
            '-DBUILD_BINARY=OFF',
        ]
        if NINJA:
            cmake_args.extend(['-G', 'Ninja'])
        if 'CMAKE_ARGS' in os.environ:
            extra_cmake_args = shlex.split(os.environ['CMAKE_ARGS'])
            # prevent crossfire with downstream scripts
            del os.environ['CMAKE_ARGS']
            log.info('Extra cmake args: {}'.format(extra_cmake_args))
            cmake_args.extend(extra_cmake_args)
        # The source directory is always the last cmake argument.
        cmake_args.append(TOP_DIR)
        subprocess.check_call(cmake_args)
        build_args = [NINJA or MAKE]
        # control the number of concurrent jobs
        if self.jobs is not None:
            build_args.extend(['-j', str(self.jobs)])
        subprocess.check_call(build_args)
def setup_package():
    """Entry point of thinc's setup.py.

    Builds the Cython extension list and calls ``setup()``.
    ``python setup.py clean`` is intercepted and dispatched to ``clean()``.

    Fix: the loop over MOD_NAMES computed ``mod_path`` twice — once before
    the gpu_ops skip (a dead store) and once after.  It is now computed a
    single time, after the ``continue`` guard.
    """
    root = os.path.abspath(os.path.dirname(__file__))
    if len(sys.argv) > 1 and sys.argv[1] == "clean":
        return clean(root)

    with chdir(root):
        # Read package metadata without importing the package.
        with open(os.path.join(root, "thinc", "about.py")) as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, "README.md"), encoding="utf8") as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, "include"),
        ]

        # MSVC 9 (VS 2008) lacks stdint.h; ship a compatibility header.
        if (ccompiler.new_compiler().compiler_type == "msvc"
                and msvccompiler.get_build_version() == 9):
            include_dirs.append(os.path.join(root, "include", "msvc9"))

        ext_modules = []
        for mod_name in MOD_NAMES:
            # gpu_ops is distributed as the separate thinc_gpu_ops package.
            if mod_name.endswith("gpu_ops"):
                continue
            mod_path = mod_name.replace(".", "/") + ".cpp"
            ext_modules.append(
                Extension(mod_name, [mod_path], language="c++",
                          include_dirs=include_dirs))
        ext_modules.append(
            Extension(
                "thinc.extra.wrapt._wrappers",
                ["thinc/extra/wrapt/_wrappers.c"],
                include_dirs=include_dirs,
            ))

        if not is_source_release(root):
            generate_cython(root, "thinc")

        setup(
            name="thinc",
            zip_safe=False,
            packages=PACKAGES,
            package_data={"": ["*.pyx", "*.pxd", "*.pxi", "*.cpp"]},
            description=about["__summary__"],
            long_description=readme,
            long_description_content_type="text/markdown",
            author=about["__author__"],
            author_email=about["__email__"],
            version=about["__version__"],
            url=about["__uri__"],
            license=about["__license__"],
            ext_modules=ext_modules,
            setup_requires=["numpy>=1.7.0"],
            install_requires=[
                # Explosion-provided dependencies
                "murmurhash>=0.28.0,<1.1.0",
                "cymem>=2.0.2,<2.1.0",
                "preshed>=1.0.1,<2.1.0",
                "blis>=0.4.0,<0.5.0",
                "wasabi>=0.0.9,<1.1.0",
                "srsly>=0.0.6,<1.1.0",
                # Third-party dependencies
                "numpy>=1.7.0",
                "plac>=0.9.6,<1.0.0",
                "tqdm>=4.10.0,<5.0.0",
                'pathlib==1.0.1; python_version < "3.4"',
            ],
            extras_require={
                "cuda": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy>=5.0.0b4"],
                "cuda80": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda80>=5.0.0b4"],
                "cuda90": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda90>=5.0.0b4"],
                "cuda91": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda91>=5.0.0b4"],
                "cuda92": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda92>=5.0.0b4"],
                "cuda100": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda100>=5.0.0b4"],
            },
            classifiers=[
                "Development Status :: 5 - Production/Stable",
                "Environment :: Console",
                "Intended Audience :: Developers",
                "Intended Audience :: Science/Research",
                "License :: OSI Approved :: MIT License",
                "Operating System :: POSIX :: Linux",
                "Operating System :: MacOS :: MacOS X",
                "Operating System :: Microsoft :: Windows",
                "Programming Language :: Cython",
                "Programming Language :: Python :: 2.6",
                "Programming Language :: Python :: 2.7",
                "Programming Language :: Python :: 3.3",
                "Programming Language :: Python :: 3.4",
                "Programming Language :: Python :: 3.5",
                "Programming Language :: Python :: 3.6",
                "Programming Language :: Python :: 3.7",
                "Topic :: Scientific/Engineering",
            ],
            cmdclass={"build_ext": build_ext_subclass},
        )
#
# c_overviewer extension
#
# Third-party modules - we depend on numpy for everything
# Obtain the numpy include directory. This logic works across numpy versions.
try:
    numpy_include = numpy.get_include()
except AttributeError:
    # very old numpy releases exposed the include path under another name
    numpy_include = numpy.get_numpy_include()

# PIL headers: honor an explicit PIL_INCLUDE_DIR, otherwise probe the
# conventional 'Imaging' directory next to the Python headers.
try:
    pil_include = os.environ['PIL_INCLUDE_DIR'].split(os.pathsep)
except Exception:
    pil_include = [os.path.join(get_python_inc(plat_specific=1), 'Imaging')]
    if not os.path.exists(pil_include[0]):
        pil_include = []

# used to figure out what files to compile
# auto-created from files in primitives/, but we need the raw names so
# we can use them later.
primitives = [
    os.path.splitext(os.path.basename(path))[0]
    for path in glob.glob("overviewer_core/src/primitives/*.c")
]

c_overviewer_files = [
    'main.c', 'composite.c', 'iterate.c', 'endian.c', 'rendermodes.c'
]
from distutils.sysconfig import get_python_inc from pkg_resources import resource_filename import re import os import sys import subprocess import numpy as np import platform import shutil # get Python prefix prefix = sys.prefix # get Python header paths: pyhdr = get_python_inc() # get numpy header path: nphdr = np.get_include() # minimum required CUDA compute capability mincc = 35 # --------------------------------------------------------------------------------- def path_niftypet_local(): '''Get the path to the local (home) folder for NiftyPET resources.''' # if using conda put the resources in the folder with the environment name if 'CONDA_DEFAULT_ENV' in os.environ: env = os.environ['CONDA_DEFAULT_ENV'] print 'i> conda environment found:', env else:
def _access_cpu_module(self, name, nvar_arg_pars):
    '''
    Access a C++ module, compiling it if it has not been done yet.
    Compiled modules are memoized per resolved module name, so a second
    request for the same (name, nvar_arg_pars) pair returns the cached
    ctypes library without recompiling.

    :param name: name of the module.
    :type name: str
    :param nvar_arg_pars: number of variable argument parameters, forwarded
        to the name resolution and code generation — TODO confirm exact
        semantics against :mod:`core` / :mod:`autocode`.
    :returns: resolved module name and the compiled (loaded) module.
    :rtype: tuple(str, module)
    :raises FileNotFoundError: If the XML file can not be found.
    '''
    modname = core.parse_module_name(name, nvar_arg_pars)

    pdf_paths = core.get_pdf_src()

    if modname in self.__cpu_module_cache:
        # Do not compile again the PDF source if it has already been done
        module = self.__cpu_module_cache[modname]
    else:
        xml_source = None
        # Check if it exists in any of the provided paths
        for p in pdf_paths:
            fp = os.path.join(p, f'{name}.xml')
            if os.path.isfile(fp):
                xml_source = fp
                break

        if xml_source is None:
            raise FileNotFoundError(
                f'XML file for function {name} not found in any of the provided paths: {pdf_paths}')

        # Write the code into a temporary .cpp file to be compiled below
        source = os.path.join(self.__tmpdir.name, f'{modname}.cpp')
        code = autocode.generate_code(xml_source, core.CPU, nvar_arg_pars)
        with open(source, 'wt') as f:
            f.write(code)

        # Compile the C++ code and load the library
        compiler = ccompiler.new_compiler()

        try:
            python_inc = sysconfig.get_python_inc()
            include_dirs = [python_inc]

            # search for GSL headers under the prefix containing the
            # Python headers
            search_prefix = os.path.dirname(python_inc)
            gsl_incpath = find_dir('gsl', search_prefix)
            if gsl_incpath is not None:
                include_dirs.append(os.path.dirname(
                    os.path.abspath(gsl_incpath)))

            # compile the source code
            objects = compiler.compile(
                [source], output_dir=self.__tmpdir.name,
                include_dirs=include_dirs, extra_preargs=CFLAGS)

            # search for GSL shared library near the Python standard library
            search_prefix = os.path.dirname(
                sysconfig.get_python_lib(standard_lib=True))
            gsl_libpath = find_file(
                compiler.library_filename('gsl', 'shared'), search_prefix)
            if gsl_libpath is not None:
                library_dirs = [os.path.dirname(
                    os.path.abspath(gsl_libpath))]
            else:
                # fall back to the compiler's default search paths
                library_dirs = None

            # link the source files into a shared library inside the tmpdir
            libname = os.path.join(
                self.__tmpdir.name,
                compiler.library_filename(modname, 'shared'))
            compiler.link(f'{modname} library', objects, libname,
                          library_dirs=library_dirs,
                          extra_preargs=CFLAGS,
                          libraries=['stdc++', 'gsl', 'gslcblas'])
        except Exception as ex:
            # On failure, log the generated code with line numbers to make
            # the compiler diagnostics actionable, then re-raise.
            nl = len(str(code.count(os.linesep)))
            code = os.linesep.join(f'{i + 1:>{nl}}: {l}'
                                   for i, l in enumerate(code.split(os.linesep)))
            logger.error(f'Error found compiling:{os.linesep}{code}')
            raise ex

        module = ctypes.cdll.LoadLibrary(libname)

        self.__cpu_module_cache[modname] = module

    return modname, module
from pake import cc, task, cd from distutils import sysconfig import os libraries = [ 'gdi32', 'user32', 'kernel32', 'shell32', 'imm32', 'ws2_32', ] macros = [('_CRT_SECURE_NO_WARNINGS', True), ('DEBUG', True)] include_dirs = ['.', sysconfig.get_python_inc()] headers = [ 'vuuvv.h', 'defines.h', 'log.h', 'common.h', 'stream.h', 'config.h', 'eventloop.h', 'event_iocp.h', 'os/win32/defines.h', ] vuuvv_srcs = [ ['vuuvv.c', headers],
extras_require={ 'gui_scripts': ['PyQt4'], }, packages=[packname] + subpacknames, package_dir={'pyrocko': 'src'}, ext_package=packname, ext_modules=[ Extension( 'util_ext', extra_compile_args=['-Wextra'], sources=[op.join('src', 'ext', 'util_ext.c')]), Extension( 'signal_ext', include_dirs=[get_python_inc(), numpy.get_include()], extra_compile_args=['-Wextra'], sources=[op.join('src', 'ext', 'signal_ext.c')]), Extension( 'mseed_ext', include_dirs=[get_python_inc(), numpy.get_include(), get_build_include('libmseed/')], library_dirs=[get_build_include('libmseed/')], libraries=['mseed'], extra_compile_args=['-Wextra'], sources=[op.join('src', 'io', 'ext', 'mseed_ext.c')]), Extension( 'evalresp_ext', include_dirs=[get_python_inc(), numpy.get_include(),
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``spatial`` subpackage.

    Declares the qhull, cKDTree, _distance_wrap, _voronoi and _hausdorff
    extensions and returns the assembled Configuration object.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from numpy.distutils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)

    config.add_data_dir('tests')

    # qhull
    qhull_src = list(glob.glob(join(dirname(__file__), 'qhull', 'src', '*.c')))

    # Python headers: add the platform-specific directory only when it
    # differs from the generic one.
    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())

    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)

    def get_qhull_misc_config(ext, build_dir):
        # Generate a header file containing defines
        config_cmd = config.get_config_cmd()
        defines = []
        if config_cmd.check_func('open_memstream', decl=True, call=True):
            defines.append(('HAVE_OPEN_MEMSTREAM', '1'))
        target = join(dirname(__file__), 'qhull_misc_config.h')
        with open(target, 'w') as f:
            for name, value in defines:
                f.write('#define {0} {1}\n'.format(name, value))

    config.add_extension('qhull',
                         sources=['qhull.c'] + qhull_src + [get_qhull_misc_config],
                         **cfg)

    # cKDTree
    ckdtree_src = ['query.cxx',
                   'build.cxx',
                   'globals.cxx',
                   'cpp_exc.cxx',
                   'query_pairs.cxx',
                   'count_neighbors.cxx',
                   'query_ball_point.cxx',
                   'query_ball_tree.cxx',
                   'sparse_distances.cxx']

    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = ['ckdtree_decl.h',
                       'cpp_exc.h',
                       'ckdtree_methods.h',
                       'cpp_utils.h',
                       'rectangle.h',
                       'distance.h',
                       'distance_box.h',
                       'ordered_pair.h']

    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    # headers listed in depends so the extension rebuilds when they change
    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    config.add_extension('ckdtree',
                         sources=['ckdtree.cxx'] + ckdtree_src,
                         depends=ckdtree_dep,
                         include_dirs=inc_dirs + [join('ckdtree', 'src')])

    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))

    config.add_extension('_voronoi',
                         sources=['_voronoi.c'])

    config.add_extension('_hausdorff',
                         sources=['_hausdorff.c'])

    return config
def build_pyqt_extension(self, ext, dest):
    """Build a PyQt (sip-based) extension with qmake and copy it to *dest*.

    Generates the sip bindings, writes a qmake .pro file describing the
    plugin, runs qmake + make in the per-extension build directory and
    copies the resulting shared library to *dest*.  Rebuilds only when a
    source/header is newer than *dest* (see ``self.newer``).
    """
    from setup.build_environment import pyqt, qmakespec, QMAKE
    from setup.parallel_build import cpu_count
    from distutils import sysconfig
    pyqt_dir = self.j(self.build_dir, 'pyqt')
    src_dir = self.j(pyqt_dir, ext.name)
    if not os.path.exists(src_dir):
        os.makedirs(src_dir)
    sip = self.build_sip_files(ext, src_dir)
    # qmake project file; doubled braces survive str.format as literals
    pro = textwrap.dedent(
        '''\
        TEMPLATE = lib
        CONFIG += release plugin
        QT += widgets
        TARGET = {target}
        HEADERS = {headers}
        SOURCES = {sources}
        INCLUDEPATH += {sipinc} {pyinc}
        VERSION = {ver}
        win32 {{
            LIBS += {py_lib}
            TARGET_EXT = .dll
        }}
        macx {{
            QMAKE_LFLAGS += "-undefined dynamic_lookup"
        }}
        ''').format(target=sip['target'],
                    headers=' '.join(sip['headers'] + ext.headers),
                    sources=' '.join(ext.sources + sip['sources']),
                    sipinc=pyqt['sip_inc_dir'],
                    pyinc=sysconfig.get_python_inc(),
                    py_lib=py_lib,
                    ver=__version__)
    for incdir in ext.inc_dirs:
        pro += '\nINCLUDEPATH += ' + incdir
    if not iswindows and not isosx:
        # Ensure that only the init symbol is exported
        pro += '\nQMAKE_LFLAGS += -Wl,--version-script=%s.exp' % sip['target']
        with open(os.path.join(src_dir, sip['target'] + '.exp'), 'wb') as f:
            f.write(('{ global: %s; local: *; };'
                     % init_symbol_name(sip['target'])).encode('utf-8'))
    if ext.qt_private_headers:
        # private Qt module headers are requested via the '-private' suffix
        qph = ' '.join(x + '-private' for x in ext.qt_private_headers)
        pro += '\nQT += ' + qph
    proname = '%s.pro' % sip['target']
    with open(os.path.join(src_dir, proname), 'wb') as f:
        f.write(pro.encode('utf-8'))
    cwd = os.getcwd()
    qmc = []
    if iswindows:
        qmc += ['-spec', qmakespec]
    # platform-dependent name of the library qmake/make will produce
    fext = 'dll' if iswindows else 'dylib' if isosx else 'so'
    name = '%s%s.%s' % ('release/' if iswindows else 'lib', sip['target'], fext)
    try:
        os.chdir(src_dir)
        if self.newer(
                dest,
                sip['headers'] + sip['sources'] + ext.sources + ext.headers):
            self.check_call([QMAKE] + qmc + [proname])
            self.check_call([self.env.make] + (
                [] if iswindows else ['-j%d' % (cpu_count or 1)]))
            shutil.copy2(os.path.realpath(name), dest)
            if iswindows and os.path.exists(name + '.manifest'):
                shutil.copy2(name + '.manifest', dest + '.manifest')
    finally:
        # always restore the working directory, even on build failure
        os.chdir(cwd)
def setup_package():
    """Entry point of spaCy's setup.py.

    Builds the Cython extension list and calls ``setup()``.
    ``python setup.py clean`` is intercepted and dispatched to ``clean()``.
    """
    root = os.path.abspath(os.path.dirname(__file__))
    if len(sys.argv) > 1 and sys.argv[1] == "clean":
        return clean(root)

    with chdir(root):
        # Read package metadata without importing the package.
        with io.open(os.path.join(root, "spacy", "about.py"), encoding="utf8") as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, "README.md"), encoding="utf8") as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, "include"),
        ]

        # MSVC 9 (VS 2008) needs the bundled compatibility headers.
        if (ccompiler.new_compiler().compiler_type == "msvc"
                and msvccompiler.get_build_version() == 9):
            include_dirs.append(os.path.join(root, "include", "msvc9"))

        ext_modules = []
        for mod_name in MOD_NAMES:
            mod_path = mod_name.replace(".", "/") + ".cpp"
            extra_link_args = []
            extra_compile_args = []
            # ???
            # Imported from patch from @mikepb
            # See Issue #267. Running blind here...
            if sys.platform == "darwin":
                dylib_path = [".." for _ in range(mod_name.count("."))]
                dylib_path = "/".join(dylib_path)
                dylib_path = "@loader_path/%s/spacy/platform/darwin/lib" % dylib_path
                extra_link_args.append("-Wl,-rpath,%s" % dylib_path)
            ext_modules.append(
                Extension(
                    mod_name,
                    [mod_path],
                    language="c++",
                    include_dirs=include_dirs,
                    extra_link_args=extra_link_args,
                ))

        # Only regenerate .cpp from .pyx when building from a source checkout.
        if not is_source_release(root):
            generate_cython(root, "spacy")
            gzip_language_data(root, "spacy/lang")

        setup(
            name="spacy",
            zip_safe=False,
            packages=PACKAGES,
            package_data=PACKAGE_DATA,
            description=about["__summary__"],
            long_description=readme,
            long_description_content_type="text/markdown",
            author=about["__author__"],
            author_email=about["__email__"],
            version=about["__version__"],
            url=about["__uri__"],
            license=about["__license__"],
            ext_modules=ext_modules,
            scripts=["bin/spacy"],
            install_requires=[
                "numpy>=1.15.0",
                "murmurhash>=0.28.0,<1.1.0",
                "cymem>=2.0.2,<2.1.0",
                "preshed>=2.0.1,<2.1.0",
                "thinc>=7.0.8,<7.1.0",
                "blis>=0.2.2,<0.3.0",
                "plac<1.0.0,>=0.9.6",
                "requests>=2.13.0,<3.0.0",
                "wasabi>=0.2.0,<1.1.0",
                "srsly>=0.1.0,<1.1.0",
                'pathlib==1.0.1; python_version < "3.4"',
            ],
            setup_requires=["wheel"],
            extras_require={
                "cuda": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy>=5.0.0b4"],
                "cuda80": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda80>=5.0.0b4"],
                "cuda90": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda90>=5.0.0b4"],
                "cuda91": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda91>=5.0.0b4"],
                "cuda92": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda92>=5.0.0b4"],
                "cuda100": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda100>=5.0.0b4"],
                # Language tokenizers with external dependencies
                "ja": ["mecab-python3==0.7"],
                "ko": ["natto-py==0.9.0"],
                "th": ["pythainlp>=2.0"],
            },
            python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
            classifiers=[
                "Development Status :: 5 - Production/Stable",
                "Environment :: Console",
                "Intended Audience :: Developers",
                "Intended Audience :: Science/Research",
                "License :: OSI Approved :: MIT License",
                "Operating System :: POSIX :: Linux",
                "Operating System :: MacOS :: MacOS X",
                "Operating System :: Microsoft :: Windows",
                "Programming Language :: Cython",
                "Programming Language :: Python :: 2",
                "Programming Language :: Python :: 2.7",
                "Programming Language :: Python :: 3",
                "Programming Language :: Python :: 3.4",
                "Programming Language :: Python :: 3.5",
                "Programming Language :: Python :: 3.6",
                "Programming Language :: Python :: 3.7",
                "Topic :: Scientific/Engineering",
            ],
            cmdclass={"build_ext": build_ext_subclass},
        )
if not flag: console_scripts.append(script) install_requires = [ 'paramiko>=1.15.2', 'mako>=1.0.0', 'pyelftools>=0.2.4', 'capstone', 'ropgadget>=5.3', 'pyserial>=2.7', 'requests>=2.0', 'pip>=6.0.8', 'tox>=1.8.1', 'pygments>=2.0', 'pysocks', 'python-dateutil', 'pypandoc', 'packaging' ] # This is a hack until somebody ports psutil to OpenBSD if platform.system() != 'OpenBSD': install_requires.append('psutil>=2.1.3') # Check that the user has installed the Python development headers PythonH = os.path.join(get_python_inc(), 'Python.h') if not os.path.exists(PythonH): print >> sys.stderr, "You must install the Python development headers!" print >> sys.stderr, "$ apt-get install python-dev" sys.exit(-1) # Convert README.md to reStructuredText for PyPI long_description = '' try: import pypandoc try: pypandoc.get_pandoc_path() except OSError: pypandoc.download_pandoc() long_description = pypandoc.convert_file('README.md', 'rst') except ImportError:
#lib_build_paths.append(('broot/lib', [self.build_clib + '/lib' + lib_name + '.so'])) self.compiler.create_static_lib(objects, lib_name, output_dir=self.build_clib, debug=self.debug) eigen_dest = "lib/eigen3.3.3" eigen_url = "http://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2" eigen_inner = "eigen-eigen-67e894c6cd8f" blas_libs = get_blas_libs() inc = [ 'lib/macau-cpp', eigen_dest, 'lib/libfastsparse', np.get_include(), get_python_inc(), "/usr/local/include", "/usr/local/opt/openblas/include" ] ldirs = [ "/opt/OpenBLAS/lib", "/usr/local/lib", "/usr/lib/openblas-base", "/usr/local/opt/openblas/lib", "/usr/local/opt/gcc/lib/gcc/5", "/usr/lib/x86_64-linux-gnu" ] libmacau = ( 'macau-cpp', dict( package='macau', sources=list( filter( lambda a: a.find("tests.cpp") < 0 and a.find("macau_mpi.cpp") < 0, glob('lib/macau-cpp/*.cpp'))),
def src2obj(srcpath, Runner=None, objpath=None, cwd=None, inc_py=False, **kwargs):
    """ Compiles a source code file to an object file.

    Files ending with '.pyx' assumed to be cython files and
    are dispatched to pyx2obj.

    Parameters
    ==========
    srcpath: str
        Path to source file.
    Runner: CompilerRunner subclass (optional)
        If ``None``: deduced from extension of srcpath.
    objpath : str (optional)
        Path to generated object. If ``None``: deduced from ``srcpath``.
    cwd: str (optional)
        Working directory and root of relative paths.
        If ``None``: current dir.
    inc_py: bool
        Add Python include path to kwarg "include_dirs". Default: False
    \\*\\*kwargs: dict
        keyword arguments passed to Runner or pyx2obj
    """
    stem, suffix = os.path.splitext(os.path.basename(srcpath))

    # Derive the object path from the source location when not given.
    if objpath is None:
        objpath = '.' if os.path.isabs(srcpath) else (
            os.path.dirname(srcpath) or '.')  # avoid objpath == ''
    if os.path.isdir(objpath):
        objpath = os.path.join(objpath, stem + objext)

    include_dirs = kwargs.pop('include_dirs', [])
    if inc_py:
        from distutils.sysconfig import get_python_inc
        py_inc_dir = get_python_inc()
        if py_inc_dir not in include_dirs:
            include_dirs.append(py_inc_dir)

    # Cython sources take an entirely different compilation route.
    if suffix.lower() == '.pyx':
        return pyx2obj(srcpath, objpath=objpath,
                       include_dirs=include_dirs, cwd=cwd,
                       **kwargs)

    if Runner is None:
        Runner, std = extension_mapping[suffix.lower()]
        kwargs.setdefault('std', std)

    flags = kwargs.pop('flags', [])
    for required_flag in ('-fPIC',):
        if required_flag not in flags:
            flags.append(required_flag)

    # src2obj implies not running the linker...
    run_linker = kwargs.pop('run_linker', False)
    if run_linker:
        raise CompileError("src2obj called with run_linker=True")

    runner = Runner([srcpath], objpath, include_dirs=include_dirs,
                    run_linker=run_linker, cwd=cwd, flags=flags, **kwargs)
    runner.run()
    return objpath
def compile_cython_modules(profile=False, compile_more=False, cython_with_refnanny=False):
    """Register Cython's own compiled (binary) modules as extensions.

    Builds the list of modules to cythonize, optionally compiles the formal
    grammar with pgen, and stores the resulting Extension list in
    ``setup_args['ext_modules']``.

    :param profile: enable Cython profiling directives for the binaries.
    :param compile_more: also compile the larger/optional compiler modules.
    :param cython_with_refnanny: define CYTHON_REFNANNY for all modules
        except refnanny itself.
    """
    source_root = os.path.abspath(os.path.dirname(__file__))
    compiled_modules = [
        "Cython.Plex.Scanners",
        "Cython.Plex.Actions",
        "Cython.Plex.Machines",
        "Cython.Plex.Transitions",
        "Cython.Plex.DFA",
        "Cython.Compiler.Scanning",
        "Cython.Compiler.Visitor",
        "Cython.Compiler.FlowControl",
        "Cython.Runtime.refnanny",
        "Cython.Compiler.FusedNode",
        "Cython.Tempita._tempita",
    ]
    if compile_more:
        compiled_modules.extend([
            "Cython.StringIOTree",
            "Cython.Compiler.Code",
            "Cython.Compiler.Lexicon",
            "Cython.Compiler.Parsing",
            "Cython.Compiler.Pythran",
            "Cython.Build.Dependencies",
            "Cython.Compiler.ParseTreeTransforms",
            "Cython.Compiler.Nodes",
            "Cython.Compiler.ExprNodes",
            "Cython.Compiler.ModuleNode",
            "Cython.Compiler.Optimize",
        ])

    from distutils.spawn import find_executable
    from distutils.sysconfig import get_python_inc
    # pgen may live next to the Python headers (CPython source tree layout)
    pgen = find_executable(
        'pgen', os.pathsep.join([
            os.environ['PATH'],
            os.path.join(get_python_inc(), '..', 'Parser')
        ]))
    if not pgen:
        sys.stderr.write(
            "Unable to find pgen, not compiling formal grammar.\n")
    else:
        parser_dir = os.path.join(os.path.dirname(__file__), 'Cython', 'Parser')
        grammar = os.path.join(parser_dir, 'Grammar')
        subprocess.check_call([
            pgen,
            os.path.join(grammar),
            os.path.join(parser_dir, 'graminit.h'),
            os.path.join(parser_dir, 'graminit.c'),
        ])
        cst_pyx = os.path.join(parser_dir, 'ConcreteSyntaxTree.pyx')
        # touch the .pyx so it is rebuilt whenever the grammar is newer
        if os.stat(grammar)[stat.ST_MTIME] > os.stat(cst_pyx)[stat.ST_MTIME]:
            mtime = os.stat(grammar)[stat.ST_MTIME]
            os.utime(cst_pyx, (mtime, mtime))
        compiled_modules.extend([
            "Cython.Parser.ConcreteSyntaxTree",
        ])

    defines = []
    if cython_with_refnanny:
        defines.append(('CYTHON_REFNANNY', '1'))

    extensions = []
    for module in compiled_modules:
        source_file = os.path.join(source_root, *module.split('.'))
        # a few modules ship as .py rather than .pyx
        if os.path.exists(source_file + ".py"):
            pyx_source_file = source_file + ".py"
        else:
            pyx_source_file = source_file + ".pyx"
        dep_files = []
        if os.path.exists(source_file + '.pxd'):
            dep_files.append(source_file + '.pxd')
        # refnanny itself must not be compiled with CYTHON_REFNANNY
        if '.refnanny' in module:
            defines_for_module = []
        else:
            defines_for_module = defines
        extensions.append(
            Extension(module, sources=[pyx_source_file],
                      define_macros=defines_for_module,
                      depends=dep_files))
        # XXX hack around setuptools quirk for '*.pyx' sources
        extensions[-1].sources[0] = pyx_source_file

    from Cython.Distutils.build_ext import new_build_ext
    from Cython.Compiler.Options import get_directive_defaults
    get_directive_defaults().update(
        language_level=2,
        binding=False,
        always_allow_keywords=False,
        autotestdict=False,
    )
    if profile:
        get_directive_defaults()['profile'] = True
        sys.stderr.write("Enabled profiling for the Cython binary modules\n")

    # not using cythonize() directly to let distutils decide whether building extensions was requested
    add_command_class("build_ext", new_build_ext)
    setup_args['ext_modules'] = extensions
def finalize_options(self):
    """Finalize the build_ext command options (legacy distutils variant).

    Inherits unset options from the 'build' command, normalizes
    string-valued list options, and assembles the platform-specific
    include and library search paths.  NOTE(review): this is Python 2-era
    code (``StringType``, ``string.split``); kept as-is for consistency
    with its surroundings.
    """
    from distutils import sysconfig

    # Inherit any option the user did not set explicitly from 'build'.
    self.set_undefined_options(
        'build',
        ('build_lib', 'build_lib'),
        ('build_temp', 'build_temp'),
        ('compiler', 'compiler'),
        ('debug', 'debug'),
        ('force', 'force'),
        ('plat_name', 'plat_name'),
    )

    if self.package is None:
        self.package = self.distribution.ext_package

    self.extensions = self.distribution.ext_modules

    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if isinstance(self.include_dirs, str):
        self.include_dirs = self.include_dirs.split(os.pathsep)

    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.append(py_include)
    if plat_py_include != py_include:
        self.include_dirs.append(plat_py_include)

    self.ensure_string_list('libraries')
    self.ensure_string_list('link_objects')

    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    elif type(self.library_dirs) is StringType:
        self.library_dirs = string.split(self.library_dirs, os.pathsep)

    if self.rpath is None:
        self.rpath = []
    elif type(self.rpath) is StringType:
        self.rpath = string.split(self.rpath, os.pathsep)

    # for extensions under windows use different directories
    # for Release and Debug builds.
    # also Python's library directory must be appended to library_dirs
    if os.name == 'nt':
        # the 'libs' directory is for binary installs - we assume that
        # must be the *native* platform.  But we don't really support
        # cross-compiling via a binary install anyway, so we let it go.
        self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
        if self.debug:
            self.build_temp = os.path.join(self.build_temp, "Debug")
        else:
            self.build_temp = os.path.join(self.build_temp, "Release")

        # Append the source distribution include and library directories,
        # this allows distutils on windows to work in the source tree
        self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
        if MSVC_VERSION == 9:
            # Use the .lib files for the correct architecture
            if self.plat_name == 'win32':
                suffix = ''
            else:
                # win-amd64 or win-ia64
                suffix = self.plat_name[4:]
            # We could have been built in one of two places; add both
            for d in ('PCbuild', ), ('PC', 'VS9.0'):
                new_lib = os.path.join(sys.exec_prefix, *d)
                if suffix:
                    new_lib = os.path.join(new_lib, suffix)
                self.library_dirs.append(new_lib)
        elif MSVC_VERSION == 8:
            self.library_dirs.append(
                os.path.join(sys.exec_prefix, 'PC', 'VS8.0'))
        elif MSVC_VERSION == 7:
            self.library_dirs.append(
                os.path.join(sys.exec_prefix, 'PC', 'VS7.1'))
        else:
            self.library_dirs.append(
                os.path.join(sys.exec_prefix, 'PC', 'VC6'))

    # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
    # import libraries in its "Config" subdirectory
    if os.name == 'os2':
        self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))

    # for extensions under Cygwin and AtheOS Python's library directory must be
    # appended to library_dirs
    if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
        if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
            # building third party extensions
            self.library_dirs.append(
                os.path.join(sys.prefix, "lib",
                             "python" + get_python_version(), "config"))
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # For building extensions with a shared Python library,
    # Python's library directory must be appended to library_dirs
    # See Issues: #1600860, #4366
    if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
        if not sysconfig.python_build:
            # building third party extensions
            self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # The argument parsing will result in self.define being a string, but
    # it has to be a list of 2-tuples.  All the preprocessor symbols
    # specified by the 'define' option will be set to '1'.  Multiple
    # symbols can be separated with commas.
    if self.define:
        defines = self.define.split(',')
        self.define = map(lambda symbol: (symbol, '1'), defines)

    # The option for macros to undefine is also a string from the
    # option parsing, but has to be a list.  Multiple symbols can also
    # be separated with commas here.
    if self.undef:
        self.undef = self.undef.split(',')

    if self.swig_opts is None:
        self.swig_opts = []
    else:
        self.swig_opts = self.swig_opts.split(' ')

    # Finally add the user include and library directories if requested
    if self.user:
        user_include = os.path.join(USER_BASE, "include")
        user_lib = os.path.join(USER_BASE, "lib")
        if os.path.isdir(user_include):
            self.include_dirs.append(user_include)
        if os.path.isdir(user_lib):
            self.library_dirs.append(user_lib)
            self.rpath.append(user_lib)
def setup_package():
    """Entry point of spaCy's setup.py (older release line).

    Builds the Cython extension list and calls ``setup()``.
    ``python setup.py clean`` is intercepted and dispatched to ``clean()``.
    """
    root = os.path.abspath(os.path.dirname(__file__))

    if len(sys.argv) > 1 and sys.argv[1] == 'clean':
        return clean(root)

    with chdir(root):
        # Read package metadata without importing the package.
        with io.open(os.path.join(root, 'spacy', 'about.py'), encoding='utf8') as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, 'README.rst'), encoding='utf8') as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, 'include')
        ]

        # MSVC 9 (VS 2008) needs the bundled compatibility headers.
        if (ccompiler.new_compiler().compiler_type == 'msvc'
                and msvccompiler.get_build_version() == 9):
            include_dirs.append(os.path.join(root, 'include', 'msvc9'))

        ext_modules = []
        for mod_name in MOD_NAMES:
            mod_path = mod_name.replace('.', '/') + '.cpp'
            extra_link_args = []
            # ???
            # Imported from patch from @mikepb
            # See Issue #267. Running blind here...
            if sys.platform == 'darwin':
                dylib_path = ['..' for _ in range(mod_name.count('.'))]
                dylib_path = '/'.join(dylib_path)
                dylib_path = '@loader_path/%s/spacy/platform/darwin/lib' % dylib_path
                extra_link_args.append('-Wl,-rpath,%s' % dylib_path)
            ext_modules.append(
                Extension(mod_name, [mod_path], language='c++',
                          include_dirs=include_dirs,
                          extra_link_args=extra_link_args))

        # Only regenerate .cpp from .pyx when building from a source checkout.
        if not is_source_release(root):
            generate_cython(root, 'spacy')

        setup(
            name=about['__title__'],
            zip_safe=False,
            packages=PACKAGES,
            package_data=PACKAGE_DATA,
            description=about['__summary__'],
            long_description=readme,
            author=about['__author__'],
            author_email=about['__email__'],
            version=about['__version__'],
            url=about['__uri__'],
            license=about['__license__'],
            ext_modules=ext_modules,
            scripts=['bin/spacy'],
            install_requires=[
                'numpy>=1.7',
                'murmurhash>=0.28,<0.29',
                'cymem>=1.30,<1.32',
                'preshed>=1.0.0,<2.0.0',
                'thinc>=6.10.1,<6.11.0',
                'plac<1.0.0,>=0.9.6',
                'six',
                'pathlib',
                'ujson>=1.35',
                'dill>=0.2,<0.3',
                'requests>=2.13.0,<3.0.0',
                'regex==2017.4.5',
                'ftfy>=4.4.2,<5.0.0',
                'msgpack-python',
                'msgpack-numpy==0.4.1'
            ],
            classifiers=[
                'Development Status :: 5 - Production/Stable',
                'Environment :: Console',
                'Intended Audience :: Developers',
                'Intended Audience :: Science/Research',
                'License :: OSI Approved :: MIT License',
                'Operating System :: POSIX :: Linux',
                'Operating System :: MacOS :: MacOS X',
                'Operating System :: Microsoft :: Windows',
                'Programming Language :: Cython',
                'Programming Language :: Python :: 2',
                'Programming Language :: Python :: 2.7',
                'Programming Language :: Python :: 3',
                'Programming Language :: Python :: 3.4',
                'Programming Language :: Python :: 3.5',
                'Programming Language :: Python :: 3.6',
                'Topic :: Scientific/Engineering'
            ],
            cmdclass={'build_ext': build_ext_subclass},
        )
def createMakefile(): say("==== Checking supported compiler options and available libraries ====\n" ) LINK_FLAGS = LDFLAGS.split( ) # accumulate the linker flags that will be put to Makefile.local COMPILE_FLAGS = CFLAGS.split( ) # same for the compilation of the shared library only # default compilation flags for both the shared library and all example programs that use it CXXFLAGS = ['-fPIC', '-Wall', '-O2'] # [1a]: check if a compiler exists at all if not runCompiler(): raise CompileError( "Could not locate a compiler (set CXX=... environment variable to override)" ) # [1b]: test if OpenMP is supported (optional) OMP_FLAG = '-fopenmp' OMP_CODE = "#include <omp.h>\nint main(){\n#pragma omp parallel for\nfor(int i=0; i<16; i++);\n}\n" if runCompiler(code=OMP_CODE, flags=OMP_FLAG + ' -Werror -Wno-unknown-pragmas'): CXXFLAGS += [OMP_FLAG] else: if not ask( "Warning, OpenMP is not supported\n" + "If you're compiling on MacOS with clang, you'd better install another compiler such as GCC\n" + "Do you want to continue without OpenMP? 
[Y/N] "): exit(1) # [1c]: test if C++11 is supported (optional) CXX11_FLAG = '-std=c++11' if runCompiler(flags=CXX11_FLAG): CXXFLAGS += [CXX11_FLAG] # [1d]: test the -march flag (optional, allows architecture-dependent compiler optimizations) ARCH_FLAG = '-march=native' ARCH_CODE = 'int main(int c, char** v) { double x=c*3.14; return x==42; }\n' if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG): CXXFLAGS += [ARCH_FLAG] else: ARCH_FLAG = '-march=core2' # try a less ambitious option if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG): CXXFLAGS += [ARCH_FLAG] # [2a]: check that NumPy is present (required by the python interface) try: import numpy NUMPY_INC = '-I' + numpy.get_include() except ImportError: raise CompileError( "NumPy is not present - python extension cannot be compiled") # [2b]: find out the paths to Python.h and libpythonXX.{a,so} (this is rather tricky) and # all other relevant compilation/linking flags needed to build a shared library that uses Python PYTHON_INC = '-I' + sysconfig.get_python_inc() # various other system libraries that are needed at link time PYTHON_LIB_EXTRA = \ sysconfig.get_config_var('LIBS').split() + \ sysconfig.get_config_var('SYSLIBS').split() #+ ['-lz'] # try compiling a test code with the provided link flags (in particular, the name of Python library): # check that a sample C++ program with embedded python compiles, links and runs properly numAttempts = 0 def tryPythonCode(PYTHON_LIB): # test code for a shared library PYTEST_LIB_CODE = """ #include "Python.h" #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION #include "numpy/arrayobject.h" void bla() {PyRun_SimpleString("from distutils import sysconfig;print(sysconfig.PREFIX);");} void run() {Py_Initialize();bla();Py_Finalize();} PyMODINIT_FUNC """ if sys.version_info[0] == 2: # Python 2.6-2.7 PYTEST_LIB_CODE += """ initpytest42(void) { Py_InitModule3("pytest42", NULL, "doc"); import_array(); bla(); } """ else: # Python 3.x PYTEST_LIB_CODE += """ PyInit_pytest42(void) { static 
PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, "pytest42", "doc", -1, NULL}; PyObject* mod = PyModule_Create(&moduledef); import_array1(mod); bla(); return mod; } """ # test code for a program that loads this shared library PYTEST_EXE_CODE = 'extern void run();int main(){run();}\n' PYTEST_LIB_NAME = './pytest42.so' PYTEST_EXE_NAME = './pytest42.exe' # try compiling the test shared library if not runCompiler(code=PYTEST_LIB_CODE, \ flags=' '.join([PYTHON_INC, NUMPY_INC, '-shared', '-fPIC'] + PYTHON_LIB), \ dest=PYTEST_LIB_NAME): return False # the program couldn't be compiled at all (try the next variant) # if succeeded, compile the test program that uses this library if not runCompiler(code=PYTEST_EXE_CODE, flags=PYTEST_LIB_NAME, dest=PYTEST_EXE_NAME) \ or not os.path.isfile(PYTEST_LIB_NAME) or not os.path.isfile(PYTEST_EXE_NAME): return False # can't find compiled test program resultexe = runProgram(PYTEST_EXE_NAME).rstrip() # also try loading this shared library as an extension module proc = subprocess.Popen(sys.executable+" -c 'import pytest42'", \ stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) resultpy = proc.communicate()[0].decode().rstrip() # clean up os.remove(PYTEST_EXE_NAME) os.remove(PYTEST_LIB_NAME) # check if the results (reported library path prefix) are the same as we have in this script if resultexe != sysconfig.PREFIX or resultpy != sysconfig.PREFIX: # return a warning, the user may still wish to continue return "Test program doesn't seem to use the same version of Python, "+\ "or the library path is reported incorrectly: \n"+\ "Expected: "+sysconfig.PREFIX+"\n"+\ "Received: "+resultexe+"\n"+\ "From py: "+resultpy+"\n"+\ "Should we continue the build (things may go wrong at a later stage)? [Y/N] " return True # this combination of options seems reasonable... # explore various possible combinations of file name and path to the python library... 
def findPythonLib():
    """Locate the Python library to link against, trying static then shared variants.

    Iterates over candidate library file names (LIBRARY, LDLIBRARY, INSTSONAME)
    and candidate directories (LIBPL, LIBDIR), testing each combination with
    tryPythonCode().  Returns the list of linker flags on success; raises
    CompileError if nothing works.
    """
    backup_lib = None
    # Prefer the static libpython**.a; fall back to the shared libpython**.so**.
    candidate_names = ['LIBRARY', 'LDLIBRARY', 'INSTSONAME']
    for lib_file in [sysconfig.get_config_var(x) for x in candidate_names]:
        for lib_path in [sysconfig.get_config_var(x) for x in ['LIBPL', 'LIBDIR']]:
            lib_filepath = os.path.join(lib_path, lib_file)
            if not os.path.isfile(lib_filepath):
                continue
            # Extra libraries depend on whether this is a static or a shared python library.
            PYTHON_LIB = [lib_filepath] + PYTHON_LIB_EXTRA
            if lib_file.endswith('.a') and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
                PYTHON_LIB += sysconfig.get_config_var('LINKFORSHARED').split()
            # The stack_size flag is problematic and needs to be removed.
            PYTHON_LIB = [x for x in PYTHON_LIB if not x.startswith('-Wl,-stack_size,')]
            result = tryPythonCode(PYTHON_LIB)
            if result is True:
                return PYTHON_LIB   # successful compilation
            if result:
                # Not True but a warning string: the test compiled with a version
                # mismatch warning; keep this combination as a backup option.
                backup_lib = PYTHON_LIB
                backup_prompt = result
    if backup_lib is not None and ask(backup_prompt):
        # The user wants to continue with the backup option.
        return backup_lib
    # None of the combinations worked: give up.
    raise CompileError("Could not compile test program which uses libpython" +
        sysconfig.get_config_var('VERSION'))

# [2c]: find the python library and other relevant linking flags
PYTHON_LIB = findPythonLib()
COMPILE_FLAGS += ['-DHAVE_PYTHON', PYTHON_INC, NUMPY_INC]
LINK_FLAGS += PYTHON_LIB

# [3]: check that GSL is present, and find out its version (required).
# Try compiling a snippet into a shared library (tests if GSL was built with -fPIC).
GSL_CODE = """#include <gsl/gsl_version.h>
#if not defined(GSL_MAJOR_VERSION) || (GSL_MAJOR_VERSION == 1) && (GSL_MINOR_VERSION < 15)
#error "GSL version is too old (need at least 1.15)"
#endif
void dummy(){}
"""
if runCompiler(code=GSL_CODE, flags='-fPIC -lgsl -lgslcblas -shared'):
    # Headers and libraries are in a standard location, so only list their names.
    LINK_FLAGS += ['-lgsl', '-lgslcblas']
else:
    if not ask("GSL library (required) is not found\n" +
            "Should we try to download and compile it now? [Y/N] "):
        exit(1)
    distutils.dir_util.mkpath(EXTRAS_DIR)
    os.chdir(EXTRAS_DIR)
    say('Downloading GSL\n')
    filename = 'gsl.tar.gz'
    dirname = 'gsl-2.4'
    try:
        urlretrieve('ftp://ftp.gnu.org/gnu/gsl/gsl-2.4.tar.gz', filename)
        if os.path.isfile(filename):
            subprocess.call(['tar', '-zxf', filename])   # unpack the archive
            os.remove(filename)                          # remove the downloaded archive
        if not os.path.isdir(dirname):
            raise Exception("Error unpacking GSL")
    except Exception as e:
        raise CompileError(str(e) +
            "\nError downloading GSL library, aborting...\n" +
            "You may try to manually compile GSL and install it to " +
            ROOT_DIR + "/" + EXTRAS_DIR + ", so that " +
            "the header files are in " + EXTRAS_DIR +
            "/include and library files - in " + EXTRAS_DIR + "/lib")
    say('Compiling GSL (may take a few minutes)\n')
    result = subprocess.call('(cd ' + dirname + '; ./configure --prefix=' + os.getcwd() +
        ' CFLAGS="-fPIC -O2" --enable-shared=no; make; make install) > gsl-install.log',
        shell=True)
    if result != 0 or not os.path.isfile('lib/libgsl.a'):
        raise CompileError("GSL compilation failed (check " + EXTRAS_DIR + "/gsl-install.log)")
    distutils.dir_util.remove_tree(dirname)   # clean up source and build directories
    COMPILE_FLAGS += ['-I' + EXTRAS_DIR + '/include']
    LINK_FLAGS += [EXTRAS_DIR + '/lib/libgsl.a', EXTRAS_DIR + '/lib/libgslcblas.a']
    os.chdir(ROOT_DIR)

# [4]: test if Eigen library is present (optional)
if runCompiler(code='#include <Eigen/Core>\nint main(){}\n'):
    COMPILE_FLAGS += ['-DHAVE_EIGEN']
else:
    if ask("Eigen library (recommended) is not found\n" +
            "Should we try to download it now (no compilation needed)? [Y/N] "):
        distutils.dir_util.mkpath(EXTRAS_DIR + '/include/unsupported')
        os.chdir(EXTRAS_DIR)
        say('Downloading Eigen\n')
        filename = 'Eigen.zip'
        dirname = 'eigen-git-mirror-3.3.4'
        try:
            urlretrieve('https://github.com/eigenteam/eigen-git-mirror/archive/3.3.4.zip',
                filename)
            if os.path.isfile(filename):
                subprocess.call('unzip ' + filename + ' >/dev/null', shell=True)   # unpack
                if os.path.isdir(dirname):
                    # Copy the headers, then delete the rest of the unpacked tree.
                    distutils.dir_util.copy_tree(dirname + '/Eigen',
                        'include/Eigen', verbose=False)
                    distutils.dir_util.copy_tree(dirname + '/unsupported/Eigen',
                        'include/unsupported/Eigen', verbose=False)
                    distutils.dir_util.remove_tree(dirname)
                    COMPILE_FLAGS += ['-DHAVE_EIGEN', '-I' + EXTRAS_DIR + '/include']
                os.remove(filename)   # remove the downloaded archive
        except:
            pass   # didn't succeed with Eigen
        os.chdir(ROOT_DIR)

# [5a]: test if CVXOPT is present (optional)
try:
    import cvxopt   # import the python module
except:   # import error or some other problem, might be corrected
    if ask("CVXOPT library (needed only for Schwarzschild modelling) is not found\n"
            "Should we try to install it now? [Y/N] "):
        try:
            import pip
            pip.main(['install', '--user', 'cvxopt'])
        except Exception as e:
            say("Failed to install CVXOPT: " + str(e) + "\n")

# [5b]: if the cvxopt module is available in Python, make sure we also have C header files
try:
    import cvxopt   # if this fails, skip cvxopt altogether
    if runCompiler(code='#include <cvxopt.h>\nint main(){}\n',
            flags=' '.join(['-c', PYTHON_INC, NUMPY_INC])):
        COMPILE_FLAGS += ['-DHAVE_CVXOPT']
    else:
        # Download the C header file if it is not present in a default location.
        distutils.dir_util.mkpath(EXTRAS_DIR + '/include')
        say('Downloading CVXOPT header files\n')
        try:
            urlretrieve(
                'https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/cvxopt.h',
                EXTRAS_DIR + '/include/cvxopt.h')
            urlretrieve(
                'https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/blas_redefines.h',
                EXTRAS_DIR + '/include/blas_redefines.h')
        except:
            pass   # problems in downloading, skip it
        if os.path.isfile(EXTRAS_DIR + '/include/cvxopt.h') and \
                os.path.isfile(EXTRAS_DIR + '/include/blas_redefines.h'):
            COMPILE_FLAGS += ['-DHAVE_CVXOPT', '-I' + EXTRAS_DIR + '/include']
except:
    pass   # cvxopt wasn't available

# [6]: test if GLPK is present (optional)
if runCompiler(code='#include <glpk.h>\nint main(){}\n', flags='-lglpk'):
    COMPILE_FLAGS += ['-DHAVE_GLPK']
    LINK_FLAGS += ['-lglpk']
else:
    say("GLPK library (optional) is not found\n")

# [7]: test if UNSIO is present (optional)
if runCompiler(code='#include <uns.h>\nint main(){}\n', flags='-lunsio -lnemo'):
    COMPILE_FLAGS += ['-DHAVE_UNSIO']
    LINK_FLAGS += ['-lunsio', '-lnemo']
else:
    say("UNSIO library (optional) is not found\n")

# [99]: put everything together and create Makefile.local
with open('Makefile.local', 'w') as f:
    f.write(
        "# set the default compiler if no value is found in the environment variables or among command-line arguments\n" +
        "ifeq ($(origin CXX),default)\nCXX = g++\nendif\n" +
        "ifeq ($(origin FC), default)\nFC = gfortran\nendif\nLINK = $(CXX)\n" +
        "# compilation/linking flags for both the shared library and any programs that use it\n" +
        "CXXFLAGS += " + " ".join(compressList(CXXFLAGS)) + "\n" +
        "# compilation flags for the shared library only (files in src/)\n" +
        "COMPILE_FLAGS += " + " ".join(compressList(COMPILE_FLAGS)) + "\n" +
        "# linking flags for the shared library only\n" +
        "LINK_FLAGS += " + " ".join(compressList(LINK_FLAGS)) + "\n")
def run(self): if cmake_build.built: return cmake_build.built = True if not os.path.exists(CMAKE_BUILD_DIR): os.makedirs(CMAKE_BUILD_DIR) with cd(CMAKE_BUILD_DIR): build_type = 'Release' # configure cmake_args = [ CMAKE, '-DPYTHON_INCLUDE_DIR={}'.format(sysconfig.get_python_inc()), '-DPYTHON_EXECUTABLE={}'.format(sys.executable), '-DBUILD_ONNX_PYTHON=ON', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '-DONNX_NAMESPACE={}'.format(ONNX_NAMESPACE), '-DPY_EXT_SUFFIX={}'.format( sysconfig.get_config_var('EXT_SUFFIX') or ''), ] if COVERAGE: cmake_args.append('-DONNX_COVERAGE=ON') if COVERAGE or DEBUG: # in order to get accurate coverage information, the # build needs to turn off optimizations build_type = 'Debug' cmake_args.append('-DCMAKE_BUILD_TYPE=%s' % build_type) if WINDOWS: cmake_args.extend([ # we need to link with libpython on windows, so # passing python version to window in order to # find python in cmake '-DPY_VERSION={}'.format( '{0}.{1}'.format(*sys.version_info[:2])), ]) if USE_MSVC_STATIC_RUNTIME: cmake_args.append('-DONNX_USE_MSVC_STATIC_RUNTIME=ON') if platform.architecture()[0] == '64bit': cmake_args.extend(['-A', 'x64', '-T', 'host=x64']) else: cmake_args.extend(['-A', 'Win32', '-T', 'host=x86']) if ONNX_ML: cmake_args.append('-DONNX_ML=1') if ONNX_VERIFY_PROTO3: cmake_args.append('-DONNX_VERIFY_PROTO3=1') if ONNX_BUILD_TESTS: cmake_args.append('-DONNX_BUILD_TESTS=ON') if 'CMAKE_ARGS' in os.environ: extra_cmake_args = shlex.split(os.environ['CMAKE_ARGS']) # prevent crossfire with downstream scripts del os.environ['CMAKE_ARGS'] log.info('Extra cmake args: {}'.format(extra_cmake_args)) cmake_args.extend(extra_cmake_args) cmake_args.append(TOP_DIR) subprocess.check_call(cmake_args) build_args = [CMAKE, '--build', os.curdir] if WINDOWS: build_args.extend(['--config', build_type]) build_args.extend(['--', '/maxcpucount:{}'.format(self.jobs)]) else: build_args.extend(['--', '-j', str(self.jobs)]) subprocess.check_call(build_args)
def finalize_options(self):
    """Resolve all build_ext options: inherit from 'build', normalize string
    options to lists, and append platform-specific include/library dirs."""
    from distutils import sysconfig

    self.set_undefined_options('build',
                               ('build_lib', 'build_lib'),
                               ('build_temp', 'build_temp'),
                               ('compiler', 'compiler'),
                               ('debug', 'debug'),
                               ('force', 'force'),
                               ('parallel', 'parallel'),
                               ('plat_name', 'plat_name'),
                               )

    if self.package is None:
        self.package = self.distribution.ext_package

    self.extensions = self.distribution.ext_modules

    # Make sure Python's include directories (for Python.h, pyconfig.h,
    # etc.) are in the include search path.
    py_include = sysconfig.get_python_inc()
    plat_py_include = sysconfig.get_python_inc(plat_specific=1)
    if self.include_dirs is None:
        self.include_dirs = self.distribution.include_dirs or []
    if isinstance(self.include_dirs, str):
        self.include_dirs = self.include_dirs.split(os.pathsep)

    # If in a virtualenv, add its include directory (Issue 16116).
    if sys.exec_prefix != sys.base_exec_prefix:
        self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))

    # Put the Python "system" include dir at the end, so that
    # any local include dirs take precedence.
    self.include_dirs.append(py_include)
    if plat_py_include != py_include:
        self.include_dirs.append(plat_py_include)

    self.ensure_string_list('libraries')

    # Life is easier if we're not forever checking for None, so
    # simplify these options to empty lists if unset.
    if self.libraries is None:
        self.libraries = []
    if self.library_dirs is None:
        self.library_dirs = []
    elif isinstance(self.library_dirs, str):
        self.library_dirs = self.library_dirs.split(os.pathsep)
    if self.rpath is None:
        self.rpath = []
    elif isinstance(self.rpath, str):
        self.rpath = self.rpath.split(os.pathsep)

    # For extensions under windows use different directories for Release and
    # Debug builds; Python's library directory must also be added.
    if os.name == 'nt':
        # The 'libs' directory is for binary installs - we assume that
        # must be the *native* platform.  But we don't really support
        # cross-compiling via a binary install anyway, so we let it go.
        self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
        if sys.base_exec_prefix != sys.prefix:
            # Issue 16116
            self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
        if self.debug:
            self.build_temp = os.path.join(self.build_temp, "Debug")
        else:
            self.build_temp = os.path.join(self.build_temp, "Release")

        # Append the source distribution include and library directories,
        # this allows distutils on windows to work in the source tree.
        self.include_dirs.append(os.path.dirname(get_config_h_filename()))
        _sys_home = getattr(sys, '_home', None)
        if _sys_home:
            self.library_dirs.append(_sys_home)

        # Use the .lib files for the correct architecture.
        if self.plat_name == 'win32':
            suffix = 'win32'
        else:
            # win-amd64 or win-ia64
            suffix = self.plat_name[4:]
        new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
        if suffix:
            new_lib = os.path.join(new_lib, suffix)
        self.library_dirs.append(new_lib)

    # For extensions under Cygwin and AtheOS, Python's library directory
    # must be appended to library_dirs.
    if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
        if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
            # building third party extensions
            self.library_dirs.append(os.path.join(sys.prefix, "lib",
                                                  "python" + get_python_version(),
                                                  "config"))
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # For building extensions with a shared Python library,
    # Python's library directory must be appended to library_dirs.
    # See Issues: #1600860, #4366
    # NOTE(review): this branch is deliberately disabled with 'False and';
    # kept as-is to preserve behavior.
    if False and (sysconfig.get_config_var('Py_ENABLE_SHARED')):
        if not sysconfig.python_build:
            # building third party extensions
            self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
        else:
            # building python standard extensions
            self.library_dirs.append('.')

    # The argument parsing will result in self.define being a string, but
    # it has to be a list of 2-tuples.  All the preprocessor symbols
    # specified by the 'define' option will be set to '1'.  Multiple
    # symbols can be separated with commas.
    if self.define:
        defines = self.define.split(',')
        self.define = [(symbol, '1') for symbol in defines]

    # The option for macros to undefine is also a string from the option
    # parsing, but has to be a list.  Symbols may be comma-separated here too.
    if self.undef:
        self.undef = self.undef.split(',')

    if self.swig_opts is None:
        self.swig_opts = []
    else:
        self.swig_opts = self.swig_opts.split(' ')

    # Finally add the user include and library directories if requested.
    if self.user:
        user_include = os.path.join(USER_BASE, "include")
        user_lib = os.path.join(USER_BASE, "lib")
        if os.path.isdir(user_include):
            self.include_dirs.append(user_include)
        if os.path.isdir(user_lib):
            self.library_dirs.append(user_lib)
            self.rpath.append(user_lib)

    if isinstance(self.parallel, str):
        try:
            self.parallel = int(self.parallel)
        except ValueError:
            raise DistutilsOptionError("parallel should be an integer")
getvar = sysconfig.get_config_var opt_flags = [flag for (flag, val) in opts] if '--help' in opt_flags: exit_with_usage(code=0) for opt in opt_flags: if opt == '--prefix': print sysconfig.PREFIX elif opt == '--exec-prefix': print sysconfig.EXEC_PREFIX elif opt in ('--includes', '--cflags'): flags = ['-I' + sysconfig.get_python_inc(), '-I' + sysconfig.get_python_inc(plat_specific=True)] try: import numpy flags += ['-I' + numpy.get_include() + ' -DSMILEI_USE_NUMPY -DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION'] except: pass if opt == '--cflags': flags.extend(getvar('CFLAGS').split()) print ' '.join(flags) elif opt in ('--libs', '--ldflags'): libs = ['-lpython' + pyver] libs += getvar('LIBS').split()
def build_dlib(): """use cmake to build and install the extension """ if cmake_path is None: raise DistutilsSetupError("Cannot find cmake in the path. Please specify its path with --cmake parameter.") platform_arch = platform.architecture()[0] log.info("Detected Python architecture: %s" % platform_arch) # make sure build artifacts are generated for the version of Python currently running cmake_extra_arch = [] if sys.version_info >= (3, 0): cmake_extra_arch += ['-DPYTHON3=yes'] if platform_arch == '64bit' and sys.platform == "win32": # 64bit build on Windows if not generator_set: # see if we can deduce the 64bit default generator cmake_extra_arch += get_msvc_win64_generator() # help cmake to find Python library in 64bit Python in Windows # because cmake is 32bit and cannot find PYTHON_LIBRARY from registry. inc_dir = get_python_inc() cmake_extra_arch += ['-DPYTHON_INCLUDE_DIR={inc}'.format(inc=inc_dir)] # this imitates cmake in path resolution py_ver = get_python_version() for ext in [py_ver.replace(".", "") + '.lib', py_ver + 'mu.lib', py_ver + 'm.lib', py_ver + 'u.lib']: py_lib = os.path.abspath(os.path.join(inc_dir, '../libs/', 'python' + ext)) if os.path.exists(py_lib): cmake_extra_arch += ['-DPYTHON_LIBRARY={lib}'.format(lib=py_lib)] break build_dir = os.path.join(script_dir, "./tools/python/build") if os.path.exists(build_dir): log.info('Removing build directory %s' % build_dir) rmtree(build_dir) try: os.makedirs(build_dir) except OSError: pass # cd build os.chdir(build_dir) log.info('Configuring cmake ...') cmake_cmd = [ cmake_path, "..", ] + cmake_extra + cmake_extra_arch if run_process(cmake_cmd): raise DistutilsSetupError("cmake configuration failed!") log.info('Build using cmake ...') cmake_cmd = [ cmake_path, "--build", ".", "--config", cmake_config, "--target", "install", ] if run_process(cmake_cmd): raise DistutilsSetupError("cmake build failed!") # cd back where setup awaits os.chdir(script_dir)
with open(defines_py, 'w') as fd: fd.write("# Autogenerated file containing compile-time definitions\n\n") for k, v in py_defines.items(): fd.write('%s = %d\n' % (k, int(v))) cythonize_opts = {'language_level': '3'} if os.environ.get("CYTHON_TRACE"): cythonize_opts['linetrace'] = True cython_macros.append(("CYTHON_TRACE_NOGIL", 1)) # By default C object files are rebuilt for every extension # C files must be built once only for coverage to work c_lib = ('c_wt', { 'sources': sources, 'depends': source_templates + header_templates + headers, 'include_dirs': [make_ext_path("c"), get_python_inc()], 'macros': c_macros, }) def get_ext_modules(use_cython): from numpy import get_include as get_numpy_include cython_modules, cython_sources = get_cython_sources(use_cython) ext_modules = [ Extension( 'pywt._extensions.{0}'.format(module), sources=[make_ext_path(source)], # Doesn't automatically rebuild if library changes depends=c_lib[1]['sources'] + c_lib[1]['depends'], include_dirs=[make_ext_path("c"), get_numpy_include()],
"pylogicaldecoding.h", ] parser = configparser.ConfigParser() parser.read('setup.cfg') # Choose a datetime module have_pydatetime = True have_mxdatetime = False use_pydatetime = int(parser.get('build_ext', 'use_pydatetime')) # check for mx package if parser.has_option('build_ext', 'mx_include_dir'): mxincludedir = parser.get('build_ext', 'mx_include_dir') else: mxincludedir = os.path.join(get_python_inc(plat_specific=1), "mx") if os.path.exists(mxincludedir): # Build the support for mx: we will check at runtime if it can be imported include_dirs.append(mxincludedir) define_macros.append(('HAVE_MXDATETIME', '1')) sources.append('adapter_mxdatetime.c') depends.extend(['adapter_mxdatetime.h', 'typecast_mxdatetime.c']) have_mxdatetime = True version_flags.append('mx') # now decide which package will be the default for date/time typecasts if have_pydatetime and (use_pydatetime or not have_mxdatetime): define_macros.append(('PYLD_DEFAULT_PYDATETIME', '1')) elif have_mxdatetime: define_macros.append(('PYLD_DEFAULT_MXDATETIME', '1')) else:
def getInclude(): dirName = get_python_inc() return [dirName, os.path.dirname(dirName), np.get_include()]
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for the 'spatial' subpackage:
    qhull, cKDTree, distance wrappers, voronoi/hausdorff helpers, data files."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from numpy.distutils.misc_util import get_info as get_misc_info
    from scipy._build_utils.system_info import get_info as get_sys_info
    from distutils.sysconfig import get_python_inc

    config = Configuration('spatial', parent_package, top_path)
    config.add_data_dir('tests')

    # spatial.transform
    config.add_subpackage('transform')

    # qhull
    qhull_src = sorted(glob.glob(join(dirname(__file__), 'qhull_src', 'src', '*.c')))

    inc_dirs = [get_python_inc()]
    if inc_dirs[0] != get_python_inc(plat_specific=1):
        inc_dirs.append(get_python_inc(plat_specific=1))
    inc_dirs.append(get_numpy_include_dirs())
    inc_dirs.append(join(dirname(dirname(__file__)), '_lib'))

    cfg = dict(get_sys_info('lapack_opt'))
    cfg.setdefault('include_dirs', []).extend(inc_dirs)
    config.add_extension('qhull',
                         sources=['qhull.c', 'qhull_misc.c'] + qhull_src,
                         **cfg)

    # cKDTree
    ckdtree_src = ['query.cxx', 'build.cxx', 'query_pairs.cxx',
                   'count_neighbors.cxx', 'query_ball_point.cxx',
                   'query_ball_tree.cxx', 'sparse_distances.cxx']
    ckdtree_src = [join('ckdtree', 'src', x) for x in ckdtree_src]

    ckdtree_headers = ['ckdtree_decl.h', 'coo_entries.h', 'distance_base.h',
                       'distance.h', 'ordered_pair.h', 'partial_sort.h',
                       'rectangle.h']
    ckdtree_headers = [join('ckdtree', 'src', x) for x in ckdtree_headers]

    ckdtree_dep = ['ckdtree.cxx'] + ckdtree_headers + ckdtree_src
    ext = config.add_extension('ckdtree',
                               sources=['ckdtree.cxx'] + ckdtree_src,
                               depends=ckdtree_dep,
                               include_dirs=inc_dirs + [join('ckdtree', 'src')])
    ext._pre_build_hook = pre_build_hook

    # _distance_wrap
    config.add_extension('_distance_wrap',
                         sources=[join('src', 'distance_wrap.c')],
                         depends=[join('src', 'distance_impl.h')],
                         include_dirs=[get_numpy_include_dirs()],
                         extra_info=get_misc_info("npymath"))

    config.add_extension('_voronoi', sources=['_voronoi.c'])

    config.add_extension('_hausdorff', sources=['_hausdorff.c'])

    # Add license files
    config.add_data_files('qhull_src/COPYING.txt')

    # Type stubs
    config.add_data_files('*.pyi')

    return config
'-Wall', '-Wextra', '-Werror', '-fexceptions', '-DNDEBUG', # You 100% do NOT need -DUSE_CLANG_COMPLETER and/or -DYCM_EXPORT in your flags; # only the YCM source code needs it. '-DUSE_CLANG_COMPLETER', '-DYCM_EXPORT=', # THIS IS IMPORTANT! Without the '-x' flag, Clang won't know which language to # use when compiling headers. So it will guess. Badly. So C++ headers will be # compiled as C headers. You don't want that so ALWAYS specify the '-x' flag. # For a C project, you would set this to 'c' instead of 'c++'. '-x', 'objective-c++', "-isystem", "/usr/include/x86_64-linux-gnu", '-isystem', get_python_inc(), '-isystem', 'cpp/llvm/include', '-isystem', 'cpp/llvm/tools/clang/include', # Third-party header directories. "-isystem", "cpp/third-party", "-isystem", "cpp/third-party/eigen", "-isystem", "cpp/third-party/flann/src/cpp", # Qt5 defines. "-DQT_CORE_LIB", "-DQT_GUI_LIB", "-DQT_NETWORK_LIB", "-DQT_QML_LIB", "-DQT_QUICK_LIB", "-DQT_SQL_LIB", "-DQT_WIDGETS_LIB", "-DQT_XML_LIB",
def finalize_options(self): TestCommand.finalize_options(self) if self.args: self.args = __import__('shlex').split(self.args) def run_tests(self): # Run nose ensuring that argv simulates running nosetests directly nose_args = ['nosetests'] nose_args.extend(self.args) __import__('nose').run_exit(argv=nose_args) commands = versioneer.get_cmdclass() commands['test'] = NoseTestCommand incDirs = [sysconfig.get_python_inc(), numpy.get_include()] ext = [ Extension("radiomics._cmatrices", ["radiomics/src/_cmatrices.c", "radiomics/src/cmatrices.c"], include_dirs=incDirs), Extension("radiomics._cshape", ["radiomics/src/_cshape.c", "radiomics/src/cshape.c"], include_dirs=incDirs) ] setup( name='pyradiomics', url='http://github.com/Radiomics/pyradiomics#readme', author='pyradiomics community', author_email='*****@*****.**',
# THIS IS IMPORTANT! Without the '-x' flag, Clang won't know which language to # use when compiling headers. So it will guess. Badly. So C++ headers will be # compiled as C headers. You don't want that so ALWAYS specify the '-x' flag. # For a C project, you would set this to 'c' instead of 'c++'. '-x', 'c++', '-isystem', 'cpp/absl', '-isystem', 'cpp/pybind11', '-isystem', 'cpp/whereami', '-isystem', 'cpp/BoostParts', '-isystem', get_python_inc(), '-isystem', 'cpp/llvm/include', '-isystem', 'cpp/llvm/tools/clang/include', '-I', 'cpp/ycm', '-I', 'cpp/ycm/ClangCompleter', '-isystem', 'cpp/ycm/tests/gmock/googlemock/include', '-isystem', 'cpp/ycm/tests/gmock/googletest/include', '-isystem', 'cpp/ycm/benchmarks/benchmark/include', '-std=c++17',
print " --link : return a link command" print " --libs : return just the library options for linking" sys.exit(1) if len(sys.argv) != 2: usage() try: from distutils import sysconfig except ImportError: # No information available print "none" sys.exit(1) if sys.argv[1] == '--includes': inc = sysconfig.get_python_inc() plat = sysconfig.get_python_inc(plat_specific=1) if inc == plat: print "-I" + inc else: print "-I%s -I%s" % (inc, plat) sys.exit(0) if sys.argv[1] == '--compile': cc, basecflags, opt, ccshared = \ sysconfig.get_config_vars('CC', 'BASECFLAGS', 'OPT', 'CCSHARED') if basecflags: opt = basecflags + ' ' + opt print cc, opt, ccshared sys.exit(0)
def setup_package():
    """Entry point for setup.py: read package metadata, assemble the C++
    (and optional CUDA) extension modules, then invoke setup()."""
    root = os.path.abspath(os.path.dirname(__file__))

    if len(sys.argv) > 1 and sys.argv[1] == 'clean':
        return clean(root)

    with chdir(root):
        # Load version/metadata from thinc/about.py without importing the package.
        with open(os.path.join(root, 'thinc', 'about.py')) as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, 'README.rst'), encoding='utf8') as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, 'include')]

        if (ccompiler.new_compiler().compiler_type == 'msvc'
                and msvccompiler.get_build_version() == 9):
            include_dirs.append(os.path.join(root, 'include', 'msvc9'))

        ext_modules = []
        for mod_name in MOD_NAMES:
            if mod_name.endswith('gpu_ops'):
                continue
            mod_path = mod_name.replace('.', '/') + '.cpp'
            ext_modules.append(
                Extension(mod_name, [mod_path],
                          language='c++', include_dirs=include_dirs))

        if CUDA is None:
            pass
            #ext_modules.append(
            #    Extension("thinc.neural.gpu_ops",
            #        sources=["thinc/neural/gpu_ops.cpp"],
            #        language='c++',
            #        include_dirs=include_dirs))
        else:
            with chdir(root):
                ext_modules.append(
                    Extension(
                        "thinc.neural.gpu_ops",
                        sources=["thinc/neural/gpu_ops.cpp",
                                 "include/_cuda_shim.cu"],
                        library_dirs=[CUDA['lib64']],
                        libraries=['cudart'],
                        language='c++',
                        runtime_library_dirs=[CUDA['lib64']],
                        # This syntax is specific to this build system: certain
                        # compiler args are used only with nvcc and not with gcc;
                        # the trick is implemented in customize_compiler() below.
                        extra_compile_args=[
                            '-arch=sm_20', '--ptxas-options=-v', '-c',
                            '--compiler-options', "'-fPIC'"
                        ],
                        include_dirs=include_dirs + [CUDA['include']]))

        if not is_source_release(root):
            generate_cython(root, 'thinc')

        setup(
            name=about['__title__'],
            zip_safe=False,
            packages=PACKAGES,
            package_data={'': ['*.pyx', '*.pxd', '*.pxi', '*.cpp']},
            description=about['__summary__'],
            long_description=readme,
            author=about['__author__'],
            author_email=about['__email__'],
            version=about['__version__'],
            url=about['__uri__'],
            license=about['__license__'],
            ext_modules=ext_modules,
            install_requires=[
                'wrapt',
                'numpy>=1.7',
                'murmurhash>=0.28,<0.29',
                'cymem>=1.30,<1.32',
                'preshed>=1.0.0,<2.0.0',
                'tqdm>=4.10.0,<5.0.0',
                'cytoolz>=0.8,<0.9',
                'plac>=0.9.6,<1.0.0',
                'six>=1.10.0,<2.0.0',
                'dill',
                'termcolor',
                'pathlib>=1.0.0,<2.0.0',
                'msgpack-python',
                'msgpack-numpy'],
            classifiers=[
                'Development Status :: 5 - Production/Stable',
                'Environment :: Console',
                'Intended Audience :: Developers',
                'Intended Audience :: Science/Research',
                'License :: OSI Approved :: MIT License',
                'Operating System :: POSIX :: Linux',
                'Operating System :: MacOS :: MacOS X',
                'Operating System :: Microsoft :: Windows',
                'Programming Language :: Cython',
                'Programming Language :: Python :: 2.6',
                'Programming Language :: Python :: 2.7',
                'Programming Language :: Python :: 3.3',
                'Programming Language :: Python :: 3.4',
                'Programming Language :: Python :: 3.5',
                'Programming Language :: Python :: 3.6',
                'Topic :: Scientific/Engineering'],
            cmdclass={'build_ext': build_ext_subclass},
        )
def init_env(): from setup.build_environment import msvc, is64bit, win_inc, win_lib, NMAKE from distutils import sysconfig linker = None if isunix: cc = os.environ.get('CC', 'gcc') cxx = os.environ.get('CXX', 'g++') debug = '' # debug = '-ggdb' cflags = os.environ.get( 'OVERRIDE_CFLAGS', '-Wall -DNDEBUG %s -fno-strict-aliasing -pipe' % debug) cflags = shlex.split(cflags) + ['-fPIC'] ldflags = os.environ.get('OVERRIDE_LDFLAGS', '-Wall') ldflags = shlex.split(ldflags) cflags += shlex.split(os.environ.get('CFLAGS', '')) ldflags += shlex.split(os.environ.get('LDFLAGS', '')) cflags += ['-fvisibility=hidden'] if islinux: cflags.append('-pthread') ldflags.append('-shared') if isbsd: cflags.append('-pthread') ldflags.append('-shared') if ishaiku: cflags.append('-lpthread') ldflags.append('-shared') if islinux or isbsd or ishaiku: cflags.append('-I' + sysconfig.get_python_inc()) # getattr(..., 'abiflags') is for PY2 compat, since PY2 has no abiflags # member ldflags.append('-lpython{}{}'.format( sysconfig.get_config_var('VERSION'), getattr(sys, 'abiflags', ''))) if isosx: cflags.append('-D_OSX') ldflags.extend('-bundle -undefined dynamic_lookup'.split()) cflags.extend(['-fno-common', '-dynamic']) cflags.append('-I' + sysconfig.get_python_inc()) if iswindows: cc = cxx = msvc.cc cflags = '/c /nologo /MD /W3 /EHsc /DNDEBUG'.split() ldflags = '/DLL /nologo /INCREMENTAL:NO /NODEFAULTLIB:libcmt.lib'.split( ) # cflags = '/c /nologo /Ox /MD /W3 /EHsc /Zi'.split() # ldflags = '/DLL /nologo /INCREMENTAL:NO /DEBUG'.split() if is64bit: cflags.append('/GS-') for p in win_inc: cflags.append('-I' + p) for p in win_lib: if p: ldflags.append('/LIBPATH:' + p) cflags.append('-I%s' % sysconfig.get_python_inc()) ldflags.append('/LIBPATH:' + os.path.join(sysconfig.PREFIX, 'libs')) linker = msvc.linker return namedtuple('Environment', 'cc cxx cflags ldflags linker make')( cc=cc, cxx=cxx, cflags=cflags, ldflags=ldflags, linker=linker, make=NMAKE if iswindows else 'make')