def build_plasma_tensorflow_op():
    """Compile the Plasma TensorFlow op and load it into this process.

    Compiles ``tensorflow/plasma_op.cc`` (next to this file) into
    ``plasma_op.so`` with g++, then loads the op library and stores the
    handle in the module-global ``tf_plasma_op``.

    Best-effort: silently does nothing when TensorFlow is not installed.
    Raises subprocess.CalledProcessError if the compilation fails.
    """
    global tf_plasma_op
    try:
        import tensorflow as tf
        print("TensorFlow version: " + tf.__version__)
    except ImportError:
        # TensorFlow is an optional dependency; skip building the op.
        pass
    else:
        print("Compiling Plasma TensorFlow Op...")
        dir_path = os.path.dirname(os.path.realpath(__file__))
        cc_path = os.path.join(dir_path, "tensorflow", "plasma_op.cc")
        so_path = os.path.join(dir_path, "tensorflow", "plasma_op.so")
        tf_cflags = tf.sysconfig.get_compile_flags()
        if sys.platform == 'darwin':
            # macOS: allow undefined symbols to be resolved at load time.
            tf_cflags = ["-undefined", "dynamic_lookup"] + tf_cflags
        cmd = [
            "g++", "-std=c++11", "-g", "-shared", cc_path, "-o", so_path,
            "-DNDEBUG", "-I" + pa.get_include()
        ]
        # Fixed: loop variable renamed so the builtin `dir` is not shadowed.
        cmd += ["-L" + lib_dir for lib_dir in pa.get_library_dirs()]
        cmd += ["-lplasma", "-larrow_python", "-larrow", "-fPIC"]
        cmd += tf_cflags
        cmd += tf.sysconfig.get_link_flags()
        cmd += ["-O2"]
        if tf.test.is_built_with_cuda():
            cmd += ["-DGOOGLE_CUDA"]
        print("Running command " + str(cmd))
        subprocess.check_call(cmd)
        tf_plasma_op = tf.load_op_library(TF_PLASMA_OP_PATH)
def build_plasma_tensorflow_op():
    """Build tensorflow/plasma_op.cc into plasma_op.so with g++ and load
    the resulting op library into the module-global ``tf_plasma_op``.
    Does nothing when TensorFlow cannot be imported."""
    global tf_plasma_op
    try:
        import tensorflow as tf
        print("TensorFlow version: " + tf.__version__)
    except ImportError:
        return
    print("Compiling Plasma TensorFlow Op...")
    here = os.path.dirname(os.path.realpath(__file__))
    source_file = os.path.join(here, "tensorflow", "plasma_op.cc")
    output_file = os.path.join(here, "tensorflow", "plasma_op.so")
    compile_flags = tf.sysconfig.get_compile_flags()
    if sys.platform == 'darwin':
        # On macOS, defer resolution of undefined symbols to load time.
        compile_flags = ["-undefined", "dynamic_lookup"] + compile_flags
    command = ["g++", "-std=c++11", "-g", "-shared", source_file,
               "-o", output_file, "-DNDEBUG", "-I" + pa.get_include()]
    command.extend("-L" + lib_dir for lib_dir in pa.get_library_dirs())
    command.extend(["-lplasma", "-larrow_python", "-larrow", "-fPIC"])
    command.extend(compile_flags)
    command.extend(tf.sysconfig.get_link_flags())
    command.append("-O2")
    if tf.test.is_built_with_cuda():
        command.append("-DGOOGLE_CUDA")
    print("Running command " + str(command))
    subprocess.check_call(command)
    tf_plasma_op = tf.load_op_library(TF_PLASMA_OP_PATH)
def build_extensions(self):
    """build_ext hook: attach compile/link flags and pyarrow settings to
    every extension, ensure libtiledbvcf is available, then delegate to
    the stock extension build.
    """
    opts = ["-std=c++11", "-g"]
    if TILEDBVCF_DEBUG_BUILD:
        opts.extend(["-O0"])
    else:
        opts.extend(["-O2"])
    link_opts = []

    # Hoisted out of the per-extension loop: the import and the symlink
    # creation are loop-invariant, one-time operations.
    import pyarrow
    # unversioned symlinks to arrow libraries are required for wheels
    # https://github.com/apache/arrow/blob/master/docs/source/python/extending.rst#building-extensions-against-pypi-wheels
    pyarrow.create_library_symlinks()

    for ext in self.extensions:
        ext.extra_compile_args = opts
        ext.extra_link_args = link_opts
        ext.libraries.extend(pyarrow.get_libraries())
        ext.include_dirs.append(pyarrow.get_include())
        # don't overlink the arrow core library
        if "arrow" in ext.libraries:
            ext.libraries.remove("arrow")
        ext.library_dirs.extend(pyarrow.get_library_dirs())

    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def test_cython_api(tmpdir): """ Basic test for the Cython API. """ # Fail early if cython is not found import cython # noqa with tmpdir.as_cwd(): # Set up temporary workspace pyx_file = 'pyarrow_cython_example.pyx' shutil.copyfile(os.path.join(here, pyx_file), os.path.join(str(tmpdir), pyx_file)) # Create setup.py file setup_code = setup_template.format(pyx_file=pyx_file, compiler_opts=compiler_opts, test_ld_path=test_ld_path) with open('setup.py', 'w') as f: f.write(setup_code) # ARROW-2263: Make environment with this pyarrow/ package first on the # PYTHONPATH, for local dev environments subprocess_env = test_util.get_modified_env_with_pythonpath() # Compile extension module subprocess.check_call( [sys.executable, 'setup.py', 'build_ext', '--inplace'], env=subprocess_env) # Check basic functionality orig_path = sys.path[:] sys.path.insert(0, str(tmpdir)) try: mod = __import__('pyarrow_cython_example') check_cython_example_module(mod) finally: sys.path = orig_path # Check the extension module is loadable from a subprocess without # pyarrow imported first. code = """if 1: import sys mod = __import__({mod_name!r}) arr = mod.make_null_array(5) assert mod.get_array_length(arr) == 5 assert arr.null_count == 5 """.format(mod_name='pyarrow_cython_example') if sys.platform == 'win32': delim, var = ';', 'PATH' else: delim, var = ':', 'LD_LIBRARY_PATH' subprocess_env[var] = delim.join(pa.get_library_dirs() + [subprocess_env.get(var, '')]) subprocess.check_call([sys.executable, '-c', code], stdout=subprocess.PIPE, env=subprocess_env)
def _set_arrow_symbol_resolution(flag):
    """Pre-load libarrow.so and libarrow_python.so with the given dlopen
    mode ``flag`` (e.g. ctypes.RTLD_GLOBAL) so that later-loaded
    extension modules resolve Arrow symbols consistently.

    Scans pyarrow's library directories and loads from the first one
    containing both shared objects; does nothing if none is found.
    """
    for lib_dir in map(Path, pa.get_library_dirs()):
        arrow_path = lib_dir / 'libarrow.so'
        arrow_python_path = lib_dir / 'libarrow_python.so'
        if arrow_path.exists() and arrow_python_path.exists():
            # Fixed: the libarrow handle was misleadingly named
            # `arrow_python`; also pass str() since ctypes.CDLL did not
            # accept pathlib.Path objects on older Python versions.
            # The handles are intentionally discarded: ctypes never
            # dlcloses, so the libraries stay loaded in the process.
            libarrow = ctypes.CDLL(str(arrow_path), flag)
            libarrow_python = ctypes.CDLL(str(arrow_python_path), flag)
            break
def finalize_options(self):
    """build_ext hook: populate include dirs, link libraries, library
    dirs and RPATH entries needed to compile against pybind11, pyarrow,
    OpenCL and nlopt.

    Raises RuntimeError on Windows when no OpenCL library folder can be
    located (set CL_LIBRARY_PATH to provide one).
    """
    import pybind11
    import pyarrow as pa
    build_ext.finalize_options(self)

    # Header search paths: project sources, vendored libfort, pybind11.
    if not hasattr(self, 'include_dirs'):
        self.include_dirs = []
    self.include_dirs.append("pybnesian/")
    self.include_dirs.append("lib/libfort")
    self.include_dirs.append(pybind11.get_include())

    # Link libraries: OpenCL (skipped on macOS), the Arrow libraries
    # reported by pyarrow, and nlopt.
    if not hasattr(self, 'libraries'):
        self.libraries = []
    if sys.platform != 'darwin':
        self.libraries.append("OpenCL")
    self.libraries.extend(pa.get_libraries())
    self.libraries.append("nlopt")

    if not hasattr(self, 'library_dirs'):
        self.library_dirs = []
    self.library_dirs.extend(pa.get_library_dirs())

    if sys.platform == "win32":
        # Locate the OpenCL import library: an explicit environment
        # variable wins, otherwise fall back to auto-detection.
        if "CL_LIBRARY_PATH" in os.environ:
            cl_library_path = os.environ["CL_LIBRARY_PATH"]
        else:
            cl_library_path = find_opencl.find_opencl_library_dir()
        if cl_library_path is None:
            raise RuntimeError("OpenCL library path not found. Set \"CL_LIBRARY_PATH\" environment variable to provide the OpenCL library folder.")
        self.library_dirs.append(cl_library_path)

    if not hasattr(self, 'rpath'):
        self.rpath = []
    if sys.platform == "linux":
        # Use relative RPATH to support out-of-source builds, i.e. pip install .
        # Check https://man7.org/linux/man-pages/man8/ld.so.8.html for the $ORIGIN syntax
        self.rpath.append("$ORIGIN/../pyarrow")
        # Use absolute path so auditwheel and develop builds can find pyarrow.
        self.rpath.extend(pa.get_library_dirs())
def build_extensions(self):
    """Inject C++11 compile flags and pyarrow include/library settings
    into each extension, ensure libtiledbvcf exists, then run the stock
    extension build."""
    import pyarrow

    compile_flags = ['-std=c++11', '-g', '-O2']
    linker_flags = []
    arrow_include = pyarrow.get_include()
    arrow_libraries = pyarrow.get_libraries()
    arrow_lib_dirs = pyarrow.get_library_dirs()

    for ext in self.extensions:
        ext.extra_compile_args = compile_flags
        ext.extra_link_args = linker_flags
        ext.include_dirs.append(arrow_include)
        ext.libraries.extend(arrow_libraries)
        ext.library_dirs.extend(arrow_lib_dirs)

    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def create_extensions():
    """Build the list of setuptools extensions for the package.

    Returns a single CMake-driven extension that compiles the C++ core
    (VINUM_CPP_LIB_NAME) and links it, together with the Arrow
    libraries, into the ``vinum_lib`` Python module. Compiler and linker
    flags are chosen per platform (macOS vs linux).
    """
    cpp_lib_cxx_flags = ['-fPIC']
    python_lib_cxx_flags = []

    # Arrow headers plus the project's own C++ source tree.
    include_dirs = [
        pa.get_include(),
        'vinum_cpp/src/operators/aggregate',
        'vinum_cpp/src/operators/sort',
        'vinum_cpp/src/operators',
        'vinum_cpp/src/',
    ]
    library_dirs = [_get_distutils_build_directory()]
    library_dirs.extend(pa.get_library_dirs())
    libraries = [VINUM_CPP_LIB_NAME, 'arrow', 'arrow_python']

    python_lib_linker_args = []
    python_lib_macros = None
    if sys.platform == 'darwin':
        python_lib_cxx_flags.append('--std=c++17')
        python_lib_cxx_flags.append('--stdlib=libc++')
        python_lib_cxx_flags.append('-mmacosx-version-min=10.9')
        python_lib_cxx_flags.append('-fvisibility=hidden')
        # Resolve the Arrow dylibs shipped inside the pyarrow package at load time.
        python_lib_linker_args.append('-Wl,-rpath,@loader_path/pyarrow')
    elif sys.platform == 'linux':
        python_lib_cxx_flags.append('--std=c++17')
        python_lib_cxx_flags.append('-fvisibility=hidden')
        if not is_cibuildwheel:
            python_lib_linker_args.append("-Wl,-rpath,$ORIGIN")
            python_lib_linker_args.append("-Wl,-rpath,$ORIGIN/pyarrow")
        # Force the pre-CXX11 libstdc++ ABI in both the Python wrapper and
        # the C++ core — presumably to match the pyarrow manylinux wheels;
        # verify against the pyarrow build being linked.
        python_lib_macros = ('_GLIBCXX_USE_CXX11_ABI', '0')
        cpp_lib_cxx_flags.append('-D_GLIBCXX_USE_CXX11_ABI=0')

    cpp_lib = CMakeExtension(
        "vinum_lib",
        ["vinum/core/vinum_lib.cpp"],
        cmake_sourcedir='vinum_cpp',
        cmake_target_name=VINUM_CPP_LIB_NAME,
        cmake_cxx_flags=cpp_lib_cxx_flags,
    )
    cpp_lib.include_dirs.extend(include_dirs)
    cpp_lib.libraries.extend(libraries)
    cpp_lib.library_dirs.extend(library_dirs)
    cpp_lib.extra_compile_args.extend(python_lib_cxx_flags)
    cpp_lib.extra_link_args.extend(python_lib_linker_args)
    if python_lib_macros:
        cpp_lib.define_macros.append(python_lib_macros)

    return [cpp_lib]
def gen_gis_core_modules():
    """Cythonize the arctern_core_ extension and wire in the NumPy/Arrow
    headers plus the arctern and Arrow link libraries."""
    gis_core_modules = cythonize(Extension(
        name="arctern.arctern_core_",
        sources=["arctern/cython/arctern_core_.pyx"]))

    for ext in gis_core_modules:
        # The Numpy C headers are currently required
        for include_dir in (np.get_include(), pa.get_include()):
            ext.include_dirs.append(include_dir)
        ext.libraries.extend(['arctern'] + pa.get_libraries())
        ext.library_dirs.extend(pa.get_library_dirs())
        if os.name == 'posix':
            ext.extra_compile_args.append('-std=c++11')
        # Try uncommenting the following line on Linux
        # if you get weird linker errors or runtime crashes
        # ext.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0"))

    return gis_core_modules
def build_extensions(self):
    """build_ext hook: attach compile/link flags and pyarrow settings to
    every extension (avoiding overlinking the arrow core library),
    ensure libtiledbvcf is available, then run the stock build.
    """
    opts = ['-std=c++11', '-g']
    if TILEDBVCF_DEBUG_BUILD:
        opts.extend(['-O0'])
    else:
        opts.extend(['-O2'])
    link_opts = []

    # Hoisted out of the loop: the import is loop-invariant.
    import pyarrow

    for ext in self.extensions:
        ext.extra_compile_args = opts
        ext.extra_link_args = link_opts
        ext.include_dirs.append(pyarrow.get_include())
        ext.libraries.extend(pyarrow.get_libraries())
        # don't overlink the arrow core library
        if 'arrow' in ext.libraries:
            ext.libraries.remove('arrow')
        ext.library_dirs.extend(pyarrow.get_library_dirs())

    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def _get_arrow_lib_dir(self):
    """Return the first directory pyarrow reports for its shared libraries."""
    library_dirs = pyarrow.get_library_dirs()
    return library_dirs[0]
author="Accelerated Big Data Systems, Delft University of Technology", packages=find_packages(), url="https://github.com/abs-tudelft/fletcher", project_urls={ "Bug Tracker": "https://github.com/abs-tudelft/fletcher/issues", "Documentation": "https://abs-tudelft.github.io/fletcher/", "Source Code": "https://github.com/abs-tudelft/fletcher/", }, ext_modules=[ Extension( "pyfletchgen.lib", ["pyfletchgen/lib.pyx"], language="c++", include_dirs=[np.get_include(), pa.get_include(), include_dir], libraries=pa.get_libraries() + ["fletchgen_lib"], library_dirs=pa.get_library_dirs() + lib_dirs, runtime_library_dirs=pa.get_library_dirs() + lib_dirs, extra_compile_args=["-std=c++11", "-O3"], extra_link_args=["-std=c++11"]) ], entry_points={'console_scripts': ['fletchgen=pyfletchgen:_run']}, install_requires=[ 'numpy >= 1.14', 'pandas', 'pyarrow == 1.0.0', ], setup_requires=['cython', 'numpy', 'pyarrow == 1.0.0', 'plumbum'], classifiers=[ "Programming Language :: Python :: 3", "Programming Language :: Cython", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License",
# for MacOS os.path.join(tbb_root, 'lib'), # for Windows os.path.join(tbb_root, 'lib', 'intel64', 'vc_mt'), ], language="c++") ext_arrow_reader = Extension( name="sdc.harrow_reader", sources=["sdc/native/arrow_reader.cpp"], extra_compile_args=eca, extra_link_args=ela, libraries=pa.get_libraries(), include_dirs=["sdc/native/", numba_include_path, pa.get_include()], library_dirs=lid + pa.get_library_dirs(), language="c++") _ext_mods = [ ext_hdist, ext_chiframes, ext_set, ext_str, ext_dt, ext_io, ext_transport_seq, ext_sort, ext_conc_dict, ext_arrow_reader, ]
def _get_arrow_lib_dir(self): if "SF_ARROW_LIBDIR" in os.environ: return os.environ["SF_ARROW_LIBDIR"] return pyarrow.get_library_dirs()[0]
def test_get_library_dirs_win32():
    """get_library_dirs() must include the directory holding arrow.lib."""
    candidates = (os.path.join(directory, 'arrow.lib')
                  for directory in pa.get_library_dirs())
    assert any(map(os.path.exists, candidates))
def build_extensions(self):
    """build_ext hook: prepare sources and OpenCL code, compute per-compiler
    options, pin the runtime pyarrow requirement to the compiling version,
    build the extensions, and on the platforms without RPATH support copy
    the Arrow DLLs next to the built module.

    Raises RuntimeError on Windows when no OpenCL include folder can be
    located (set CL_INCLUDE_PATH to provide one).
    """
    import pyarrow as pa
    self.create_symlinks()
    self.expand_sources()
    self.copy_opencl_code()
    # self.create_clang_tidy_compilation_db(self.extensions)
    ct = self.compiler.compiler_type
    c_opts, l_opts = self.create_options()
    opts = c_opts.get(ct, [])
    link_opts = l_opts.get(ct, [])

    if sys.platform == "win32":
        # Locate the OpenCL headers: explicit env var wins, otherwise
        # fall back to auto-detection.
        if "CL_INCLUDE_PATH" in os.environ:
            cl_include_path = os.environ["CL_INCLUDE_PATH"]
        else:
            cl_include_path = find_opencl.find_opencl_include_dir()
        if cl_include_path is None:
            raise RuntimeError("OpenCL include path not found. Set \"CL_INCLUDE_PATH\" environment variable to provide the OpenCL headers folder.")
        opts.append("/external:I" + cl_include_path)

    # Include this because the name mangling affects to find the pyarrow functions.
    opts.append("-D_GLIBCXX_USE_CXX11_ABI=0")

    for ext in self.extensions:
        # The compiled extension depends on a specific version of pyarrow.
        ext.define_macros.append(("PYARROW_VERSION_INFO", pa.__version__))

    # BUG FIX: these assignments previously ended with a trailing comma,
    # which made each a 1-tuple containing a list instead of the list
    # setuptools expects for install_requires/setup_requires.
    self.distribution.install_requires = [
        'pybind11>=2.6', 'pyarrow==' + pa.__version__, "numpy"]
    self.distribution.setup_requires = [
        'pybind11>=2.6', 'pyarrow==' + pa.__version__, "numpy"]

    # opts.append("-g")
    # opts.append("-O0")
    # opts.append("-libstd=libc++")
    # opts.append("-ferror-limit=1")
    # opts.append("-Wno-unused-variable")
    # opts.append("-Wno-unused-parameter")
    # opts.append("-Wno-return-type")
    # opts.append("-Wno-sign-compare")
    # opts.append("-fsyntax-only")
    # Activate debug mode.
    # opts.append("-UNDEBUG")
    # opts.append("-DDEBUG")

    # This reduces the binary size because it removes the debug symbols.
    # Check strip command to create release builds.
    opts.append("-g0")

    if ct == 'unix':
        # opts.append("-march=native")
        opts.append("-fdiagnostics-color=always")
        opts.append("-Wall")
        opts.append("-Wextra")
        # opts.append(cpp_flag(self.compiler))
        if has_flag(self.compiler, '-fvisibility=hidden'):
            opts.append('-fvisibility=hidden')

    for ext in self.extensions:
        ext.extra_compile_args.extend(opts)
        ext.extra_link_args.extend(link_opts)

    # https://stackoverflow.com/questions/37752901/dylib-built-on-ci-cant-be-loaded
    # Create the RPATH for MacOSX
    if sys.platform == "darwin":
        for ext in self.extensions:
            ext.extra_link_args.append("-Wl,-rpath,@loader_path/../pyarrow")
            ext.extra_link_args.append("-Wl,-rpath," + pa.get_library_dirs()[0])

    build_ext.build_extensions(self)

    # Copy the pyarrow dlls because Windows do not have the concept of RPATH.
    if sys.platform == "win32":
        import shutil  # hoisted out of the loop: loop-invariant import
        for lib in pa.get_libraries():
            shutil.copyfile(pa.get_library_dirs()[0] + '/' + lib + '.dll',
                            path_to_build_folder() + '/' + lib + '.dll')
include_dirs=[ "../../cpp/include/cudf", "../../cpp/include", os.path.join(CUDF_ROOT, "include"), os.path.join(CUDF_ROOT, "_deps/libcudacxx-src/include"), os.path.join( os.path.dirname(sysconfig.get_path("include")), "libcudf/libcudacxx", ), os.path.dirname(sysconfig.get_path("include")), np.get_include(), pa.get_include(), cuda_include_dir, ], library_dirs=( pa.get_library_dirs() + [get_python_lib(), os.path.join(os.sys.prefix, "lib")] ), libraries=["cudf"] + pa.get_libraries(), language="c++", extra_compile_args=["-std=c++14"], ) ] setup( name="cudf", version=versioneer.get_version(), description="cuDF - GPU Dataframe", url="https://github.com/rapidsai/cudf", author="NVIDIA Corporation", license="Apache 2.0",
def test_get_library_dirs_win32():
    """pa.get_library_dirs() must report a directory containing arrow.lib.

    Fixed: the previous version only inspected the *last* reported
    directory, but arrow.lib may live in any of them — check them all.
    """
    library_dirs = pa.get_library_dirs()
    assert any(os.path.exists(os.path.join(directory, 'arrow.lib'))
               for directory in library_dirs)
def test_get_library_dirs_win32():
    """arrow.lib must be present in at least one reported library dir."""
    found = False
    for directory in pa.get_library_dirs():
        if os.path.exists(os.path.join(directory, 'arrow.lib')):
            found = True
            break
    assert found
import os import numpy as np import pyarrow as pa ext_modules = cythonize( Extension("pyfletcher.lib", ["pyfletcher/lib.pyx"], language="c++", extra_compile_args=["-std=c++11", "-O3"], extra_link_args=["-std=c++11"])) for ext in ext_modules: ext.include_dirs.append(np.get_include()) ext.include_dirs.append(pa.get_include()) ext.libraries.extend(pa.get_libraries()) ext.library_dirs.extend(pa.get_library_dirs()) ext.runtime_library_dirs.extend(pa.get_library_dirs()) ext.libraries.extend(["fletcher"]) ext.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0")) this_directory = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup(name="pyfletcher", version="0.0.5", author="Lars van Leeuwen", packages=['pyfletcher'], description="A Python wrapper for the Fletcher runtime library", long_description=long_description, long_description_content_type='text/markdown',
os.environ['CXXFLAGS'] = 'std=c++11' extra_link_args = [ "-v", # verbose "-DSOME_DEFINE_OPT"] extra_compile_args = [# "-v", # verbose # too much verbosity # "-fopenmp", "-O3", "-w", # no warnings # "-Wstrict-prototypes", # "-Wimplicit-function-declaration", # "-I/usr/local/include", # "-I%s/samples/cli" % DB2PATH, "-DSPCLIENT_PYTHON"] libraries.extend(pyarrow.get_libraries()) library_dirs.extend(pyarrow.get_library_dirs()) if sys.version_info > (3,): export_symbols = ['PyInit_spclient_python'] else: export_symbols = ['initspclient_python'] def get_utilcli_c_location(): utilcli_c_location = os.path.join(provide_sample_cli_dir(), "utilcli.cpp") return utilcli_c_location class MyBuildExt(build_ext): """ My_build_ext was only coded to exploit function copy_tree and copy the .pyd to local directory
# Build the vaex_arrow_ext native extension: a single C++ module compiled
# against the NumPy, Arrow and pybind11 headers.
ext_vaex_arrow = Extension(
    "vaex_arrow_ext.ext",
    [os.path.relpath(os.path.join(dirname, "src/ext.cpp"))])
ext_modules = [ext_vaex_arrow]

for ext in ext_modules:
    # The Numpy C headers are currently required
    ext.include_dirs.append(np.get_include())
    ext.include_dirs.append(pa.get_include())
    ext.include_dirs.append(pybind11.get_include())
    if os.name == 'nt':
        # windows
        # only for windows we link
        ext.libraries.extend(pa.get_libraries())
        ext.library_dirs.extend(pa.get_library_dirs())
    if os.name == 'posix':
        ext.extra_compile_args.append('-std=c++11')
    # Try uncommenting the following line on Linux
    # if you get weird linker errors or runtime crashes
    # ext.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0"))

setup(
    name="vaex_arrow_ext",
    ext_modules=ext_modules,
    packages=["vaex_arrow_ext"],
)
sources=cython_files, include_dirs=[ "../../cpp/include/cudf", "../../cpp/include", os.path.join(CUDF_ROOT, "include"), os.path.join(CUDF_ROOT, "_deps/libcudacxx-src/include"), os.path.join( os.path.dirname(sysconfig.get_path("include")), "libcudf/libcudacxx", ), os.path.dirname(sysconfig.get_path("include")), np.get_include(), pa.get_include(), cuda_include_dir, ], library_dirs=(pa.get_library_dirs() + [get_python_lib(), os.path.join(os.sys.prefix, "lib")]), libraries=["cudf"] + pa.get_libraries() + ["arrow_cuda"], language="c++", extra_compile_args=["-std=c++14"], ) ] setup( name="cudf", version=versioneer.get_version(), description="cuDF - GPU Dataframe", url="https://github.com/rapidsai/cudf", author="NVIDIA Corporation", license="Apache 2.0",
os.path.join(os.environ['OMNISCI_ROOT_PATH'], 'lib')) dbe = Extension( "omniscidbe", ["@CMAKE_CURRENT_SOURCE_DIR@/Python/dbe.pyx"], language="c++17", include_dirs=[ np.get_include(), pa.get_include(), root, "@CMAKE_SOURCE_DIR@", "@CMAKE_CURRENT_SOURCE_DIR@", "@CMAKE_SOURCE_DIR@/ThirdParty/rapidjson", "@CMAKE_SOURCE_DIR@/Distributed/os", ], library_dirs=pa.get_library_dirs() + ["@CMAKE_CURRENT_BINARY_DIR@", "."] + extra_library_dirs, runtime_library_dirs=pa.get_library_dirs() + ["$ORIGIN/../../"] + extra_library_dirs, libraries=pa.get_libraries() + ["DBEngine", "boost_system"], extra_compile_args=["-std=c++17", "-DRAPIDJSON_HAS_STDSTRING"], ) # Try uncommenting the following line on Linux # if you get weird linker errors or runtime crashes # dbe.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0")) # "fat" wheel data_files = [] if False: # TODO: implement an option? data_files = [ ("lib", ["$<TARGET_FILE:DBEngine>"]),