def build_extension(self, ext):
    """Build one extension, special-casing the Arrow iterator extension.

    For 'snowflake.connector.arrow_iterator' this appends the C++ sources,
    include paths, compiler flags and Arrow linker inputs before deferring
    to the stock build_ext implementation. Other extensions pass straight
    through.
    """
    current_dir = os.getcwd()
    if ext.name == 'snowflake.connector.arrow_iterator':
        # Place the bundled Arrow shared libraries where the built
        # extension will look for them.
        self._copy_arrow_lib()
        ext.sources += [
            'cpp/ArrowIterator/CArrowIterator.cpp',
            'cpp/ArrowIterator/CArrowChunkIterator.cpp',
            'cpp/ArrowIterator/CArrowTableIterator.cpp',
            'cpp/ArrowIterator/SnowflakeType.cpp',
            'cpp/ArrowIterator/BinaryConverter.cpp',
            'cpp/ArrowIterator/BooleanConverter.cpp',
            'cpp/ArrowIterator/DecimalConverter.cpp',
            'cpp/ArrowIterator/DateConverter.cpp',
            'cpp/ArrowIterator/FloatConverter.cpp',
            'cpp/ArrowIterator/IntConverter.cpp',
            'cpp/ArrowIterator/StringConverter.cpp',
            'cpp/ArrowIterator/TimeConverter.cpp',
            'cpp/ArrowIterator/TimeStampConverter.cpp',
            'cpp/ArrowIterator/Python/Common.cpp',
            'cpp/ArrowIterator/Python/Helpers.cpp',
            'cpp/ArrowIterator/Util/time.cpp',
            'cpp/Logging/logging.cpp'
        ]
        ext.include_dirs.append('cpp/ArrowIterator/')
        ext.include_dirs.append('cpp/Logging')
        if platform == 'win32':
            ext.include_dirs.append(pyarrow.get_include())
            ext.include_dirs.append(numpy.get_include())
        elif platform == 'linux' or platform == 'darwin':
            # -isystem keeps warnings from third-party headers quiet.
            ext.extra_compile_args.append('-isystem' + pyarrow.get_include())
            ext.extra_compile_args.append('-isystem' + numpy.get_include())
            ext.extra_compile_args.append('-std=c++11')
            ext.extra_compile_args.append('-D_GLIBCXX_USE_CXX11_ABI=0')
        ext.library_dirs.append(
            os.path.join(current_dir, self.build_lib, 'snowflake', 'connector'))
        ext.extra_link_args += self._get_arrow_lib_as_linker_input()
        # sys.platform for linux used to return with version suffix, (i.e. linux2, linux3)
        # After version 3.3, it will always be just 'linux'
        # https://docs.python.org/3/library/sys.html#sys.platform
        if platform == 'linux':
            ext.extra_link_args += ['-Wl,-rpath,$ORIGIN']
        elif platform == 'darwin':
            # rpath,$ORIGIN only work on linux, did not work on darwin. use @loader_path instead
            # fyi, https://medium.com/@donblas/fun-with-rpath-otool-and-install-name-tool-e3e41ae86172
            ext.extra_link_args += ['-rpath', '@loader_path']
    build_ext.build_extension(self, ext)
def build_extension(self, ext):
    """Customize the arrow_iterator extension, then run the normal build."""
    cwd = os.getcwd()
    if ext.name == 'snowflake.connector.arrow_iterator':
        self._copy_arrow_lib()
        iterator_dir = 'cpp/ArrowIterator/'
        ext.sources.extend(
            iterator_dir + src for src in (
                'CArrowChunkIterator.cpp',
                'FloatConverter.cpp',
                'IntConverter.cpp',
                'StringConverter.cpp',
            ))
        ext.include_dirs.append(iterator_dir)
        ext.include_dirs.append(pyarrow.get_include())
        ext.extra_compile_args.append('-std=c++11')
        # The copied Arrow libraries live next to the built connector package.
        arrow_lib_dir = os.path.join(cwd, self.build_lib, 'snowflake', 'connector')
        ext.library_dirs.append(arrow_lib_dir)
        ext.extra_link_args += self._get_arrow_lib_as_linker_input()
        if self._is_unix():
            ext.extra_link_args += ['-Wl,-rpath,$ORIGIN']
    build_ext.build_extension(self, ext)
def build_extensions(self):
    """Configure compile/link options for every extension, then build.

    Fixes over the previous version:
    - ``import pyarrow`` and ``pyarrow.create_library_symlinks()`` are
      hoisted out of the per-extension loop; the symlinks only need to
      be created once.
    - each extension receives its own copy of the flag lists, so a later
      append on one extension cannot silently leak into the others
      (previously all extensions aliased the same list objects).
    """
    opts = ["-std=c++11", "-g"]
    if TILEDBVCF_DEBUG_BUILD:
        opts.extend(["-O0"])
    else:
        opts.extend(["-O2"])
    link_opts = []

    import pyarrow

    # unversioned symlinks to arrow libraries are required for wheels
    # https://github.com/apache/arrow/blob/master/docs/source/python/extending.rst#building-extensions-against-pypi-wheels
    pyarrow.create_library_symlinks()

    for ext in self.extensions:
        ext.extra_compile_args = list(opts)
        ext.extra_link_args = list(link_opts)
        ext.libraries.extend(pyarrow.get_libraries())
        ext.include_dirs.append(pyarrow.get_include())
        # don't overlink the arrow core library
        if "arrow" in ext.libraries:
            ext.libraries.remove("arrow")
        ext.library_dirs.extend(pyarrow.get_library_dirs())
    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def build_plasma_tensorflow_op():
    """Compile the Plasma TensorFlow op (plasma_op.so) when TF is available.

    On success, assigns the loaded op library to the module-level
    ``tf_plasma_op``. Silently does nothing if TensorFlow cannot be
    imported.
    """
    global tf_plasma_op
    try:
        import tensorflow as tf
        print("TensorFlow version: " + tf.__version__)
    except ImportError:
        pass
    else:
        print("Compiling Plasma TensorFlow Op...")
        dir_path = os.path.dirname(os.path.realpath(__file__))
        cc_path = os.path.join(dir_path, "tensorflow", "plasma_op.cc")
        so_path = os.path.join(dir_path, "tensorflow", "plasma_op.so")
        tf_cflags = tf.sysconfig.get_compile_flags()
        if sys.platform == 'darwin':
            tf_cflags = ["-undefined", "dynamic_lookup"] + tf_cflags
        cmd = [
            "g++", "-std=c++11", "-g", "-shared", cc_path, "-o", so_path,
            "-DNDEBUG", "-I" + pa.get_include()
        ]
        # Renamed the loop variable: the original shadowed the `dir` builtin.
        cmd += ["-L" + lib_dir for lib_dir in pa.get_library_dirs()]
        cmd += ["-lplasma", "-larrow_python", "-larrow", "-fPIC"]
        cmd += tf_cflags
        cmd += tf.sysconfig.get_link_flags()
        cmd += ["-O2"]
        if tf.test.is_built_with_cuda():
            cmd += ["-DGOOGLE_CUDA"]
        print("Running command " + str(cmd))
        subprocess.check_call(cmd)
        tf_plasma_op = tf.load_op_library(TF_PLASMA_OP_PATH)
def get_extension_options():
    """Build the C++ extension option dict from the "CXX" language config.

    Adds numpy/pyarrow include dirs and generated-code-friendly compile
    flags, and mirrors any configured compiler into the CXX/CC/LDSHARED/
    LDEXE environment variables. The 'compiler' key is removed from the
    returned dict.
    """
    import numpy
    import pyarrow

    opts = load_lang_config("CXX")
    opts["include_dirs"].extend([numpy.get_include(), pyarrow.get_include()])
    opts["extra_compile_args"].append("-std=c++17")

    if opts["compiler"]:
        cxx = " ".join(opts["compiler"])
        os.environ["CXX"] = cxx
        os.environ["CC"] = cxx
        # Because of odd handling of the linker in setuptools with C++ the
        # compiler and the linker must use the same programs, so build a linker
        # command line using the compiler.
        ld = " ".join(opts["compiler"] + ["-pthread", "-shared"])
        os.environ["LDSHARED"] = ld
        os.environ["LDEXE"] = ld

    opts["extra_compile_args"].extend([
        # Warnings are common in generated code and hard to fix. Don't make them errors.
        "-Wno-error",
        # Entirely disable some warning that are common in generated code and safe.
        "-Wno-unused-variable",
        "-Wno-unused-function",
        "-Wno-deprecated-declarations",
        # Disable numpy deprecation warning in generated code.
        "-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION",
    ])
    opts.pop("compiler")
    return opts
def build_plasma_tensorflow_op():
    """Compile the Plasma TensorFlow op (plasma_op.so) when TF is available.

    On success, assigns the loaded op library to the module-level
    ``tf_plasma_op``. Silently does nothing if TensorFlow cannot be
    imported.
    """
    global tf_plasma_op
    try:
        import tensorflow as tf
        print("TensorFlow version: " + tf.__version__)
    except ImportError:
        pass
    else:
        print("Compiling Plasma TensorFlow Op...")
        dir_path = os.path.dirname(os.path.realpath(__file__))
        cc_path = os.path.join(dir_path, "tensorflow", "plasma_op.cc")
        so_path = os.path.join(dir_path, "tensorflow", "plasma_op.so")
        tf_cflags = tf.sysconfig.get_compile_flags()
        if sys.platform == 'darwin':
            tf_cflags = ["-undefined", "dynamic_lookup"] + tf_cflags
        cmd = ["g++", "-std=c++11", "-g", "-shared", cc_path,
               "-o", so_path, "-DNDEBUG", "-I" + pa.get_include()]
        # Renamed the loop variable: the original shadowed the `dir` builtin.
        cmd += ["-L" + lib_dir for lib_dir in pa.get_library_dirs()]
        cmd += ["-lplasma", "-larrow_python", "-larrow", "-fPIC"]
        cmd += tf_cflags
        cmd += tf.sysconfig.get_link_flags()
        cmd += ["-O2"]
        if tf.test.is_built_with_cuda():
            cmd += ["-DGOOGLE_CUDA"]
        print("Running command " + str(cmd))
        subprocess.check_call(cmd)
        tf_plasma_op = tf.load_op_library(TF_PLASMA_OP_PATH)
def create_options(self):
    """Return (compile_opts, link_opts) dicts keyed by compiler family.

    The 'msvc' entry is for Windows builds, 'unix' for everything else.
    Third-party headers (pyarrow, numpy, eigen, OpenCL, boost, indicators,
    nlopt) are routed through /external:I or -isystem so their warnings
    are suppressed.
    """
    import pyarrow as pa
    import numpy as np
    c_opts = {
        'msvc': ['/EHsc', "/std:c++17", "/experimental:external",
                 "/external:W0",
                 "/external:I" + pa.get_include(),
                 "/external:I" + np.get_include(),
                 "/external:Ilib\\eigen-3.3.7",
                 "/external:Ilib\\OpenCL",
                 "/external:Ilib\\boost",
                 "/external:Ilib\\indicators",
                 # Windows creates a build_temp/Release/pybnesian folder structure, so apply a dirname
                 "/external:I" + os.path.join(os.path.dirname(self.build_temp), 'nlopt', 'include'),
                 "-DNOGDI"],
        'unix': ["-std=c++17",
                 "-isystem" + pa.get_include(),
                 "-isystem" + np.get_include(),
                 "-isystemlib/eigen-3.3.7",
                 "-isystemlib/OpenCL",
                 "-isystemlib/boost",
                 "-isystemlib/indicators",
                 # Unix creates a build_temp/pybnesian folder structure.
                 "-isystem" + os.path.join(self.build_temp, 'nlopt', 'include')
                 ]
    }
    l_opts = {
        'msvc': [],
        'unix': [],
    }
    if sys.platform == 'darwin':
        opencl_opts = ["-framework", "OpenCL"]
        # NOTE(review): darwin_opts is not defined in this method —
        # presumably a module-level list of macOS-specific flags; verify
        # it exists at module scope, otherwise this branch raises NameError.
        c_opts['unix'].extend(darwin_opts)
        l_opts['unix'].extend(darwin_opts)
        l_opts['unix'].extend(opencl_opts)
    return (c_opts, l_opts)
def build_extensions(self):
    """Configure compile/link options for every extension, then build.

    Fixes over the previous version:
    - ``import pyarrow`` is hoisted out of the per-extension loop.
    - each extension receives its own copy of the flag lists, so a later
      append on one extension cannot silently leak into the others
      (previously all extensions aliased the same list objects).
    """
    opts = ['-std=c++11', '-g', '-O2']
    link_opts = []

    import pyarrow

    for ext in self.extensions:
        ext.extra_compile_args = list(opts)
        ext.extra_link_args = list(link_opts)
        ext.include_dirs.append(pyarrow.get_include())
        ext.libraries.extend(pyarrow.get_libraries())
        ext.library_dirs.extend(pyarrow.get_library_dirs())
    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def create_extensions():
    """Assemble the vinum CMake-backed extension with platform flags.

    Returns a single-element list containing the configured CMakeExtension.
    """
    cpp_lib_cxx_flags = ['-fPIC']
    python_lib_cxx_flags = []
    include_dirs = [
        pa.get_include(),
        'vinum_cpp/src/operators/aggregate',
        'vinum_cpp/src/operators/sort',
        'vinum_cpp/src/operators',
        'vinum_cpp/src/',
    ]
    library_dirs = [_get_distutils_build_directory()]
    library_dirs.extend(pa.get_library_dirs())
    libraries = [VINUM_CPP_LIB_NAME, 'arrow', 'arrow_python']
    python_lib_linker_args = []
    python_lib_macros = None
    if sys.platform == 'darwin':
        python_lib_cxx_flags.append('--std=c++17')
        python_lib_cxx_flags.append('--stdlib=libc++')
        python_lib_cxx_flags.append('-mmacosx-version-min=10.9')
        python_lib_cxx_flags.append('-fvisibility=hidden')
        # @loader_path is the macOS analogue of $ORIGIN.
        python_lib_linker_args.append('-Wl,-rpath,@loader_path/pyarrow')
    elif sys.platform == 'linux':
        python_lib_cxx_flags.append('--std=c++17')
        python_lib_cxx_flags.append('-fvisibility=hidden')
        if not is_cibuildwheel:
            python_lib_linker_args.append("-Wl,-rpath,$ORIGIN")
            python_lib_linker_args.append("-Wl,-rpath,$ORIGIN/pyarrow")
        # presumably to match the C++ ABI pyarrow wheels are built with — verify
        python_lib_macros = ('_GLIBCXX_USE_CXX11_ABI', '0')
        cpp_lib_cxx_flags.append('-D_GLIBCXX_USE_CXX11_ABI=0')
    cpp_lib = CMakeExtension(
        "vinum_lib",
        ["vinum/core/vinum_lib.cpp"],
        cmake_sourcedir='vinum_cpp',
        cmake_target_name=VINUM_CPP_LIB_NAME,
        cmake_cxx_flags=cpp_lib_cxx_flags,
    )
    cpp_lib.include_dirs.extend(include_dirs)
    cpp_lib.libraries.extend(libraries)
    cpp_lib.library_dirs.extend(library_dirs)
    cpp_lib.extra_compile_args.extend(python_lib_cxx_flags)
    cpp_lib.extra_link_args.extend(python_lib_linker_args)
    if python_lib_macros:
        cpp_lib.define_macros.append(python_lib_macros)
    return [cpp_lib]
def gen_gis_core_modules():
    """Cythonize the arctern_core_ extension and wire in numpy/pyarrow/arctern.

    Returns the list of cythonized Extension objects.
    """
    gis_core_modules = cythonize(
        Extension(name="arctern.arctern_core_",
                  sources=["arctern/cython/arctern_core_.pyx"]))
    for ext in gis_core_modules:
        # The Numpy C headers are currently required
        ext.include_dirs.append(np.get_include())
        ext.include_dirs.append(pa.get_include())
        ext.libraries.extend(['arctern'] + pa.get_libraries())
        ext.library_dirs.extend(pa.get_library_dirs())
        if os.name == 'posix':
            ext.extra_compile_args.append('-std=c++11')
        # Try uncommenting the following line on Linux
        # if you get weird linker errors or runtime crashes
        #ext.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0"))
    return gis_core_modules
def build_extension(self, ext):
    """Build one extension, special-casing the Arrow iterator extension.

    For 'snowflake.connector.arrow_iterator' this appends the C++ sources,
    include paths, compiler flags and Arrow linker inputs before deferring
    to the stock build_ext implementation.
    """
    current_dir = os.getcwd()
    if ext.name == 'snowflake.connector.arrow_iterator':
        # Place the bundled Arrow shared libraries next to the built package.
        self._copy_arrow_lib()
        ext.sources += [
            'cpp/ArrowIterator/CArrowIterator.cpp',
            'cpp/ArrowIterator/CArrowChunkIterator.cpp',
            'cpp/ArrowIterator/CArrowTableIterator.cpp',
            'cpp/ArrowIterator/SnowflakeType.cpp',
            'cpp/ArrowIterator/BinaryConverter.cpp',
            'cpp/ArrowIterator/BooleanConverter.cpp',
            'cpp/ArrowIterator/DecimalConverter.cpp',
            'cpp/ArrowIterator/DateConverter.cpp',
            'cpp/ArrowIterator/FloatConverter.cpp',
            'cpp/ArrowIterator/IntConverter.cpp',
            'cpp/ArrowIterator/StringConverter.cpp',
            'cpp/ArrowIterator/TimeConverter.cpp',
            'cpp/ArrowIterator/TimeStampConverter.cpp',
            'cpp/ArrowIterator/Python/Common.cpp',
            'cpp/ArrowIterator/Python/Helpers.cpp',
            'cpp/ArrowIterator/Util/time.cpp',
            'cpp/Logging/logging.cpp'
        ]
        ext.include_dirs.append('cpp/ArrowIterator/')
        ext.include_dirs.append('cpp/Logging')
        ext.include_dirs.append(pyarrow.get_include())
        ext.include_dirs.append(numpy.get_include())
        ext.extra_compile_args.append('-std=c++11')
        ext.extra_compile_args.append('-D_GLIBCXX_USE_CXX11_ABI=0')
        ext.library_dirs.append(
            os.path.join(current_dir, self.build_lib, 'snowflake', 'connector'))
        ext.extra_link_args += self._get_arrow_lib_as_linker_input()
        # $ORIGIN makes the runtime loader look next to the extension itself.
        if self._is_unix():
            ext.extra_link_args += ['-Wl,-rpath,$ORIGIN']
    build_ext.build_extension(self, ext)
def build_extensions(self):
    """Configure compile/link options for every extension, then build.

    Fixes over the previous version:
    - ``import pyarrow`` is hoisted out of the per-extension loop.
    - each extension receives its own copy of the flag lists, so a later
      append on one extension cannot silently leak into the others
      (previously all extensions aliased the same list objects).
    """
    opts = ['-std=c++11', '-g']
    if TILEDBVCF_DEBUG_BUILD:
        opts.extend(['-O0'])
    else:
        opts.extend(['-O2'])
    link_opts = []

    import pyarrow

    for ext in self.extensions:
        ext.extra_compile_args = list(opts)
        ext.extra_link_args = list(link_opts)
        ext.include_dirs.append(pyarrow.get_include())
        ext.libraries.extend(pyarrow.get_libraries())
        # don't overlink the arrow core library
        if 'arrow' in ext.libraries:
            ext.libraries.remove('arrow')
        ext.library_dirs.extend(pyarrow.get_library_dirs())
    find_or_build_libtiledbvcf(self)
    build_ext.build_extensions(self)
def create_clang_tidy_compilation_db(self, extensions):
    """Write compile_commands.json for clang-tidy covering *extensions*.

    One entry is emitted per source file of each extension; the argument
    list mirrors the real build flags (Python, pybind11, pyarrow, numpy
    and vendored-library include paths).
    """
    db = "[{}\n]"
    # NOTE(review): the "-o" argument uses "{6}" (the pyarrow include
    # path) while the computed per-file output path is "{2}" — this looks
    # like a format-index bug; verify against the real build command.
    template = """ {{ "directory": "{0}", "file": "{1}", "output": "{2}", "arguments": ["/usr/lib/llvm-11/bin/clang", "-xc++", "{1}", "-Wno-unused-result", "-Wsign-compare", "-D", "NDEBUG", "-g", "-fwrapv", "-O2", "-Wall", "-g", "-fstack-protector-strong", "-Wformat", "-Werror=format-security", "-g", "-fwrapv", "-O2", "-g", "-fstack-protector-strong", "-Wformat", "-Werror=format-security", "-Wdate-time", "-D", "_FORTIFY_SOURCE=2", "-fPIC", "-D", "VERSION_INFO={3}", "-I", "{4}", "-I", "pybnesian/", "-I", "lib/libfort", "-I", "{5}", "-c", "-o", "{6}", "-std=c++17", "-isystem", "{6}", "-isystem", "{7}", "-isystem", "lib/eigen-3.3.7", "-isystem", "lib/OpenCL", "-isystem", "lib/boost", "-isystem", "lib/indicators", "-D", "_GLIBCXX_USE_CXX11_ABI=0", "-fdiagnostics-color=always", "-Wall", "-Wextra", "-fvisibility=hidden", "--target=x86_64-pc-linux-gnu"] }}"""
    conf_files = []
    import pathlib
    import sysconfig
    import pybind11
    import pyarrow as pa
    import numpy as np
    # Include roots substituted into the template above.
    py_include = sysconfig.get_path('include')
    pybind_include = pybind11.get_include()
    pyarrow_include = pa.get_include()
    numpy_include = np.get_include()
    for ext in extensions:
        for s in ext.sources:
            p = pathlib.Path(s)
            # Mirror the build tree layout: drop the top-level source dir
            # and swap the extension for .o to get the object file path.
            relative_path = pathlib.Path(*p.parts[1:-1])
            new_file = pathlib.Path(os.path.splitext(p.parts[-1])[0] + ".o")
            output = pathlib.Path(path_to_build_folder(), relative_path, new_file)
            conf_files.append(
                template.format(os.getcwd(), s, str(output), __version__,
                                py_include, pybind_include, pyarrow_include,
                                numpy_include)
            )
    # Local name shadows the stdlib `json` module; fine here since the
    # module is not used in this function.
    json = db.format(','.join(conf_files))
    with open('compile_commands.json', 'w') as f:
        f.write(json)
def main():
    """Configure and run setup() for pyfwfr.

    Requires an active conda environment (CONDA_PREFIX) providing the
    fwfr headers and shared library.

    Raises:
        Exception: if CONDA_PREFIX is unset or empty.
    """
    # .get() avoids a KeyError when CONDA_PREFIX is unset, so the
    # intended error message below is actually raised in that case.
    env = os.environ.get('CONDA_PREFIX', '')
    if env == "":
        raise Exception("CONDA_PREFIX environment variable not set")

    # Configure extension
    ext = Extension(name='pyfwfr._fwfr', sources=['./pyfwfr/_fwfr.pyx'])
    ext.include_dirs.append(np.get_include())
    ext.include_dirs.append(pa.get_include())
    ext.include_dirs.append('./pyfwfr/include')
    ext.include_dirs.append(env + '/include')
    ext.library_dirs.extend([env + '/lib'])
    ext.libraries.extend(['fwfr'])
    ext.extra_compile_args.append('-w')  # quiet warnings

    # Set rpath
    ext.extra_link_args.append('-Wl,-rpath,$ORIGIN/../../..')

    # Included libraries use features from C++11
    if os.name == 'posix':
        ext.extra_compile_args.append('-std=c++11')

    setup(
        name='pyfwfr',
        version='0.1',
        author='Kira Noel',
        description='Module to read fixed-width files into Arrow tables.',
        url='https://gitlab.k8s.cloud.statcan.ca/stcdatascience/fwfr',
        ext_modules=cythonize([ext]),
        cmdclass={'build_ext': build_ext_},
        packages=find_packages(),
        python_requires='>=3.7,<3.8',
        package_data={
            '': ['includes/*.pxd', 'include/fwfr/*.h', '*.pyx'],
        },
        include_package_data=True,
    )
def cythonize(module_list, *, source_root, **kwargs):
    """Wrap Cython.Build.cythonize with Katana-specific configuration.

    Verifies build requirements, assembles C++ extension options, sanity
    checks that a trivial Cython module linking libkatana_galois builds,
    then cythonizes *module_list* (a mix of Path objects pointing at .pyx
    files and pre-built Extension objects).

    Returns [] when the optional extension requirements are missing.
    """
    # TODO(amp): Dependencies are yet again repeated here. This needs to come from a central deps list.
    require_python_module("packaging")
    require_python_module("numpy", "1.10")
    try:
        require_python_module("Cython", "0.29.12")
        require_python_module("pyarrow", "4.0", "5.0.dev")
        build_extensions = True
    except RequirementError:
        print(
            "WARNING: Building Katana Python without extensions! The following features will not work: katana.local, "
            "katana.distributed, katana parallel loops. The following features will work: katana.client and "
            "katana.remote (with limitations).",
            file=sys.stderr,
        )
        build_extensions = False
    if not build_extensions:
        return []
    import Cython.Build
    import numpy
    import pyarrow
    extension_options = load_lang_config("CXX")
    extension_options["include_dirs"].append(numpy.get_include())
    extension_options["include_dirs"].append(pyarrow.get_include())
    if not extension_options["extra_compile_args"]:
        extension_options["extra_compile_args"] = ["-std=c++17", "-Werror"]
    if extension_options["compiler"]:
        compiler = " ".join(extension_options["compiler"])
        os.environ["CXX"] = compiler
        os.environ["CC"] = compiler
        # Because of odd handling of the linker in setuptools with C++ the
        # compiler and the linker must use the same programs, so build a linker
        # command line using the compiler.
        linker = " ".join(extension_options["compiler"] + ["-pthread", "-shared"])
        os.environ["LDSHARED"] = linker
        os.environ["LDEXE"] = linker
    extension_options["extra_compile_args"].extend([
        # Warnings are common in generated code and hard to fix. Don't make them errors.
        "-Wno-error",
        # Entirely disable some warning that are common in generated code and safe.
        "-Wno-unused-variable",
        "-Wno-unused-function",
        "-Wno-deprecated-declarations",
        # Disable numpy deprecation warning in generated code.
        "-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION",
    ])
    extension_options.pop("compiler")
    # NOTE(review): .copy() is shallow — if extension_options already has
    # an "extra_link_args" list, the append below mutates the shared list;
    # verify that is intended.
    test_extension_options = extension_options.copy()
    test_extension_options.setdefault("extra_link_args", [])
    if not any(
            s.endswith("/libkatana_galois.so")
            for s in test_extension_options["extra_link_args"]):
        test_extension_options["extra_link_args"].append("-lkatana_galois")
    check_cython_module(
        "libkatana_galois",
        """
# distutils: language=c++
from katana.cpp.libgalois.Galois cimport setActiveThreads, SharedMemSys
cdef SharedMemSys _katana_runtime
setActiveThreads(1)
""",
        extension_options=test_extension_options,
    )
    source_root = Path(source_root)
    source_root_name = source_root.name
    # Paths are .pyx files to compile; everything else passes through as-is.
    pyx_files = list(filter(lambda v: isinstance(v, Path), module_list))
    modules = list(filter(lambda v: not isinstance(v, Path), module_list))
    modules.extend(
        _build_cython_extensions(pyx_files, source_root_name, extension_options))
    kwargs.setdefault("include_path", [])
    kwargs["include_path"].append(str(source_root))
    kwargs["include_path"].append(numpy.get_include())
    return Cython.Build.cythonize(
        modules,
        nthreads=int(os.environ.get("CMAKE_BUILD_PARALLEL_LEVEL", "0")),
        language_level="3",
        compiler_directives={"binding": True},
        **kwargs,
    )
def test_get_include():
    """pa.get_include() must point at a directory containing arrow/api.h."""
    api_header = os.path.join(pa.get_include(), 'arrow', 'api.h')
    assert os.path.exists(api_header)
from setuptools import setup, Extension from Cython.Build import cythonize import os import numpy as np import pyarrow as pa ext_modules = cythonize( Extension("pyfletcher.lib", ["pyfletcher/lib.pyx"], language="c++", extra_compile_args=["-std=c++11", "-O3"], extra_link_args=["-std=c++11"])) for ext in ext_modules: ext.include_dirs.append(np.get_include()) ext.include_dirs.append(pa.get_include()) ext.libraries.extend(pa.get_libraries()) ext.library_dirs.extend(pa.get_library_dirs()) ext.runtime_library_dirs.extend(pa.get_library_dirs()) ext.libraries.extend(["fletcher"]) ext.define_macros.append(("_GLIBCXX_USE_CXX11_ABI", "0")) this_directory = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup(name="pyfletcher", version="0.0.5", author="Lars van Leeuwen", packages=['pyfletcher'], description="A Python wrapper for the Fletcher runtime library",
def build_extension(self, ext):
    """Build one extension, special-casing the Arrow iterator extension.

    For 'snowflake.connector.arrow_iterator' this appends the C++ sources
    (rooted at CONNECTOR_SRC_DIR), include paths, compiler flags and Arrow
    linker inputs before deferring to the stock build_ext implementation.
    """
    current_dir = os.getcwd()
    if ext.name == 'snowflake.connector.arrow_iterator':
        # SF_NO_COPY_ARROW_LIB lets a packager opt out of bundling the
        # Arrow shared libraries next to the built extension.
        if not os.environ.get("SF_NO_COPY_ARROW_LIB", False):
            self._copy_arrow_lib()
        CPP_SRC_DIR = os.path.join(CONNECTOR_SRC_DIR, 'cpp')
        ARROW_ITERATOR_SRC_DIR = os.path.join(CPP_SRC_DIR, 'ArrowIterator')
        LOGGING_SRC_DIR = os.path.join(CPP_SRC_DIR, 'Logging')
        ext.sources += [
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'CArrowIterator.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'CArrowChunkIterator.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'CArrowTableIterator.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'SnowflakeType.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'BinaryConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'BooleanConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'DecimalConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'DateConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'FloatConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'IntConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'StringConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'TimeConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'TimeStampConverter.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'Python', 'Common.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'Python', 'Helpers.cpp'),
            os.path.join(ARROW_ITERATOR_SRC_DIR, 'Util', 'time.cpp'),
            LOGGING_SRC_DIR + '/logging.cpp'
        ]
        ext.include_dirs.append(ARROW_ITERATOR_SRC_DIR)
        ext.include_dirs.append(LOGGING_SRC_DIR)
        if platform == 'win32':
            ext.include_dirs.append(pyarrow.get_include())
            ext.include_dirs.append(numpy.get_include())
        elif platform == 'linux' or platform == 'darwin':
            # -isystem keeps warnings from third-party headers quiet.
            ext.extra_compile_args.append('-isystem' + pyarrow.get_include())
            ext.extra_compile_args.append('-isystem' + numpy.get_include())
            # Respect a -std= already supplied via CXXFLAGS.
            if "std=" not in os.environ.get("CXXFLAGS", ""):
                ext.extra_compile_args.append('-std=c++11')
            ext.extra_compile_args.append(
                '-D_GLIBCXX_USE_CXX11_ABI=0')
        ext.library_dirs.append(
            os.path.join(current_dir, self.build_lib,
                         'snowflake', 'connector'))
        ext.extra_link_args += self._get_arrow_lib_as_linker_input()
        # sys.platform for linux used to return with version suffix, (i.e. linux2, linux3)
        # After version 3.3, it will always be just 'linux'
        # https://docs.python.org/3/library/sys.html#sys.platform
        if platform == 'linux':
            ext.extra_link_args += ['-Wl,-rpath,$ORIGIN']
        elif platform == 'darwin':
            # rpath,$ORIGIN only work on linux, did not work on darwin. use @loader_path instead
            # fyi, https://medium.com/@donblas/fun-with-rpath-otool-and-install-name-tool-e3e41ae86172
            ext.extra_link_args += ['-rpath', '@loader_path']
    build_ext.build_extension(self, ext)
os.path.join(tbb_root, 'lib', 'intel64', 'gcc4.4'), # for MacOS os.path.join(tbb_root, 'lib'), # for Windows os.path.join(tbb_root, 'lib', 'intel64', 'vc_mt'), ], language="c++") ext_arrow_reader = Extension( name="sdc.harrow_reader", sources=["sdc/native/arrow_reader.cpp"], extra_compile_args=eca, extra_link_args=ela, libraries=pa.get_libraries(), include_dirs=["sdc/native/", numba_include_path, pa.get_include()], library_dirs=lid + pa.get_library_dirs(), language="c++") _ext_mods = [ ext_hdist, ext_chiframes, ext_set, ext_str, ext_dt, ext_io, ext_transport_seq, ext_sort, ext_conc_dict, ext_arrow_reader, ]
from setuptools import setup from setuptools import Extension except ImportError: from distutils.core import setup from distutils.extension import Extension sys.dont_write_bytecode = True if "WINDOWS" in platform.platform().upper(): ext_modules = [ Extension( "AoN", ["AoN.pyx"], extra_compile_args=["/openmp"], extra_link_args=["/openmp"], include_dirs=[np.get_include(), pa.get_include()], ) ] else: ext_modules = [ Extension( "AoN", ["AoN.pyx"], extra_compile_args=["-fopenmp"], # do we want -Ofast? extra_link_args=["-fopenmp"], include_dirs=[np.get_include(), pa.get_include()], ) ] setup(name="AoN", ext_modules=cythonize(ext_modules))
log.error("Exception %s %s" % (type(e), e)) self.log_details() try: self.copy_tree("%s" % self.build_lib, ".") self.copy_tree("%s" % self.build_lib, "..") except distutils.errors.DistutilsFileError as e: # cannot copy tree 'build\lib.win-amd64-3.6': not a directory log.error("DistutilsFileError '%s'" % e) except Exception as e: log.error("DistutilsPlatformError %s" % e) raise include_dirs = [provide_db2_include_dir(), pyarrow.get_include(), numpy.get_include(), pybind11.get_include(), os.getcwd(), os.path.join(os.getcwd(), "include"), provide_sample_cli_dir()] if platform.system() == "Windows": pass #include_dirs.append(os.path.join(os.getcwd(), "include")) else: # do I need to append lib dir # to library_dirs /usr/local/lib so that the linker find # so far is linking without the append include_dirs.append('/usr/local/include')
def build_extension(self, ext):
    """Build one extension, special-casing the Arrow iterator extension.

    Honors the module-level ``options["debug"]`` flag (adds -g) and, for
    'snowflake.connector.arrow_iterator', appends the C++ sources, include
    paths, compiler flags and Arrow linker inputs before deferring to the
    stock build_ext implementation.
    """
    if options["debug"]:
        ext.extra_compile_args.append("-g")
        ext.extra_link_args.append("-g")
    current_dir = os.getcwd()
    if ext.name == "snowflake.connector.arrow_iterator":
        # SF_NO_COPY_ARROW_LIB lets a packager opt out of bundling the
        # Arrow shared libraries next to the built extension.
        if not os.environ.get("SF_NO_COPY_ARROW_LIB", False):
            self._copy_arrow_lib()
        CPP_SRC_DIR = os.path.join(CONNECTOR_SRC_DIR, "cpp")
        ARROW_ITERATOR_SRC_DIR = os.path.join(CPP_SRC_DIR, "ArrowIterator")
        LOGGING_SRC_DIR = os.path.join(CPP_SRC_DIR, "Logging")
        ext.sources += [
            os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowIterator.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowChunkIterator.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowTableIterator.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "SnowflakeType.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "BinaryConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "BooleanConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "DecimalConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "DateConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "FloatConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "IntConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "StringConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "TimeConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "TimeStampConverter.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "Python", "Common.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "Python", "Helpers.cpp"),
            os.path.join(ARROW_ITERATOR_SRC_DIR, "Util", "time.cpp"),
            LOGGING_SRC_DIR + "/logging.cpp",
        ]
        ext.include_dirs.append(ARROW_ITERATOR_SRC_DIR)
        ext.include_dirs.append(LOGGING_SRC_DIR)
        if platform == "win32":
            ext.include_dirs.append(pyarrow.get_include())
            ext.include_dirs.append(numpy.get_include())
        elif platform == "linux" or platform == "darwin":
            # -isystem keeps warnings from third-party headers quiet.
            ext.extra_compile_args.append("-isystem" + pyarrow.get_include())
            ext.extra_compile_args.append("-isystem" + numpy.get_include())
            # Respect a -std= already supplied via CXXFLAGS.
            if "std=" not in os.environ.get("CXXFLAGS", ""):
                ext.extra_compile_args.append("-std=c++11")
            ext.extra_compile_args.append(
                "-D_GLIBCXX_USE_CXX11_ABI=0")
        ext.library_dirs.append(
            os.path.join(current_dir, self.build_lib, "snowflake", "connector"))
        ext.extra_link_args += self._get_arrow_lib_as_linker_input()
        # sys.platform for linux used to return with version suffix, (i.e. linux2, linux3)
        # After version 3.3, it will always be just 'linux'
        # https://docs.python.org/3/library/sys.html#sys.platform
        if platform == "linux":
            ext.extra_link_args += ["-Wl,-rpath,$ORIGIN"]
        elif platform == "darwin":
            # rpath,$ORIGIN only work on linux, did not work on darwin. use @loader_path instead
            # fyi, https://medium.com/@donblas/fun-with-rpath-otool-and-install-name-tool-e3e41ae86172
            ext.extra_link_args += ["-rpath", "@loader_path"]
    build_ext.build_extension(self, ext)
compile_args = ['-std=c++17', '-fPIC'] #suppress all compiler warnings compile_args.append('-w') with open("README.md", "r") as fh: long_description = fh.read() module = Extension( name='pykafarr', sources=srcs, libraries=libs, language='c++', extra_compile_args=compile_args, include_dirs=[numpy.get_include(), pyarrow.get_include(), 'cpp/']) setup( name='pykafarr', version='0.6.0.3', author='iztok kucan', author_email='*****@*****.**', url='https://github.com/ikucan/pykafarr', long_description=long_description, long_description_content_type="text/markdown", ext_modules=[module], python_requires='>=3.5.', install_requires=['numpy', 'pyarrow', 'pandas'], setup_requires=['numpy', 'pyarrow', 'pandas'], #extras_require = ['numpy'], classifiers=[
name="pyfletchgen", version="0.0.11", author="Accelerated Big Data Systems, Delft University of Technology", packages=find_packages(), url="https://github.com/abs-tudelft/fletcher", project_urls={ "Bug Tracker": "https://github.com/abs-tudelft/fletcher/issues", "Documentation": "https://abs-tudelft.github.io/fletcher/", "Source Code": "https://github.com/abs-tudelft/fletcher/", }, ext_modules=[ Extension( "pyfletchgen.lib", ["pyfletchgen/lib.pyx"], language="c++", include_dirs=[np.get_include(), pa.get_include(), include_dir], libraries=pa.get_libraries() + ["fletchgen_lib"], library_dirs=pa.get_library_dirs() + lib_dirs, runtime_library_dirs=pa.get_library_dirs() + lib_dirs, extra_compile_args=["-std=c++11", "-O3"], extra_link_args=["-std=c++11"]) ], entry_points={'console_scripts': ['fletchgen=pyfletchgen:_run']}, install_requires=[ 'numpy >= 1.14', 'pandas', 'pyarrow == 1.0.0', ], setup_requires=['cython', 'numpy', 'pyarrow == 1.0.0', 'plumbum'], classifiers=[ "Programming Language :: Python :: 3",
#!/usr/bin/env python3 import setuptools import glob import pyarrow as pa import numpy as np arrow = pa.get_include() setuptools.setup( name='openalpha', version='1.0', ext_modules=[ setuptools.Extension( 'openalpha', glob.glob('src/openalpha/*cc'), extra_compile_args=['-std=c++17', '-Wno-deprecated-declarations'], include_dirs=[ './src', arrow, np.get_include(), ], libraries=[ 'boost_python3', 'boost_numpy3', 'boost_system', 'boost_date_time', 'boost_program_options', 'boost_filesystem', 'log4cxx', 'arrow_python', ],