def validate_lambda_function(dist, attr, value):
    if not re.compile(r'^([a-zA-Z0-9_]+\.)*[a-zA-Z0-9_]+:[a-zA-Z0-9_]+$').match(value):
        raise DistutilsSetupError(
            "{} must be in the form of 'my_package.some_module:some_function'"
            .format(attr))
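# Hedged usage sketch (not part of the validator above): a keyword validator
# with the (dist, attr, value) signature is normally wired up through the
# 'distutils.setup_keywords' entry-point group, so setuptools calls it whenever
# a project passes that keyword to setup(). The names "lambda_function" and
# "my_plugin.validators" below are hypothetical placeholders.
from setuptools import setup

setup(
    name='my_plugin',
    packages=['my_plugin'],
    entry_points={
        'distutils.setup_keywords': [
            'lambda_function = my_plugin.validators:validate_lambda_function',
        ],
    },
)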
def check_test_suite(dist, attr, value):
    if not isinstance(value, six.string_types):
        raise DistutilsSetupError("test_suite must be a string")
def winsdk_setenv(platform_arch, build_type):
    from distutils.msvc9compiler import VERSION as MSVC_VERSION
    from distutils.msvc9compiler import Reg
    from distutils.msvc9compiler import HKEYS
    from distutils.msvc9compiler import WINSDK_BASE

    sdk_version_map = {
        "v6.0a": 9.0,
        "v6.1": 9.0,
        "v7.0": 9.0,
        "v7.0a": 10.0,
        "v7.1": 10.0
    }

    log.info("Searching Windows SDK with MSVC compiler version {}".format(
        MSVC_VERSION))
    setenv_paths = []
    for base in HKEYS:
        sdk_versions = Reg.read_keys(base, WINSDK_BASE)
        if sdk_versions:
            for sdk_version in sdk_versions:
                installationfolder = Reg.get_value(
                    WINSDK_BASE + "\\" + sdk_version, "installationfolder")
                productversion = Reg.get_value(
                    WINSDK_BASE + "\\" + sdk_version, "productversion")
                setenv_path = os.path.join(installationfolder,
                                           os.path.join('bin', 'SetEnv.cmd'))
                if not os.path.exists(setenv_path):
                    continue
                if sdk_version not in sdk_version_map:
                    continue
                if sdk_version_map[sdk_version] != MSVC_VERSION:
                    continue
                setenv_paths.append(setenv_path)
    if len(setenv_paths) == 0:
        raise DistutilsSetupError(
            "Failed to find the Windows SDK with MSVC compiler "
            "version {}".format(MSVC_VERSION))
    for setenv_path in setenv_paths:
        log.info("Found {}".format(setenv_path))

    # Get SDK env (use latest SDK version installed on system)
    setenv_path = setenv_paths[-1]
    log.info("Using {}".format(setenv_path))

    build_arch = "/x86" if platform_arch.startswith("32") else "/x64"
    build_type = "/Debug" if build_type.lower() == "debug" else "/Release"
    setenv_cmd = [setenv_path, build_arch, build_type]
    setenv_env = get_environment_from_batch_command(setenv_cmd)
    setenv_env_paths = os.pathsep.join(
        [setenv_env[k] for k in setenv_env if k.upper() == 'PATH']).split(os.pathsep)
    setenv_env_without_paths = dict(
        [(k, setenv_env[k]) for k in setenv_env if k.upper() != 'PATH'])

    # Extend os.environ with SDK env
    log.info("Initializing Windows SDK env...")
    update_env_path(setenv_env_paths)
    for k in sorted(setenv_env_without_paths):
        v = setenv_env_without_paths[k]
        log.info("Inserting '{} = {}' to environment".format(k, v))
        os.environ[k] = v
    log.info("Done initializing Windows SDK env")
# Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
    'License :: OSI Approved :: Apache Software License',
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    'Programming Language :: Python :: 3.8',
    'Topic :: Scientific/Engineering :: Artificial Intelligence'
]

if sys.platform == 'darwin':
    classifiers.append('Operating System :: MacOS :: MacOS X')
elif sys.platform == 'linux':
    classifiers.append('Operating System :: POSIX :: Linux')
else:
    raise DistutilsSetupError(
        'Building on Windows is not supported currently.')

keywords = 'deep learning, apache singa'

with_cuda, with_nccl, _, _ = parse_compile_options()
if with_cuda:
    classifiers.append('Environment :: GPU :: NVIDIA CUDA')
    cuda_version = os.environ.get('CUDA_VERSION')
    cudnn_version = os.environ.get('CUDNN_VERSION')
    keywords += ', cuda{}, cudnn{}'.format(cuda_version, cudnn_version)
    cuda_major = int(cuda_version.split('.')[0])
    cuda_minor = int(cuda_version.split('.')[1])
    # local version label '+cuda10.2'. Ref: https://www.python.org/dev/peps/pep-0440/
    VERSION = VERSION + '+cuda{}.{}'.format(cuda_major, cuda_minor)
if with_nccl:
    classifiers.append('Topic :: System :: Distributed Computing')
    keywords += ', distributed'
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        tmpl = "{attr!r} must be a boolean value (got {value!r})"
        raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
def swig_sources(self, sources, extension): # Assuming SWIG 1.3.14 or later. See compatibility note in # http://www.swig.org/Doc1.3/Python.html#Python_nn6 new_sources = [] swig_sources = [] swig_targets = {} target_dirs = [] py_files = [] # swig generated .py files target_ext = '.c' if self.swig_cpp: typ = 'c++' is_cpp = True else: typ = None is_cpp = False skip_swig = 0 ext_name = extension.name.split('.')[-1] for source in sources: (base, ext) = os.path.splitext(source) if ext == '.i': # SWIG interface file if self.inplace: target_dir = os.path.dirname(base) py_target_dir = self.ext_target_dir else: target_dir = appendpath(self.build_src, os.path.dirname(base)) py_target_dir = target_dir if os.path.isfile(source): name = get_swig_modulename(source) if name != ext_name[1:]: raise DistutilsSetupError( 'mismatch of extension names: %s provides %r' ' but expected %r' % (source, name, ext_name[1:])) if typ is None: typ = get_swig_target(source) is_cpp = typ == 'c++' if is_cpp: target_ext = '.cpp' else: typ2 = get_swig_target(source) if typ != typ2: log.warn('expected %r but source %r defines %r swig target' \ % (typ, source, typ2)) if typ2 == 'c++': log.warn( 'resetting swig target to c++ (some targets may have .c extension)' ) is_cpp = True target_ext = '.cpp' else: log.warn( 'assuming that %r has c++ swig target' % (source)) target_file = os.path.join(target_dir,'%s_wrap%s' \ % (name, target_ext)) else: log.warn(' source %s does not exist: skipping swig\'ing.' \ % (source)) name = ext_name[1:] skip_swig = 1 target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): log.warn(' target %s does not exist:\n '\ 'Assuming %s_wrap.{c,cpp} was generated with '\ '"build_src --inplace" command.' \ % (target_file, name)) target_dir = os.path.dirname(base) target_file = _find_swig_target(target_dir, name) if not os.path.isfile(target_file): raise DistutilsSetupError("%r missing" % (target_file, )) log.warn(' Yes! Using %r as up-to-date target.' \ % (target_file)) target_dirs.append(target_dir) new_sources.append(target_file) py_files.append(os.path.join(py_target_dir, name + '.py')) swig_sources.append(source) swig_targets[source] = new_sources[-1] else: new_sources.append(source) if not swig_sources: return new_sources if skip_swig: return new_sources + py_files for d in target_dirs: self.mkpath(d) swig = self.swig or self.find_swig() swig_cmd = [swig, "-python"] if is_cpp: swig_cmd.append('-c++') for d in extension.include_dirs: swig_cmd.append('-I' + d) for source in swig_sources: target = swig_targets[source] depends = [source] + extension.depends if self.force or newer_group(depends, target, 'newer'): log.info("%s: %s" % (os.path.basename(swig) \ + (is_cpp and '++' or ''), source)) self.spawn(swig_cmd + self.swig_opts \ + ["-o", target, '-outdir', py_target_dir, source]) else: log.debug(" skipping '%s' swig interface (up-to-date)" \ % (source)) return new_sources + py_files
def run(self):
    # base install
    _install.run(self)

    # custom install of backend
    log.info('Now installing cppyy-cling into cppyy_backend')
    builddir = get_builddir()
    if not os.path.exists(builddir):
        raise DistutilsSetupError('Failed to find build dir!')

    # use $MAKE to install if it is defined
    env_make = os.getenv("MAKE")
    if not env_make:
        install_cmd = 'cmake'
        install_args = ['--build', '.', '--config', get_build_type(),
                        '--target', 'install']
    else:
        install_args = env_make.split()
        install_cmd, install_args = install_args[0], install_args[1:] + ['install']

    prefix = get_prefix()
    log.info('Now creating installation under %s ...', prefix)
    if env_make:
        os.unsetenv("MAKE")
    if subprocess.call([install_cmd] + install_args, cwd=builddir) != 0:
        raise DistutilsSetupError('Failed to install cppyy-cling')
    if env_make:
        os.putenv("MAKE", env_make)

    # remove allDict.cxx.pch as it's not portable (rebuild on first run, see cppyy)
    log.info('removing allDict.cxx.pch')
    os.remove(os.path.join(get_prefix(), 'etc', 'allDict.cxx.pch'))

    # for manylinux, reset the default cxxversion to 17 if no user override
    if 'STDCXX' not in os.environ and is_manylinux():
        log.info('updating root-config to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'bin', 'root-config')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            if line.find('cxxversionflag=', 0, 15) == 0:
                line = 'cxxversionflag="-std=c++1z "\n'
            elif line.find('features=', 0, 9) == 0:
                line = line.replace('cxx11', 'cxx17')
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)
        os.chmod(
            inp,
            stat.S_IMODE(os.lstat(inp).st_mode) | stat.S_IXUSR | stat.S_IXGRP
            | stat.S_IXOTH)

        log.info('updating allCppflags.txt to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'etc', 'dictpch', 'allCppflags.txt')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            if '-std=' == line[:5]:
                line = '-std=c++1z\n'
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)

        log.info('updating compiledata.h to C++17 for manylinux')
        inp = os.path.join(get_prefix(), 'include', 'compiledata.h')
        outp = inp + '.new'
        outfile = open(outp, 'w')
        for line in open(inp).readlines():
            line = line.replace('-std=c++11', '-std=c++1z')
            outfile.write(line)
        outfile.close()
        os.rename(outp, inp)

    install_path = self._get_install_path()
    log.info('Copying installation to: %s ...', install_path)
    self.copy_tree(os.path.join(get_prefix(), os.path.pardir), install_path)

    log.info('Install finished')
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        raise DistutilsSetupError(
            "%r must be a boolean value (got %r)" % (attr, value)
        )
def build_a_library(self, build_info, lib_name, libraries):
    # default compilers
    compiler = self.compiler
    fcompiler = self._f_compiler

    sources = build_info.get('sources')
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'libraries' option (library '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % lib_name)
    sources = list(sources)

    c_sources, cxx_sources, f_sources, fmodule_sources \
        = filter_sources(sources)
    requiref90 = not not fmodule_sources or \
        build_info.get('language', 'c') == 'f90'

    # save source type information so that build_ext can use it.
    source_languages = []
    if c_sources:
        source_languages.append('c')
    if cxx_sources:
        source_languages.append('c++')
    if requiref90:
        source_languages.append('f90')
    elif f_sources:
        source_languages.append('f77')
    build_info['source_languages'] = source_languages

    lib_file = compiler.library_filename(lib_name, output_dir=self.build_clib)
    depends = sources + build_info.get('depends', [])
    if not (self.force or newer_group(depends, lib_file, 'newer')):
        log.debug("skipping '%s' library (up-to-date)", lib_name)
        return
    else:
        log.info("building '%s' library", lib_name)

    config_fc = build_info.get('config_fc', {})
    if fcompiler is not None and config_fc:
        log.info('using additional config_fc from setup script '
                 'for fortran compiler: %s' % (config_fc,))
        from numpy.distutils.fcompiler import new_fcompiler
        fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                  verbose=self.verbose,
                                  dry_run=self.dry_run,
                                  force=self.force,
                                  requiref90=requiref90,
                                  c_compiler=self.compiler)
        if fcompiler is not None:
            dist = self.distribution
            base_config_fc = dist.get_option_dict('config_fc').copy()
            base_config_fc.update(config_fc)
            fcompiler.customize(base_config_fc)

    # check availability of Fortran compilers
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("library %s has Fortran sources"
                             " but no Fortran compiler found" % (lib_name))

    if fcompiler is not None:
        fcompiler.extra_f77_compile_args = build_info.get(
            'extra_f77_compile_args') or []
        fcompiler.extra_f90_compile_args = build_info.get(
            'extra_f90_compile_args') or []

    macros = build_info.get('macros')
    include_dirs = build_info.get('include_dirs')
    if include_dirs is None:
        include_dirs = []
    extra_postargs = build_info.get('extra_compiler_args') or []

    include_dirs.extend(get_numpy_include_dirs())

    # where compiled F90 module files are:
    module_dirs = build_info.get('module_dirs') or []
    module_build_dir = os.path.dirname(lib_file)
    if requiref90:
        self.mkpath(module_build_dir)

    if compiler.compiler_type == 'msvc':
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    objects = []
    if c_sources:
        log.info("compiling C sources")
        objects = compiler.compile(
            c_sources, output_dir=self.build_temp, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_postargs)

    if cxx_sources:
        log.info("compiling C++ sources")
        cxx_compiler = compiler.cxx_compiler()
        cxx_objects = cxx_compiler.compile(
            cxx_sources, output_dir=self.build_temp, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_postargs)
        objects.extend(cxx_objects)

    if f_sources or fmodule_sources:
        extra_postargs = []
        f_objects = []

        if requiref90:
            if fcompiler.module_dir_switch is None:
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(
                module_dirs, module_build_dir)

        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            f_objects += fcompiler.compile(
                fmodule_sources, output_dir=self.build_temp, macros=macros,
                include_dirs=include_dirs, debug=self.debug,
                extra_postargs=extra_postargs)

        if requiref90 and self._f_compiler.module_dir_switch is None:
            # move new compiled F90 module files to module_build_dir
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r'
                             % (f, module_build_dir))

        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(
                f_sources, output_dir=self.build_temp, macros=macros,
                include_dirs=include_dirs, debug=self.debug,
                extra_postargs=extra_postargs)
    else:
        f_objects = []

    if f_objects and not fcompiler.can_ccompiler_link(compiler):
        # Default linker cannot link Fortran object files, and results
        # need to be wrapped later. Instead of creating a real static
        # library, just keep track of the object files.
        listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

        listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in objects))

        # create empty "library" file for dependency tracking
        lib_fname = os.path.join(self.build_clib,
                                 lib_name + compiler.static_lib_extension)
        with open(lib_fname, 'wb') as f:
            pass
    else:
        # assume that default linker is suitable for
        # linking Fortran object files
        objects.extend(f_objects)
        compiler.create_static_lib(objects, lib_name,
                                   output_dir=self.build_clib,
                                   debug=self.debug)

    # fix library dependencies
    clib_libraries = build_info.get('libraries', [])
    for lname, binfo in libraries:
        if lname in clib_libraries:
            clib_libraries.extend(binfo.get('libraries', []))
    if clib_libraries:
        build_info['libraries'] = clib_libraries
del sys.argv[1:i + 1]

log.basicConfig(stream=sys.stdout, level=log.INFO)

# For build
try:
    this_file = __file__
except NameError:
    this_file = sys.argv[0]
ORIG_DIR = os.getcwd()
SCRIPT_DIR = os.path.dirname(os.path.abspath(this_file))
BUILD_DIR = "build"
BUILT_EXTENSIONS = False

CMAKE_PATH = ENV.get("CMAKE", find_executable("cmake"))
if CMAKE_PATH is None:
    raise DistutilsSetupError("`cmake` not found, and `CMAKE` is not set.")
MAKE_PATH = ENV.get("MAKE", find_executable("make"))
if MAKE_PATH is None:
    raise DistutilsSetupError("`make` not found, and `MAKE` is not set.")
MAKE_FLAGS = ENV.get("MAKE_FLAGS", "-j %d" % cpu_count()).split()
EIGEN3_INCLUDE_DIR = ENV.get("EIGEN3_INCLUDE_DIR")  # directory where Eigen is saved
if EIGEN3_INCLUDE_DIR is not None:
    EIGEN3_INCLUDE_DIR = os.path.abspath(EIGEN3_INCLUDE_DIR)
HG_PATH = find_executable("hg")
CC_PATH = ENV.get("CC", find_executable("gcc"))
if CC_PATH is None:
    raise DistutilsSetupError("`gcc` not found, and `CC` is not set.")
CXX_PATH = ENV.get("CXX", find_executable("g++"))
if CXX_PATH is None:
    raise DistutilsSetupError("`g++` not found, and `CXX` is not set.")
def run(self):
    global BUILD_DIR, BUILT_EXTENSIONS, EIGEN3_INCLUDE_DIR
    BUILD_DIR = os.path.abspath(self.build_dir)
    if EIGEN3_INCLUDE_DIR is None:
        EIGEN3_INCLUDE_DIR = os.path.join(BUILD_DIR, "eigen")
    log.info("CMAKE_PATH=" + CMAKE_PATH)
    log.info("MAKE_PATH=" + MAKE_PATH)
    log.info("MAKE_FLAGS=" + " ".join(MAKE_FLAGS))
    if HG_PATH is not None:
        log.info("HG_PATH=" + HG_PATH)
    log.info("EIGEN3_INCLUDE_DIR=" + EIGEN3_INCLUDE_DIR)
    log.info("CC_PATH=" + CC_PATH)
    log.info("CXX_PATH=" + CXX_PATH)
    log.info("SCRIPT_DIR=" + SCRIPT_DIR)
    log.info("BUILD_DIR=" + BUILD_DIR)
    log.info("INSTALL_PREFIX=" + INSTALL_PREFIX)
    log.info("PYTHON=" + PYTHON)
    run_process([CMAKE_PATH, "--version"])
    run_process([CXX_PATH, "--version"])

    if not self.skip_build:
        # Prepare folders
        if not os.path.isdir(BUILD_DIR):
            log.info("Creating build directory " + BUILD_DIR)
            os.makedirs(BUILD_DIR)
        os.chdir(BUILD_DIR)

        if os.path.isdir(EIGEN3_INCLUDE_DIR):
            log.info("Found eigen in " + EIGEN3_INCLUDE_DIR)
        elif HG_PATH is None:
            raise DistutilsSetupError("`hg` not found.")
        else:
            hg_cmd = [HG_PATH, "clone", "https://bitbucket.org/eigen/eigen"]
            log.info("Cloning Eigen...")
            if run_process(hg_cmd) != 0:
                raise DistutilsSetupError(" ".join(hg_cmd))

        os.environ["CXX"] = CXX_PATH
        os.environ["CC"] = CC_PATH

        # Build module
        cmake_cmd = [
            CMAKE_PATH,
            SCRIPT_DIR,
            "-DCMAKE_INSTALL_PREFIX=" + INSTALL_PREFIX,
            "-DEIGEN3_INCLUDE_DIR=" + EIGEN3_INCLUDE_DIR,
            "-DPYTHON=" + PYTHON,
        ]
        log.info("Configuring...")
        if run_process(cmake_cmd) != 0:
            raise DistutilsSetupError(" ".join(cmake_cmd))

        make_cmd = [MAKE_PATH] + MAKE_FLAGS
        log.info("Compiling...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))

        make_cmd = [MAKE_PATH, "install"]
        log.info("Installing...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))

    BUILT_EXTENSIONS = True  # because make calls build_ext
    _build.run(self)
def build_extensions(self):
    pre_setup.setup()

    make_option = ""
    # To resolve tf-gcc incompatibility
    has_cxx_flag = False
    glibcxx_flag = False
    if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
        try:
            import tensorflow as tf
            make_option += 'ADD_CFLAGS="'
            for flag in tf.sysconfig.get_compile_flags():
                if 'D_GLIBCXX_USE_CXX11_ABI' in flag:
                    has_cxx_flag = True
                    glibcxx_flag = False if (flag[-1] == '0') else True
                    make_option += flag + ' '
                    break
            make_option += '" '
        except:
            pass

    # To resolve torch-gcc incompatibility
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        try:
            import torch
            torch_flag = torch.compiled_with_cxx11_abi()
            if has_cxx_flag:
                if glibcxx_flag != torch_flag:
                    raise DistutilsError(
                        '-D_GLIBCXX_USE_CXX11_ABI is not consistent between '
                        'TensorFlow and PyTorch, consider installing them '
                        'separately.')
                else:
                    pass
            else:
                make_option += 'ADD_CFLAGS=-D_GLIBCXX_USE_CXX11_ABI=' + \
                    str(int(torch_flag)) + ' '
                has_cxx_flag = True
                glibcxx_flag = torch_flag
        except:
            pass

    if not os.path.exists("3rdparty/ps-lite/build/libps.a") or \
            not os.path.exists("3rdparty/ps-lite/deps/lib"):
        if os.environ.get('CI', 'false') == 'false':
            make_option += "-j "
        if has_rdma_header():
            make_option += "USE_RDMA=1 "
        make_option += pre_setup.extra_make_option()

        make_process = subprocess.Popen('make ' + make_option,
                                        cwd='3rdparty/ps-lite',
                                        stdout=sys.stdout,
                                        stderr=sys.stderr,
                                        shell=True)
        make_process.communicate()
        if make_process.returncode:
            raise DistutilsSetupError('An ERROR occurred while running the '
                                      'Makefile for the ps-lite library. '
                                      'Exit code: {0}'.format(
                                          make_process.returncode))

    options = get_common_options(self)
    if has_cxx_flag:
        options['COMPILE_FLAGS'] += [
            '-D_GLIBCXX_USE_CXX11_ABI=' + str(int(glibcxx_flag))
        ]

    built_plugins = []
    try:
        build_server(self, options)
    except:
        raise DistutilsSetupError(
            'An ERROR occurred while building the server module.\n\n'
            '%s' % traceback.format_exc())

    # If PyTorch is installed, it must be imported before others, otherwise
    # we may get an error: dlopen: cannot load any more object with static TLS
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        dummy_import_torch()

    if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
        try:
            build_tf_extension(self, options)
            built_plugins.append(True)
            print('INFO: Tensorflow extension is built successfully.')
        except:
            if not int(os.environ.get('BYTEPS_WITH_TENSORFLOW', 0)):
                print('INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise

    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        try:
            torch_version = check_torch_version()
            build_torch_extension(self, options, torch_version)
            built_plugins.append(True)
            print('INFO: PyTorch extension is built successfully.')
        except:
            if not int(os.environ.get('BYTEPS_WITH_PYTORCH', 0)):
                print('INFO: Unable to build PyTorch plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise

    if not int(os.environ.get('BYTEPS_WITHOUT_MXNET', 0)):
        # fix "libcuda.so.1 not found" issue
        cuda_home = os.environ.get('BYTEPS_CUDA_HOME', '/usr/local/cuda')
        cuda_stub_path = cuda_home + '/lib64/stubs'
        ln_command = ("cd " + cuda_stub_path +
                      "; ln -sf libcuda.so libcuda.so.1")
        os.system(ln_command)
        try:
            build_mx_extension(self, options)
            built_plugins.append(True)
            print('INFO: MXNet extension is built successfully.')
        except:
            if not int(os.environ.get('BYTEPS_WITH_MXNET', 0)):
                print('INFO: Unable to build MXNet plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise
        finally:
            os.system("rm -rf " + cuda_stub_path + "/libcuda.so.1")

    if not built_plugins:
        print('INFO: Only server module is built.')
        return

    if not any(built_plugins):
        raise DistutilsError(
            'None of TensorFlow, MXNet, PyTorch plugins were built. '
            'See errors above.')
def __init__(self, name, sources, *args, **kwargs):
    if len(sources) != 1:
        nln = '\n'
        raise DistutilsSetupError(
            f'PrebuiltExtension can accept only one source, '
            f'but got: {nln}{nln.join(sources)}')
    super().__init__(name, sources, *args, **kwargs)
def _verify_tag(self):
    if os.system('git tag | grep -q "^%s\\$"' % self.fullname) == 0:
        raise DistutilsSetupError(
            "Tag '%s' already exists!" % self.fullname)
def get_pycairo_include_dir(): """Returns the best guess at where to find the pycairo headers. A bit convoluted because we have to deal with multiple pycairo versions. Raises if pycairo isn't found or it's too old. """ pkg_config_name = get_pycairo_pkg_config_name() min_version = get_version_requirement(pkg_config_name) min_version_info = tuple(int(p) for p in min_version.split(".")) def check_path(include_dir): log.info("pycairo: trying include directory: %r" % include_dir) header_path = os.path.join(include_dir, "%s.h" % pkg_config_name) if os.path.exists(header_path): log.info("pycairo: found %r" % header_path) return True log.info("pycairo: header file (%r) not found" % header_path) return False def find_path(paths): for p in reversed(paths): if check_path(p): return p def find_new_api(): log.info("pycairo: new API") import cairo if cairo.version_info < min_version_info: raise DistutilsSetupError( "pycairo >= %s required, %s found." % ( min_version, ".".join(map(str, cairo.version_info)))) if hasattr(cairo, "get_include"): return [cairo.get_include()] log.info("pycairo: no get_include()") return [] def find_old_api(): log.info("pycairo: old API") import cairo if cairo.version_info < min_version_info: raise DistutilsSetupError( "pycairo >= %s required, %s found." % ( min_version, ".".join(map(str, cairo.version_info)))) location = os.path.dirname(os.path.abspath(cairo.__path__[0])) log.info("pycairo: found %r" % location) def samefile(src, dst): # Python 2 on Windows doesn't have os.path.samefile, so we have to # provide a fallback if hasattr(os.path, "samefile"): return os.path.samefile(src, dst) os.stat(src) os.stat(dst) return (os.path.normcase(os.path.abspath(src)) == os.path.normcase(os.path.abspath(dst))) def get_sys_path(location, name): # Returns the sysconfig path for a distribution, or None for scheme in sysconfig.get_scheme_names(): for path_type in ["platlib", "purelib"]: path = sysconfig.get_path(path_type, scheme) try: if samefile(path, location): return sysconfig.get_path(name, scheme) except EnvironmentError: pass data_path = get_sys_path(location, "data") or sys.prefix return [os.path.join(data_path, "include", "pycairo")] def find_pkg_config(): log.info("pycairo: pkg-config") pkg_config_version_check(pkg_config_name, min_version) return pkg_config_parse("--cflags-only-I", pkg_config_name) # First the new get_include() API added in >1.15.6 include_dir = find_path(find_new_api()) if include_dir is not None: return include_dir # Then try to find it in the data prefix based on the module path. # This works with many virtualenv/userdir setups, but not all apparently, # see https://gitlab.gnome.org/GNOME/pygobject/issues/150 include_dir = find_path(find_old_api()) if include_dir is not None: return include_dir # Finally, fall back to pkg-config include_dir = find_path(find_pkg_config()) if include_dir is not None: return include_dir raise DistutilsSetupError("Could not find pycairo headers")
def build_extension(self, ext):
    sources = ext.sources
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'ext_modules' option (extension '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % ext.name)
    sources = list(sources)

    if not sources:
        return

    fullname = self.get_ext_fullname(ext.name)
    if self.inplace:
        modpath = fullname.split('.')
        package = '.'.join(modpath[0:-1])
        base = modpath[-1]
        build_py = self.get_finalized_command('build_py')
        package_dir = build_py.get_package_dir(package)
        ext_filename = os.path.join(package_dir,
                                    self.get_ext_filename(base))
    else:
        ext_filename = os.path.join(self.build_lib,
                                    self.get_ext_filename(fullname))
    depends = sources + ext.depends

    if not (self.force or newer_group(depends, ext_filename, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    c_sources, cxx_sources, f_sources, fmodule_sources = \
        filter_sources(ext.sources)

    if self.compiler.compiler_type == 'msvc':
        if cxx_sources:
            # Needed to compile kiva.agg._agg extension.
            extra_args.append('/Zm1000')
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    # Set Fortran/C++ compilers for compilation and linking.
    if ext.language == 'f90':
        fcompiler = self._f90_compiler
    elif ext.language == 'f77':
        fcompiler = self._f77_compiler
    else:  # in case ext.language is c++, for instance
        fcompiler = self._f90_compiler or self._f77_compiler
    if fcompiler is not None:
        fcompiler.extra_f77_compile_args = (
            ext.extra_f77_compile_args or []) if hasattr(
                ext, 'extra_f77_compile_args') else []
        fcompiler.extra_f90_compile_args = (
            ext.extra_f90_compile_args or []) if hasattr(
                ext, 'extra_f90_compile_args') else []
    cxx_compiler = self._cxx_compiler

    # check for the availability of required compilers
    if cxx_sources and cxx_compiler is None:
        raise DistutilsError("extension %r has C++ sources "
                             "but no C++ compiler found" % (ext.name))
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("extension %r has Fortran sources "
                             "but no Fortran compiler found" % (ext.name))
    if ext.language in ['f77', 'f90'] and fcompiler is None:
        self.warn("extension %r has Fortran libraries "
                  "but no Fortran linker found, using default linker"
                  % (ext.name))
    if ext.language == 'c++' and cxx_compiler is None:
        self.warn("extension %r has C++ libraries "
                  "but no C++ linker found, using default linker"
                  % (ext.name))

    kws = {'depends': ext.depends}
    output_dir = self.build_temp

    include_dirs = ext.include_dirs + get_numpy_include_dirs()

    c_objects = []
    if c_sources:
        log.info("compiling C sources")
        c_objects = self.compiler.compile(
            c_sources, output_dir=output_dir, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_args, **kws)
    if cxx_sources:
        log.info("compiling C++ sources")
        c_objects += cxx_compiler.compile(
            cxx_sources, output_dir=output_dir, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_args, **kws)

    extra_postargs = []
    f_objects = []
    if fmodule_sources:
        log.info("compiling Fortran 90 module sources")
        module_dirs = ext.module_dirs[:]
        module_build_dir = os.path.join(
            self.build_temp,
            os.path.dirname(self.get_ext_filename(fullname)))
        self.mkpath(module_build_dir)
        if fcompiler.module_dir_switch is None:
            existing_modules = glob('*.mod')
        extra_postargs += fcompiler.module_options(module_dirs,
                                                   module_build_dir)
        f_objects += fcompiler.compile(
            fmodule_sources, output_dir=self.build_temp, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_postargs, depends=ext.depends)

        if fcompiler.module_dir_switch is None:
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))
    if f_sources:
        log.info("compiling Fortran sources")
        f_objects += fcompiler.compile(
            f_sources, output_dir=self.build_temp, macros=macros,
            include_dirs=include_dirs, debug=self.debug,
            extra_postargs=extra_postargs, depends=ext.depends)

    if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
        unlinkable_fobjects = f_objects
        objects = c_objects
    else:
        unlinkable_fobjects = []
        objects = c_objects + f_objects

    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []
    libraries = self.get_libraries(ext)[:]
    library_dirs = ext.library_dirs[:]

    linker = self.compiler.link_shared_object
    # Always use system linker when using MSVC compiler.
    if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
        # expand libraries with fcompiler libraries as we are
        # not using fcompiler linker
        self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs)
    elif ext.language in ['f77', 'f90'] and fcompiler is not None:
        linker = fcompiler.link_shared_object
    if ext.language == 'c++' and cxx_compiler is not None:
        linker = cxx_compiler.link_shared_object

    if fcompiler is not None:
        objects, libraries = self._process_unlinkable_fobjects(
            objects, libraries, fcompiler, library_dirs,
            unlinkable_fobjects)

    linker(objects, ext_filename,
           libraries=libraries,
           library_dirs=library_dirs,
           runtime_library_dirs=ext.runtime_library_dirs,
           extra_postargs=extra_args,
           export_symbols=self.get_export_symbols(ext),
           debug=self.debug,
           build_temp=self.build_temp,
           target_lang=ext.language)
def f2py_sources(self, sources, extension):
    new_sources = []
    f2py_sources = []
    f_sources = []
    f2py_targets = {}
    target_dirs = []
    ext_name = extension.name.split('.')[-1]
    skip_f2py = 0

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.pyf':  # F2PY interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
            if os.path.isfile(source):
                name = get_f2py_modulename(source)
                if name != ext_name:
                    raise DistutilsSetupError(
                        'mismatch of extension names: %s '
                        'provides %r but expected %r' % (source, name, ext_name))
                target_file = os.path.join(target_dir, name + 'module.c')
            else:
                log.debug("  source %s does not exist: skipping f2py'ing."
                          % (source))
                name = ext_name
                skip_f2py = 1
                target_file = os.path.join(target_dir, name + 'module.c')
                if not os.path.isfile(target_file):
                    log.warn('  target %s does not exist:\n   '
                             'Assuming %smodule.c was generated with '
                             '"build_src --inplace" command.'
                             % (target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = os.path.join(target_dir, name + 'module.c')
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" % (target_file,))
                    log.info('   Yes! Using %r as up-to-date target.'
                             % (target_file))
            target_dirs.append(target_dir)
            f2py_sources.append(source)
            f2py_targets[source] = target_file
            new_sources.append(target_file)
        elif fortran_ext_match(ext):
            f_sources.append(source)
        else:
            new_sources.append(source)

    if not (f2py_sources or f_sources):
        return new_sources

    for d in target_dirs:
        self.mkpath(d)

    f2py_options = extension.f2py_options + self.f2py_opts

    if self.distribution.libraries:
        for name, build_info in self.distribution.libraries:
            if name in extension.libraries:
                f2py_options.extend(build_info.get('f2py_options', []))

    log.info("f2py options: %s" % (f2py_options))

    if f2py_sources:
        if len(f2py_sources) != 1:
            raise DistutilsSetupError(
                'only one .pyf file is allowed per extension module but got '
                'more: %r' % (f2py_sources,))
        source = f2py_sources[0]
        target_file = f2py_targets[source]
        target_dir = os.path.dirname(target_file) or '.'
        depends = [source] + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            log.info("f2py: %s" % (source))
            import numpy.f2py
            numpy.f2py.run_main(f2py_options +
                                ['--build-dir', target_dir, source])
        else:
            log.debug("  skipping '%s' f2py interface (up-to-date)" % (source))
    else:
        # XXX TODO: --inplace support for sdist command
        if is_sequence(extension):
            name = extension[0]
        else:
            name = extension.name
        target_dir = os.path.join(*([self.build_src] + name.split('.')[:-1]))
        target_file = os.path.join(target_dir, ext_name + 'module.c')
        new_sources.append(target_file)
        depends = f_sources + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            log.info("f2py:> %s" % (target_file))
            self.mkpath(target_dir)
            import numpy.f2py
            numpy.f2py.run_main(f2py_options +
                                ['--lower', '--build-dir', target_dir] +
                                ['-m', ext_name] + f_sources)
        else:
            log.debug("  skipping f2py fortran files for '%s' (up-to-date)"
                      % (target_file))

    if not os.path.isfile(target_file):
        raise DistutilsError("f2py target file %r not generated"
                             % (target_file,))

    target_c = os.path.join(self.build_src, 'fortranobject.c')
    target_h = os.path.join(self.build_src, 'fortranobject.h')
    log.info("  adding '%s' to sources." % (target_c))
    new_sources.append(target_c)
    if self.build_src not in extension.include_dirs:
        log.info("  adding '%s' to include_dirs." % (self.build_src))
        extension.include_dirs.append(self.build_src)

    if not skip_f2py:
        import numpy.f2py
        d = os.path.dirname(numpy.f2py.__file__)
        source_c = os.path.join(d, 'src', 'fortranobject.c')
        source_h = os.path.join(d, 'src', 'fortranobject.h')
        if newer(source_c, target_c) or newer(source_h, target_h):
            self.mkpath(os.path.dirname(target_c))
            self.copy_file(source_c, target_c)
            self.copy_file(source_h, target_h)
    else:
        if not os.path.isfile(target_c):
            raise DistutilsSetupError("f2py target_c file %r not found"
                                      % (target_c,))
        if not os.path.isfile(target_h):
            raise DistutilsSetupError("f2py target_h file %r not found"
                                      % (target_h,))

    for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
        filename = os.path.join(target_dir, ext_name + name_ext)
        if os.path.isfile(filename):
            log.info("  adding '%s' to sources." % (filename))
            f_sources.append(filename)

    return new_sources + f_sources
def error(msg):
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)
def build_static_extension(self, ext):
    from distutils import log

    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
            ("in 'ext_modules' option (extension '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % ext.name)
    sources = list(sources)

    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    objects = self.compiler.compile(
        sources, output_dir=self.build_temp, macros=macros,
        include_dirs=ext.include_dirs, debug=self.debug,
        extra_postargs=extra_args, depends=ext.depends)
    self._built_objects = objects[:]

    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []

    language = ext.language or self.compiler.detect_language(sources)
    libname = os.path.basename(ext_path).split(os.extsep)[0]
    output_dir = os.path.dirname(ext_path)

    if (self.compiler.static_lib_format.startswith('lib') and
            libname.startswith('lib')):
        libname = libname[3:]

    # 1. copy to build directory
    # 2. copy to src tree for develop mode
    import re
    src_tree_output_dir = re.match('build.*(mdtraj.*)', output_dir).group(1)
    if not os.path.exists(src_tree_output_dir):
        os.makedirs(src_tree_output_dir)
    if not os.path.exists(output_dir):
        # necessary for windows
        os.makedirs(output_dir)
    assert os.path.isdir(src_tree_output_dir)

    self.compiler.create_static_lib(
        objects, output_libname=libname, output_dir=output_dir,
        target_lang=language)

    lib_path = self.compiler.library_filename(libname, output_dir=output_dir)
    shutil.copy(lib_path, src_tree_output_dir)
    for item in ext.export_include:
        shutil.copy(item, src_tree_output_dir)
        shutil.copy(item, output_dir)
def setup_keyword(dist, attr, value):
    if not isinstance(value, dict):
        raise DistutilsSetupError("'meta' should be a dict")
def run(self): # base run _build.run(self) # custom run log.info('Now building cppyy-cling') builddir = get_builddir() prefix = get_prefix() srcdir = get_srcdir() if not os.path.exists(srcdir): log.info( 'No src directory ... creating with "python create_src_directory.py"' ) if subprocess.call([sys.executable, 'create_src_directory.py' ]) != 0: log.error('ERROR: the source directory "%s" does not exist' % srcdir) log.error('Please run "python create_src_directory.py" first.') sys.exit(1) if not os.path.exists(builddir): log.info('Creating build directory %s ...' % builddir) os.makedirs(builddir) # get C++ standard to use, if set try: stdcxx = os.environ['STDCXX'] except KeyError: if is_manylinux(): stdcxx = '11' elif 'win32' in sys.platform: stdcxx = '14' # current cmake claims MSVC'17 does not support C++17 yet else: stdcxx = '17' if not stdcxx in ['11', '14', '17']: log.fatal('FATAL: envar STDCXX should be one of 11, 14, or 17') sys.exit(1) stdcxx = '-DCMAKE_CXX_STANDARD=' + stdcxx # extra optimization flags for Cling if not 'EXTRA_CLING_ARGS' in os.environ: has_avx = False if not is_manylinux(): try: for line in open('/proc/cpuinfo', 'r'): if 'avx' in line: has_avx = True break except Exception: try: cli_arg = subprocess.check_output( ['sysctl', 'machdep.cpu.features']) has_avx = 'avx' in cli_arg.decode( "utf-8").strip().lower() except Exception: pass extra_args = '-O2' if has_avx: extra_args += ' -mavx' os.putenv('EXTRA_CLING_ARGS', extra_args) CMAKE_COMMAND = [ 'cmake', srcdir, stdcxx, '-DLLVM_ENABLE_TERMINFO=0', '-Dminimal=ON', '-Dasimage=OFF', '-Droot7=OFF', '-Dhttp=OFF', '-Dbuiltin_pcre=ON', '-Dbuiltin_freetype=ON', '-Dbuiltin_zlib=ON', '-Dbuiltin_xxhash=ON' ] if 'darwin' in sys.platform: CMAKE_COMMAND.append('-Dlibcxx=ON') CMAKE_COMMAND.append('-DCMAKE_BUILD_TYPE=' + get_build_type()) if 'win32' in sys.platform: import platform if '64' in platform.architecture()[0]: CMAKE_COMMAND += [ '-Thost=x64', '-DCMAKE_GENERATOR_PLATFORM=x64', '-Dall=OFF', '-Dmathmore=OFF', '-Dbuiltin_ftgl=OFF', '-Droofit=OFF', '-Dgfal=OFF', '-Dfftw3=OFF' ] FFTW_INC = os.environ.get("FFTW_INC", None) FFTW_LIB = os.environ.get("FFTW_LIB", None) if FFTW_INC and FFTW_LIB: CMAKE_COMMAND += [ "-DFFTW_INCLUDE_DIR={}".format(FFTW_INC), "-DFFTW_LIBRARY={}".format(FFTW_LIB) ] else: CMAKE_COMMAND += ['-Dbuiltin_freetype=OFF'] CMAKE_COMMAND.append('-DCMAKE_INSTALL_PREFIX=' + prefix) log.info('Running cmake for cppyy-cling: %s', ' '.join(CMAKE_COMMAND)) if subprocess.call(CMAKE_COMMAND, cwd=builddir) != 0: raise DistutilsSetupError('Failed to configure cppyy-cling') # use $MAKE to build if it is defined env_make = os.getenv('MAKE') if not env_make: build_cmd = 'cmake' # default to using all available cores (x2 if hyperthreading enabled) nprocs = os.getenv("MAKE_NPROCS") or '0' try: nprocs = int(nprocs) except ValueError: log.warn( "Integer expected for MAKE_NPROCS, but got %s (ignored)", nprocs) nprocs = 0 if nprocs < 1: nprocs = multiprocessing.cpu_count() build_args = ['--build', '.', '--config', get_build_type(), '--'] if 'win32' in sys.platform: build_args.append('/maxcpucount:' + str(nprocs)) else: build_args.append('-j' + str(nprocs)) else: build_args = env_make.split() build_cmd, build_args = build_args[0], build_args[1:] log.info('Now building cppyy-cling and dependencies ...') if env_make: os.unsetenv("MAKE") if subprocess.call([build_cmd] + build_args, cwd=builddir) != 0: raise DistutilsSetupError('Failed to build cppyy-cling') if env_make: os.putenv('MAKE', env_make) log.info('Build finished')
def finalize_options(self):
    if self.key is None:
        raise DistutilsSetupError('Option --key is required')
    if self.value is None:
        raise DistutilsSetupError('Option --value is required')
def _check_extra(extra, reqs):
    name, sep, marker = extra.partition(':')
    if marker and pkg_resources.invalid_marker(marker):
        raise DistutilsSetupError("Invalid environment marker: " + marker)
    list(pkg_resources.parse_requirements(reqs))
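# Hedged example of the extras_require syntax that _check_extra() above parses:
# a key may carry an environment marker after a colon, and only the marker part
# is validated. The package names and marker below are illustrative placeholders.
from setuptools import setup

setup(
    name='my_package',
    extras_require={
        'tests': ['pytest'],
        'tests:python_version < "3.8"': ['mock'],  # text after ':' is the marker
    },
)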
def build_extensions(self):
    if not os.path.exists("3rdparty/ps-lite/build/libps.a") or \
            not os.path.exists("3rdparty/ps-lite/deps/lib"):
        str_rdma_option = ""
        if int(os.environ.get('BYTEPS_USE_RDMA', 0)):
            str_rdma_option += "USE_RDMA=1"
        make_process = subprocess.Popen('make -j ' + str_rdma_option,
                                        cwd='3rdparty/ps-lite',
                                        stdout=sys.stdout,
                                        stderr=sys.stderr,
                                        shell=True)
        make_process.communicate()
        if make_process.returncode:
            raise DistutilsSetupError('An ERROR occurred while running the '
                                      'Makefile for the ps-lite library. '
                                      'Exit code: {0}'.format(
                                          make_process.returncode))

    options = get_common_options(self)
    built_plugins = []

    # If PyTorch is installed, it must be imported before others, otherwise
    # we may get an error: dlopen: cannot load any more object with static TLS
    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        dummy_import_torch()

    if not int(os.environ.get('BYTEPS_WITHOUT_TENSORFLOW', 0)):
        try:
            build_tf_extension(self, options)
            built_plugins.append(True)
        except:
            if not int(os.environ.get('BYTEPS_WITH_TENSORFLOW', 0)):
                print('INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise

    if not int(os.environ.get('BYTEPS_WITHOUT_MXNET', 0)):
        try:
            build_mx_extension(self, options)
            built_plugins.append(True)
            print('INFO: MXNet extension is built successfully.')
        except:
            if not int(os.environ.get('BYTEPS_WITH_MXNET', 0)):
                print('INFO: Unable to build MXNet plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise

    if not int(os.environ.get('BYTEPS_WITHOUT_PYTORCH', 0)):
        try:
            torch_version = check_torch_version()
            build_torch_extension(self, options, torch_version)
            built_plugins.append(True)
            print('INFO: PyTorch extension is built successfully.')
        except:
            if not int(os.environ.get('BYTEPS_WITH_PYTORCH', 0)):
                print('INFO: Unable to build PyTorch plugin, will skip it.\n\n'
                      '%s' % traceback.format_exc())
                built_plugins.append(False)
            else:
                raise

    if not built_plugins:
        raise DistutilsError(
            'TensorFlow, MXNet, PyTorch plugins were excluded from build. '
            'Aborting.')
    if not any(built_plugins):
        raise DistutilsError(
            'None of TensorFlow, MXNet, PyTorch plugins were built. '
            'See errors above.')
def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as e:
        raise DistutilsSetupError(e)
def build_libraries(self, libraries):
    for (lib_name, build_info) in libraries:
        sources = build_info.get('sources')
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % lib_name)
        sources = list(sources)

        log.info("building '%s' library", lib_name)

        # Make sure everything is the correct type.
        # obj_deps should be a dictionary of keys as sources
        # and a list/tuple of files that are its dependencies.
        obj_deps = build_info.get('obj_deps', dict())
        if not isinstance(obj_deps, dict):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'obj_deps' must be a dictionary of "
                "type 'source: list'" % lib_name)
        dependencies = []

        # Get the global dependencies that are specified by the '' key.
        # These will go into every source's dependency list.
        global_deps = obj_deps.get('', list())
        if not isinstance(global_deps, (list, tuple)):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'obj_deps' must be a dictionary of "
                "type 'source: list'" % lib_name)

        # Build the list to be used by newer_pairwise_group;
        # each source will be auto-added to its dependencies.
        for source in sources:
            src_deps = [source]
            src_deps.extend(global_deps)
            extra_deps = obj_deps.get(source, list())
            if not isinstance(extra_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)
            src_deps.extend(extra_deps)
            dependencies.append(src_deps)

        expected_objects = self.compiler.object_filenames(
            sources, output_dir=self.build_temp)
        if newer_pairwise_group(dependencies, expected_objects) != ([], []):
            # First, compile the source code to object files in the library
            # directory. (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            cflags = build_info.get('cflags')
            objects = self.compiler.compile(
                sources, output_dir=self.build_temp, macros=macros,
                include_dirs=include_dirs, extra_postargs=cflags,
                debug=self.debug)

        # Now "link" the object files together into a static library.
        # (On Unix at least, this isn't really linking -- it just
        # builds an archive. Whatever.)
        self.compiler.create_static_lib(
            expected_objects, lib_name, output_dir=self.build_clib,
            debug=self.debug)
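# Hedged sketch of the 'libraries' entry that build_libraries() above expects:
# 'obj_deps' maps each source to extra dependency files, and the '' key lists
# global dependencies applied to every source. All names below are placeholders.
libraries = [
    ('mylib', {
        'sources': ['src/foo.c', 'src/bar.c'],
        'obj_deps': {
            '': ['include/common.h'],          # dependencies of every source
            'src/foo.c': ['include/foo.h'],    # extra dependencies for foo.c only
        },
        'macros': [('NDEBUG', '1')],
        'include_dirs': ['include'],
        'cflags': ['-O2'],
    }),
]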
def _exclude_packages(self, packages):
    if not isinstance(packages, sequence):
        raise DistutilsSetupError(
            "packages: setting must be a list or tuple (%r)" % (packages,))
    list(map(self.exclude_package, packages))
def run(self):
    global BUILD_DIR, BUILT_EXTENSIONS, EIGEN3_INCLUDE_DIR
    BUILD_DIR = os.path.abspath(self.build_dir)
    if EIGEN3_INCLUDE_DIR is None:
        EIGEN3_INCLUDE_DIR = os.path.join(BUILD_DIR, "eigen")
    log.info("CMAKE_PATH=" + CMAKE_PATH)
    log.info("MAKE_PATH=" + MAKE_PATH)
    log.info("MAKE_FLAGS=" + " ".join(MAKE_FLAGS))
    if HG_PATH is not None:
        log.info("HG_PATH=" + HG_PATH)
    log.info("EIGEN3_INCLUDE_DIR=" + EIGEN3_INCLUDE_DIR)
    log.info("CC_PATH=" + CC_PATH)
    log.info("CXX_PATH=" + CXX_PATH)
    log.info("SCRIPT_DIR=" + SCRIPT_DIR)
    log.info("BUILD_DIR=" + BUILD_DIR)
    log.info("INSTALL_PREFIX=" + INSTALL_PREFIX)
    log.info("PYTHON=" + PYTHON)
    run_process([CMAKE_PATH, "--version"])
    run_process([CXX_PATH, "--version"])

    # This will generally be called by the pip install
    if not self.skip_build:
        if CMAKE_PATH is None:
            raise DistutilsSetupError("`cmake` not found, and `CMAKE` is not set.")
        if MAKE_PATH is None:
            raise DistutilsSetupError("`make` not found, and `MAKE` is not set.")
        if CC_PATH is None:
            raise DistutilsSetupError("`gcc` not found, and `CC` is not set.")
        if CXX_PATH is None:
            raise DistutilsSetupError("`g++` not found, and `CXX` is not set.")

        # Prepare folders
        if not os.path.isdir(BUILD_DIR):
            log.info("Creating build directory " + BUILD_DIR)
            os.makedirs(BUILD_DIR)
        os.chdir(BUILD_DIR)

        if os.path.isdir(EIGEN3_INCLUDE_DIR):
            log.info("Found eigen in " + EIGEN3_INCLUDE_DIR)
        elif HG_PATH is None:
            raise DistutilsSetupError("`hg` not found.")
        else:
            hg_cmd = [HG_PATH, "clone", "https://bitbucket.org/eigen/eigen"]
            log.info("Cloning Eigen...")
            if run_process(hg_cmd) != 0:
                raise DistutilsSetupError(" ".join(hg_cmd))

        os.environ["CXX"] = CXX_PATH
        os.environ["CC"] = CC_PATH

        # Build module
        cmake_cmd = [
            CMAKE_PATH,
            SCRIPT_DIR,
            "-DCMAKE_INSTALL_PREFIX=" + INSTALL_PREFIX,
            "-DEIGEN3_INCLUDE_DIR=" + EIGEN3_INCLUDE_DIR,
            "-DPYTHON=" + PYTHON,
        ]
        for env_var in ("BACKEND",):
            value = ENV.get(env_var)
            if value is not None:
                cmake_cmd.append("-D" + env_var + "=" + value)
        log.info("Configuring...")
        if run_process(cmake_cmd) != 0:
            raise DistutilsSetupError(" ".join(cmake_cmd))

        make_cmd = [MAKE_PATH] + MAKE_FLAGS
        log.info("Compiling...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))

        make_cmd = [MAKE_PATH, "install"]
        log.info("Installing...")
        if run_process(make_cmd) != 0:
            raise DistutilsSetupError(" ".join(make_cmd))

    # This will generally be called by the manual install
    else:
        # The cmake directory and the Python directory differ in a manual
        # install, so try to move to the parent directory
        if not os.path.isdir(EIGEN3_INCLUDE_DIR) and os.path.isdir(
                os.path.join(EIGEN3_INCLUDE_DIR, os.pardir)):
            EIGEN3_INCLUDE_DIR = os.path.join(EIGEN3_INCLUDE_DIR, os.pardir)
        if not os.path.isdir(EIGEN3_INCLUDE_DIR):
            raise RuntimeError(
                "Could not find Eigen in EIGEN3_INCLUDE_DIR={}. If doing a "
                "manual install, please set the EIGEN3_INCLUDE_DIR variable "
                "to the absolute path of Eigen. If installing via pip, please "
                "file an issue at the github site.".format(EIGEN3_INCLUDE_DIR))

    BUILT_EXTENSIONS = True  # because make calls build_ext
    _build.run(self)
def finalize_options(self):
    build.finalize_options(self)
    self.help2man = find_executable('help2man')
    if not self.help2man:
        raise DistutilsSetupError('Building man pages requires help2man.')
def validate_i18n(dist, attr, value):
    """Validate i18n files."""
    for i18n_file in value:
        if not os.path.isfile(i18n_file):
            raise DistutilsSetupError(
                'Filename {} does not exist.'.format(i18n_file))
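# Hedged usage sketch: assuming validate_i18n() is registered as the handler
# for a custom 'i18n' setup() keyword (the keyword name and file paths below
# are assumptions), every listed file must exist or the build aborts.
from setuptools import setup

setup(
    name='my_app',
    i18n=['po/de.po', 'po/fr.po'],  # each path is checked by validate_i18n
)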