def run(self):
    """Run the extension build, translating failures into ``BuildFailed``."""
    # A non-empty message means the Cython preprocessing step already failed.
    if cython_message:
        raise BuildFailed(cython_message)
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        raise BuildFailed()
def run(self):
    """Warn about missing FreeTDS dirs on macOS, unpack bundled FreeTDS on
    Windows when needed, then run the normal extension build.

    Fixes: the original used the Python 2 ``print >>`` statement (a syntax
    error on Python 3), left the zip archive and extracted files unclosed,
    and crashed if a target directory already existed.
    """
    if sys.platform == 'darwin' and (not include_dirs or not library_dirs):
        print("WARNING: Could not find default directories for FreeTDS "
              "headers and/or libraries. If setup fails, try:\n"
              "python setup.py clean build_ext "
              "--include-dirs=/path/to/freetds/headers "
              "--library-dirs=/path/to/freetds/libraries "
              "build install")
    # Not running on windows means we don't want to do this
    if not WINDOWS:
        return _build_ext.run(self)
    if os.path.isdir(FREETDS):
        return _build_ext.run(self)
    log.info('extracting FreeTDS')
    from zipfile import ZipFile
    # Context managers guarantee the archive and each output file are closed.
    with ZipFile(os.path.join(WIN32, 'freetds.zip')) as zip_file:
        for name in zip_file.namelist():
            dest = os.path.normpath(os.path.join(WIN32, name))
            if name.endswith('/'):
                # exist_ok tolerates a partially-extracted tree from a
                # previous interrupted run.
                os.makedirs(dest, exist_ok=True)
            else:
                with open(dest, 'wb') as out:
                    out.write(zip_file.read(name))
    return _build_ext.run(self)
def run(self):
    """Append numpy's header directory and run the standard build."""
    # numpy is imported lazily so the setup script can be loaded (e.g.
    # for metadata) before build requirements are installed.
    import numpy as np

    self.include_dirs.append(np.get_include())
    build_ext.run(self)
def run(self):
    """Build extensions, first exposing headers from any built C libraries.

    If the distribution declares C/C++ libraries, the directory where
    build_clib placed them is added to the include path (it is already
    on the library path).
    """
    if self.distribution.has_c_libraries():
        clib_cmd = self.get_finalized_command('build_clib')
        self.include_dirs.append(clib_cmd.build_clib)
    build_ext.run(self)
def run(self):
    """Optionally regenerate the extension sources from templates, then build.

    When ``--from-templates`` is in effect, verifies jinja2 is importable
    (reporting a setup error otherwise) and executes the template runner
    before delegating to the normal build.
    """
    if self.from_templates:
        try:
            import jinja2  # noqa: F401 -- availability check only
        except ImportError:
            # Narrowed from a bare ``except:`` which also swallowed
            # KeyboardInterrupt and SystemExit.
            exit_with_error(
                "You need the python package jinja2 to rebuild the " + \
                "extension from the templates")
        # NOTE(review): ``execfile`` exists only on Python 2 -- confirm
        # this setup script is not expected to run under Python 3.
        execfile("bquery/templates/run_templates.py")
    build_ext.run(self)
def run(self):
    """Build the C extensions, optionally falling back to pure Python.

    If compilation fails with a platform error and
    ``--allow-python-fallback`` was given, the failure is logged and the
    slower Python implementations are used; otherwise the error propagates.
    """
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        err = sys.exc_info()[1]
        if not self.allow_python_fallback:
            log.warn('\n Cannot build extensions.\n'
                     ' Use "build_ext --allow-python-fallback" to use'
                     ' slower python implementations instead.\n')
            raise
        log.warn(str(err))
        log.warn('\n Extensions cannot be built.\n'
                 ' Using the slower Python implementations instead.\n')
def run(self):
    """Build the pcl_helper library (unless skipped), then the extensions.

    On pcl_helper build failure, prints manual-build instructions and
    re-raises the original error.
    """
    if not self.skip_pcl_helper:
        # build pcl_helper first
        try:
            self.run_command('build_pcl_helper')
        except Exception:
            # Narrowed from a bare ``except:`` so Ctrl-C is not caught;
            # the original error still propagates after the hint below.
            print('Error: pcl_helper could not be compiled automatically')
            print('Please compile pcl_helper manually (see %s/pcl/pcl_helper/README.rst for instructions)' % __package__ + \
                  ' and set SKIP_PCL_HELPER in setup.py to True.')
            raise
        # copy pcl_helper library to package build directory
        self.copy_tree(self.cwd_pcl_helper_dir_lib,
                       self.build_pcl_helper_dir_lib)
    _build_ext.run(self)
def run(self):
    """Build extensions, then copy package ``__init__``/``__main__`` files.

    After the normal build, walk the working tree and, for each directory
    below the top level containing a ``.py`` file, copy that package's
    ``__init__.py`` and ``__main__.py`` into the build tree (or back into
    the source tree for in-place builds).
    """
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    # In-place builds copy straight back into the source tree.
    target_dir = build_dir if not self.inplace else root_dir
    for root, dirs, files in os.walk('.'):
        for file in files:
            # ``os.sep in root`` skips the top-level '.' directory itself.
            if file.endswith(".py") and os.sep in root:
                # Strip the leading '.' component from the walk path.
                # NOTE(review): assumes both __init__.py and __main__.py
                # exist in every such package -- copy_file would fail
                # otherwise; confirm intended.
                self.copy_file(
                    Path(os.sep.join(root.split(os.sep)[1:])) / '__init__.py',
                    root_dir, target_dir)
                self.copy_file(
                    Path(os.sep.join(root.split(os.sep)[1:])) / '__main__.py',
                    root_dir, target_dir)
                break
def run(self):
    """Build declared C libraries first and merge their build arguments.

    When C/C++ libraries are declared, build them, then apply the
    ``build_args`` recorded by build_clib to every extension: unset
    attributes are assigned outright, existing list attributes extended.
    """
    if self.distribution.has_c_libraries():
        self.run_command('build_clib')
        clib = self.get_finalized_command('build_clib')
        for attr, extra in clib.build_args.items():
            for ext in self.extensions:
                current = getattr(ext, attr, None)
                if current is None:
                    setattr(ext, attr, extra)
                else:
                    current.extend(extra)
    build_ext.run(self)
def run(self):
    """Fetch/build darknet as configured, build the extension, then clean up."""
    lib_target = osp.join(
        osp.dirname(osp.abspath(__file__)), "__libdarknet", "libdarknet.so")
    try:
        setup_darknet(
            download_darknet=params.darknet_download_required,
            darknet_url=params.darknet_url,
            target_location=lib_target,
            darknet_dir=params.darknet_home,
            build_branch_name=params.darknet_branch_name)
        build_ext.run(self)
    finally:
        # Only remove the darknet tree when we were the ones who fetched it.
        if params.darknet_download_required:
            clean_darknet(params.darknet_home)
def run(self):
    """Build declared C libraries first and forward their build args."""
    if self.distribution.has_c_libraries():
        # Ensure the libraries exist before extensions link against them.
        self.run_command("build_clib")
        build_clib = self.get_finalized_command("build_clib")
        for key, value in build_clib.build_args.items():
            for ext in self.extensions:
                if getattr(ext, key, None) is None:
                    setattr(ext, key, value)
                else:
                    getattr(ext, key).extend(value)
    build_ext.run(self)
def run(self): build_ext.run(self) # Find the quantlib dll and copy it to the built package if sys.platform == "win32": # Find the visual studio runtime redist dlls dlls = [] if VC_INCLUDE_REDIST: plat_name = msvc9compiler.get_platform() plat_spec = msvc9compiler.PLAT_TO_VCVARS[plat_name] # look for the compiler executable vc_env = msvc9compiler.query_vcvarsall(VC_VERSION, plat_spec) for path in vc_env['path'].split(os.pathsep): if os.path.exists(os.path.join(path, "cl.exe")): crt_dir = "Microsoft.VC%d0.CRT" % VC_VERSION redist_dir = os.path.join(path, "..", ".redist", ARCH, crt_dir) if not os.path.exists(redist_dir): redist_dir = os.path.join(path, "..", "..", "redist", ARCH, crt_dir) break else: raise RuntimeError("Can't find cl.exe") assert os.path.exists( redist_dir), "Can't find CRT redist dlls '%s'" % redist_dir dlls.extend(glob.glob(os.path.join(redist_dir, "msvc*.dll"))) for libdir in LIBRARY_DIRS: if os.path.exists(os.path.join(libdir, QL_LIBRARY + ".dll")): dlls.append(os.path.join(libdir, QL_LIBRARY + ".dll")) break else: raise AssertionError("%s.dll not found" % QL_LIBRARY) for dll in dlls: self.copy_file( dll, os.path.join(self.build_lib, "quantlib", os.path.basename(dll))) # Write the list of dlls to be pre-loaded filename = os.path.join(self.build_lib, "quantlib", "preload_dlls.txt") log.info("writing preload dlls list to %s", filename) if not self.dry_run: with open(filename, "wt") as fh: fh.write("\n".join(map(os.path.basename, dlls)))
def run(self):
    """Add numpy/pyzmq header dirs (and an optional zmq lib dir), then build."""
    # Deferred imports: headers are only needed at build time.
    import numpy
    import zmq

    self.include_dirs.append(numpy.get_include())
    self.include_dirs.extend(zmq.get_includes())
    # Only a real, non-empty path is added to the library search dirs.
    if zmq_lib_path is not None and zmq_lib_path != "":
        self.library_dirs.append(zmq_lib_path)
    build_ext.run(self)
def run(self):
    """Build extensions, then copy each automl package's ``__init__.py``
    into the target tree so the built package is importable.

    In-place builds copy back into the source tree instead of build_lib.
    """
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    target_dir = build_dir if not self.inplace else root_dir
    # One loop replaces four near-identical copy_file calls.
    for pkg in ('automl', 'automl/data', 'automl/feature',
                'automl/hyperparam'):
        self.copy_file(Path(pkg) / '__init__.py', root_dir, target_dir)
def run(self):
    """Collect libstemmer C sources from its manifest, then build.

    Bootstraps the library checkout when missing, parses the
    ``mkinc_utf8.mak`` manifest for ``.c`` entries whose directory is in
    ``library_core_dirs``, and appends them to ``src_files``.

    Fix: the manifest file handle is now closed deterministically (the
    original opened it inside a comprehension and never closed it).
    """
    if not os.path.exists(library_dir):
        self.run_command('bootstrap')
    # Read the manifest of files in libstemmer.
    manifest = os.path.join(library_dir, 'mkinc_utf8.mak')
    with open(manifest) as fh:
        for line in fh:
            entry = line.strip()
            # Manifest lines of interest look like "foo.c" or "foo.c \".
            if len(entry) > 2 and (entry.endswith('.c \\')
                                   or entry.endswith('.c')):
                path = entry.replace(' \\', '')
                if os.path.split(path)[0] in library_core_dirs:
                    src_files.append(os.path.join(library_dir, path))
    build_ext.run(self)
def run(self):
    """Build extensions, then generate ``cnn/__init__.py`` in the target tree.

    The generated file re-exports the public API from the compiled
    submodules so that ``import cnn`` works against the built package.
    """
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    target_dir = build_dir if not self.inplace else root_dir
    init_path = target_dir / 'cnn' / '__init__.py'
    lines = (
        "from .dense import NN",
        "from .data import preprocess_data",
    )
    # write_text replaces the original touch() + open('w') pair; the
    # 'w' open already truncated/created, so touch() was redundant.
    init_path.write_text(linesep.join(lines))
def run(self):
    """Temporarily swap in only the sqlite extension module for this build."""
    self._original_modules = self.distribution.ext_modules
    try:
        # Build just the sqlite extension, whatever the distribution
        # normally declares.
        self.extensions = [sqlite_ext_module]
        return build_ext.run(self)
    finally:
        # Always restore the distribution's original module list.
        self.distribution.ext_modules = self._original_modules
def run(self):
    """Verify numpy is importable, add its header path, then build."""
    # Check numpy is installed before trying to find the location
    # of the numpy headers.
    try:
        import numpy as np
    except ImportError:
        raise ImportError(
            'numpy need to be installed before GAMtools can be '
            'compiled. Try installing with "pip install numpy" '
            'before installing GAMtools.')
    self.include_dirs.append(np.get_include())
    build_ext.run(self)
def run(self):
    """Decide whether to build the bundled libcapnp, wire up paths, build.

    Forced flags win; otherwise autodetect by compiling (and running) a
    probe, which only works for host (non-cross) compilation.
    """
    if force_bundled_libcapnp:
        need_build = True
    elif force_system_libcapnp:
        need_build = False
    else:
        # Try to autodetect presence of library. Requires compile/run
        # step so only works for host (non-cross) compliation
        try:
            test_build()
            need_build = False
        except CompileError:
            need_build = True
    if need_build:
        info(
            "*WARNING* no libcapnp detected or rebuild forced. Will download and build it from source now. If you have C++ Cap'n Proto installed, it may be out of date or is not being detected. Downloading and building libcapnp may take a while."
        )
        bundle_dir = os.path.join(_this_dir, "bundled")
        if not os.path.exists(bundle_dir):
            os.mkdir(bundle_dir)
        build_dir = os.path.join(_this_dir, "build")
        if not os.path.exists(build_dir):
            os.mkdir(build_dir)
        fetch_libcapnp(bundle_dir, libcapnp_url)
        build_libcapnp(bundle_dir, build_dir)
        self.include_dirs.append(os.path.join(build_dir, 'include'))
        self.library_dirs.append(os.path.join(build_dir, 'lib'))
    return build_ext_c.run(self)
def run(self):
    """Build extensions, then copy zs_backend package ``__init__.py`` files.

    In-place builds copy back into the source tree instead of build_lib.
    """
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    target_dir = build_dir if not self.inplace else root_dir
    # One loop replaces four near-identical copy_file calls.
    for rel in ('__init__.py', 'api/__init__.py', 'busi/__init__.py',
                'utils/__init__.py'):
        self.copy_file(Path('zs_backend') / rel, root_dir, target_dir)
def run(self):
    """Probe for a system libcapnp; download and build the bundled one
    when the probe fails or bundling is forced."""
    try:
        test_build()
        build_failed = False
    except CompileError:
        build_failed = True
    if build_failed and force_system_libcapnp:
        raise RuntimeError("libcapnp C++ library not detected and --force-system-libcapnp was used")
    if build_failed or force_bundled_libcapnp:
        if build_failed:
            info("*WARNING* no libcapnp detected. Will download and build it from source now. If you have C++ Cap'n Proto installed, it may be out of date or is not being detected. Downloading and building libcapnp may take a while.")
        bundle_dir = os.path.join(_this_dir, "bundled")
        if not os.path.exists(bundle_dir):
            os.mkdir(bundle_dir)
        build_dir = os.path.join(_this_dir, "build")
        if not os.path.exists(build_dir):
            os.mkdir(build_dir)
        fetch_libcapnp(bundle_dir)
        build_libcapnp(bundle_dir, build_dir)
        self.include_dirs.append(os.path.join(build_dir, 'include'))
        self.library_dirs.append(os.path.join(build_dir, 'lib'))
    return build_ext_c.run(self)
def run(self):
    """Build extensions, then best-effort copy every package __init__.py."""
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    target_dir = build_dir if not self.inplace else root_dir
    for init_file in glob.iglob('coper_dci/**/__init__.py', recursive=True):
        # Best-effort: a missing or unreadable file must not abort the build.
        try:
            self.copy_file(init_file, root_dir, target_dir)
        except Exception:
            continue
def run(self):
    """Detect a usable system libcapnp, building the bundled copy otherwise."""
    build_failed = False
    try:
        test_build()
    except CompileError:
        build_failed = True
    if build_failed and force_system_libcapnp:
        raise RuntimeError(
            "libcapnp C++ library not detected and --force-system-libcapnp was used"
        )
    must_bundle = build_failed or force_bundled_libcapnp
    if must_bundle:
        if build_failed:
            info(
                "*WARNING* no libcapnp detected. Will download and build it from source now. If you have C++ Cap'n Proto installed, it may be out of date or is not being detected. Downloading and building libcapnp may take a while."
            )
        bundle_dir = os.path.join(_this_dir, "bundled")
        build_dir = os.path.join(_this_dir, "build")
        for directory in (bundle_dir, build_dir):
            if not os.path.exists(directory):
                os.mkdir(directory)
        fetch_libcapnp(bundle_dir)
        build_libcapnp(bundle_dir, build_dir)
        self.include_dirs += [os.path.join(build_dir, 'include')]
        self.library_dirs += [os.path.join(build_dir, 'lib')]
    return build_ext_c.run(self)
def run(self):
    """Build extensions, then copy the Django entry-point scripts."""
    build_ext.run(self)
    build_dir = Path(self.build_lib)
    root_dir = Path(__file__).parent
    target_dir = build_dir if not self.inplace else root_dir
    # manage.py plus the WSGI/ASGI application modules.
    entry_points = (
        Path('cities') / 'manage.py',
        Path('cities/cities') / 'wsgi.py',
        Path('cities/cities') / 'asgi.py',
    )
    for script in entry_points:
        self._copy_file(script, root_dir, target_dir)
def run(self):
    """Detect host SIMD support at build time and compile for the best ISA."""
    import numpy
    import detect_simd

    self.include_dirs.append(numpy.get_include())
    simd = detect_simd.detect()
    if not self.define:
        self.define = []
    ext = self.extensions[0]
    # Prefer the widest instruction set the build host supports.
    if simd['AVX512'] == 1:
        self.define.append(('AVX512', '1'))
        ext.extra_compile_args.append('-mavx512f')
    elif simd['AVX'] == 1:
        self.define.append(('AVX', '1'))
        ext.extra_compile_args.append('-mavx')
    elif simd['SSE2'] == 1:
        self.define.append(('SSE2', '1'))
        ext.extra_compile_args.append('-msse2')
    build_ext.run(self)
def run(self):
    """Ensure a config file exists, build, then print the build summary."""
    if not config_file_present:
        # Create an empty config file if none is present so that the
        # extensions will not be rebuilt each time. Only depending on the
        # config file if it is present would make it impossible to detect a
        # necessary rebuild due to a deleted config file.
        with open(CONFIG_FILE, 'w') as cfg:
            cfg.write('# Created by setup.py - feel free to modify.\n')
    try:
        build_ext.run(self)
    except (DistutilsError, CCompilerError):
        # Point the user at the config file before re-raising.
        print(error_msg.format(file=CONFIG_FILE, summary=build_summary),
              file=sys.stderr)
        raise
    print(header(' Build summary '))
    print(build_summary)
def run(self):
    """Build the Cython extensions and mirror results into SCRIPT_DIR.

    Logs the effective build configuration, runs the normal build, and,
    when building outside SCRIPT_DIR, copies the produced build trees
    back next to the setup script.

    Fix (consistency with the sibling variant of this command): a failed
    ``copytree`` is logged instead of aborting an otherwise successful
    build.
    """
    if BUILT_EXTENSIONS:
        INCLUDE_DIRS.append(EIGEN3_INCLUDE_DIR)
        LIBRARY_DIRS.append(BUILD_DIR + "/dynet/")
    log.info("Building Cython extensions...")
    log.info("INCLUDE_DIRS=%r" % " ".join(INCLUDE_DIRS))
    log.info("LIBRARIES=%r" % " ".join(LIBRARIES))
    log.info("LIBRARY_DIRS=%r" % " ".join(LIBRARY_DIRS))
    log.info("COMPILER_ARGS=%r" % " ".join(COMPILER_ARGS))
    log.info("EXTRA_LINK_ARGS=%r" % " ".join(EXTRA_LINK_ARGS))
    log.info("RUNTIME_LIB_DIRS=%r" % " ".join(RUNTIME_LIB_DIRS))
    _build_ext.run(self)
    if os.path.abspath(".") != SCRIPT_DIR:
        log.info("Copying built extensions...")
        for d in os.listdir("build"):
            target_dir = os.path.join(SCRIPT_DIR, "build", d)
            rmtree(target_dir, ignore_errors=True)
            try:
                copytree(os.path.join("build", d), target_dir)
            except OSError as e:
                log.info("Cannot copy %s %s" % (os.path.join("build", d), e))
def run(self):
    '''Before building the C++ extension apply the templates substitution'''
    print('running pre_build_ext')
    try:
        for templ_name, dic_name, result in templates:
            # One with-statement instead of three nested ones.
            with open(dic_name, 'r') as d, open(templ_name, 'r') as t, \
                    open(result, 'w') as r:
                # SECURITY NOTE(review): yaml.load without an explicit
                # Loader can construct arbitrary Python objects; if the
                # YAML dictionaries are not fully trusted, switch to
                # yaml.safe_load.
                dic = yaml.load(d)
                tmpl = t.read()
                r.write(pystache.render(tmpl, dic))
            print('Created template %s' % result)
        build_ext.run(self)
    except Exception as e:
        # how to handle bad cases!
        print(e)
        # Bare ``raise`` preserves the original traceback; the previous
        # ``raise e`` re-raised from this frame and truncated it.
        raise
def run(self):
    """Translate user-supplied SIMD defines into compiler flags, then build."""
    import numpy

    self.include_dirs.append(numpy.get_include())
    if self.define:
        # First element of each define tuple is the macro name.
        defined = {name for name, _ in self.define}
        ext = self.extensions[0]
        if "AVX512" in defined:
            ext.extra_compile_args.append('-mavx512f')
        elif "AVX" in defined:
            ext.extra_compile_args.append('-mavx')
        elif "SSE2" in defined:
            ext.extra_compile_args.append('-msse2')
        elif "AVX2" in defined:
            self.define.append(('AVX', '1'))
            ext.extra_compile_args.append('-mavx')
            ext.extra_compile_args.append('-mavx2')
        elif "SSE41" in defined:
            self.define.append(('SSE2', '1'))
            ext.extra_compile_args.append('-msse2')
            ext.extra_compile_args.append('-msse4.1')
    build_ext.run(self)
def run(self): build_ext.run(self) # Find the quantlib dll and copy it to the built package if sys.platform == "win32": # Find the visual studio runtime redist dlls dlls = [] if VC_INCLUDE_REDIST: plat_name = msvc9compiler.get_platform() plat_spec = msvc9compiler.PLAT_TO_VCVARS[plat_name] # look for the compiler executable vc_env = msvc9compiler.query_vcvarsall(VC_VERSION, plat_spec) for path in vc_env['path'].split(os.pathsep): if os.path.exists(os.path.join(path, "cl.exe")): crt_dir = "Microsoft.VC%d0.CRT" % VC_VERSION redist_dir = os.path.join(path, "..", ".redist", ARCH, crt_dir) if not os.path.exists(redist_dir): redist_dir = os.path.join(path, "..", "..", "redist", ARCH, crt_dir) break else: raise RuntimeError("Can't find cl.exe") assert os.path.exists(redist_dir), "Can't find CRT redist dlls '%s'" % redist_dir dlls.extend(glob.glob(os.path.join(redist_dir, "msvc*.dll"))) for libdir in LIBRARY_DIRS: if os.path.exists(os.path.join(libdir, QL_LIBRARY + ".dll")): dlls.append(os.path.join(libdir, QL_LIBRARY + ".dll")) break else: raise AssertionError("%s.dll not found" % QL_LIBRARY) for dll in dlls: self.copy_file(dll, os.path.join(self.build_lib, "quantlib", os.path.basename(dll))) # Write the list of dlls to be pre-loaded filename = os.path.join(self.build_lib, "quantlib", "preload_dlls.txt") log.info("writing preload dlls list to %s", filename) if not self.dry_run: with open(filename, "wt") as fh: fh.write("\n".join(map(os.path.basename, dlls)))
def run(self):
    """Build extensions, assemble a deployable output tree, and deploy it.

    After the normal build: copy configured extra files/folders into the
    build output, copy non-excluded source ``__init__.py`` files, delete
    any compiled ``__init__.*.so`` (presumably so the plain .py wins at
    import time -- confirm), then mirror the output tree into the
    destination directory.
    """
    build_ext.run(self)
    Config.OUTPUT_DIR = os.path.join(Config.WORKING_DIR,
                                     os.path.basename(self.build_lib))
    print('Copying included files to output: ' + Config.OUTPUT_DIR)
    for filename in Config.INCLUDED_FILES:
        self.copy_file(filename, Config.OUTPUT_DIR)
    print('Copying included folders to output: ' + Config.OUTPUT_DIR)
    for folder in Config.INCLUDED_DIRS:
        self.copy_dir(folder, Config.OUTPUT_DIR)
    init_files = glob.glob(Config.SOURCE_DIR + '**/__init__.py',
                           recursive=True)
    print('Copying __init__.py to output: ' + Config.OUTPUT_DIR)
    for filename in init_files:
        container = normalize_dir(
            os.path.dirname(os.path.realpath(filename)))
        # Skip build artifacts, excluded dirs, and included dirs
        # (included dirs were already copied wholesale above).
        if check_path(container, [Config.BUILD_FOLDER]) or \
                check_path(container, Config.EXCLUDED_DIRS) or \
                check_path(container, Config.INCLUDED_DIRS):
            continue
        filename = filename.replace(Config.SOURCE_DIR, '')
        self.copy_file(filename, Config.OUTPUT_DIR)
    compiled_init_files = glob.glob(
        normalize_dir(Config.OUTPUT_DIR) + '**/__init__.*.so',
        recursive=True)
    print('Deleting compiled __init__.*.so files from output: ' +
          Config.OUTPUT_DIR)
    for filename in compiled_init_files:
        os.remove(filename)
    print('Copy all files & folders in the output to destination dir')
    if os.path.exists(Config.DESTINATION_DIR):
        shutil.rmtree(Config.DESTINATION_DIR)
    copy_tree(Config.OUTPUT_DIR, Config.DESTINATION_DIR)
def run(self):
    """Build native Kaldi/pyfst deps, apply recorded flags, then build.

    The shell step writes the ``setup.py.*`` helper files consumed below.
    """
    os.system('(cd "%s"; bash prepare_env.sh; make)' % curr_dir)
    with open('setup.py.add_libs') as f_in:
        extra_objects = f_in.read().split()
    for ext in self.extensions:
        ext.extra_objects.extend(extra_objects)
    with open('setup.py.cxxflags_kaldi') as f_in:
        kaldi_flags = f_in.read().split()
    with open('setup.py.cxxflags_pyfst') as f_in:
        pyfst_flags = f_in.read().split()
    # The two flag sets target disjoint extensions.
    for ext in self.extensions:
        if ext.name == 'alex_asr.decoder':
            ext.extra_compile_args.extend(kaldi_flags)
        elif ext.name == 'alex_asr.fst._fst':
            ext.extra_compile_args.extend(pyfst_flags)
    build_ext.run(self)
def run(self):
    """Log the build configuration, build, and mirror outputs to SCRIPT_DIR."""
    if BUILT_EXTENSIONS:
        INCLUDE_DIRS.append(EIGEN3_INCLUDE_DIR)
        LIBRARY_DIRS.append(BUILD_DIR + "/dynet/")
    log.info("Building Cython extensions...")
    # Echo every knob that influences the compile/link step.
    for label, values in (("INCLUDE_DIRS", INCLUDE_DIRS),
                          ("LIBRARIES", LIBRARIES),
                          ("LIBRARY_DIRS", LIBRARY_DIRS),
                          ("COMPILER_ARGS", COMPILER_ARGS),
                          ("EXTRA_LINK_ARGS", EXTRA_LINK_ARGS),
                          ("RUNTIME_LIB_DIRS", RUNTIME_LIB_DIRS)):
        log.info("%s=%r" % (label, " ".join(values)))
    _build_ext.run(self)
    if os.path.abspath(".") != SCRIPT_DIR:
        log.info("Copying built extensions...")
        for d in os.listdir("build"):
            target_dir = os.path.join(SCRIPT_DIR, "build", d)
            rmtree(target_dir, ignore_errors=True)
            try:
                copytree(os.path.join("build", d), target_dir)
            except OSError as e:
                # Best-effort mirror: log and continue.
                log.info("Cannot copy %s %s" % (os.path.join("build", d), e))
def run(self):
    """On Windows, unpack the bundled FreeTDS before building.

    Fixes: the zip archive and each extracted file are now closed via
    context managers, and ``os.makedirs`` tolerates directories left by
    a previous partial extraction.
    """
    # Not running on windows means we don't want to do this
    if not WINDOWS:
        return _build_ext.run(self)
    if os.path.isdir(FREETDS):
        return _build_ext.run(self)
    log.info('extracting FreeTDS')
    from zipfile import ZipFile
    with ZipFile(os.path.join(WIN32, 'freetds.zip')) as zip_file:
        for name in zip_file.namelist():
            dest = os.path.normpath(os.path.join(WIN32, name))
            if name.endswith('/'):
                os.makedirs(dest, exist_ok=True)
            else:
                with open(dest, 'wb') as out:
                    out.write(zip_file.read(name))
    return _build_ext.run(self)
def run(self):
    """Compile bundled native deps and apply the recorded build flags."""
    os.system('(cd "%s"; bash prepare_env.sh; make)' % curr_dir)

    def _read_tokens(path):
        # Each helper file is a whitespace-separated token list written
        # by the shell step above.
        with open(path) as handle:
            return handle.read().split()

    libs = _read_tokens('setup.py.add_libs')
    for ext in self.extensions:
        ext.extra_objects.extend(libs)
    flags = _read_tokens('setup.py.cxxflags_kaldi')
    for ext in self.extensions:
        if ext.name == 'alex_asr.decoder':
            ext.extra_compile_args.extend(flags)
    flags = _read_tokens('setup.py.cxxflags_pyfst')
    for ext in self.extensions:
        if ext.name == 'alex_asr.fst._fst':
            ext.extra_compile_args.extend(flags)
    build_ext.run(self)
def run(self):
    """If nvcc is available, build the CUDA static libs first and link them.

    Runs the custom ``build_nvcc`` command explicitly (so it also happens
    for commands like ``pip install -e .``), then adds the produced
    libraries and their directory to this command and registers the
    library files as dependencies of every extension so rebuilds trigger
    correctly.
    """
    if has_nvcc(self):
        # run clib build. Or, clib won't be build with such command as 'pip
        # install -e .'
        build_clib = self.get_finalized_command('build_nvcc')
        build_clib.force = self.force
        build_clib.compiler = None  # bug in distutils?
        build_clib.run()
        clibdir = build_clib.build_clib
        self.libraries.extend(build_clib.get_library_names() or [])
        self.library_dirs.append(clibdir)
        # Full paths of the built static libraries, used as file-level
        # dependencies of each extension.
        deps = [
            build_clib.compiler.library_filename(name, output_dir=clibdir)
            for name in build_clib.get_library_names()]
        for ext in self.extensions:
            ext.depends += deps
    orig_build_ext.run(self)
def run(self):
    """Regenerate stale protobuf modules, then run the Cython build.

    Locates ``protoc`` (honoring the PROTOC environment variable),
    recompiles each listed .proto whose output is missing or older than
    the source, and wraps the generated module in flake8/black
    suppression markers.
    """
    # Get protoc
    protoc = None
    if "PROTOC" in os.environ and os.path.exists(os.environ["PROTOC"]):
        protoc = os.environ["PROTOC"]
    else:
        protoc = find_executable("protoc")
    if protoc is None:
        sys.stderr.write("protoc not found")
        sys.exit(1)
    # Build .proto file
    for source in ["cudf/utils/metadata/orc_column_statistics.proto"]:
        output = source.replace(".proto", "_pb2.py")
        if not os.path.exists(output) or (
            os.path.getmtime(source) > os.path.getmtime(output)
        ):
            # NOTE(review): these appended markers appear to be
            # overwritten by protoc below and re-added via the 'r+'
            # rewrite -- confirm whether this first write is needed.
            with open(output, "a") as src:
                src.write("# flake8: noqa" + os.linesep)
                src.write("# fmt: off" + os.linesep)
            subprocess.check_call([protoc, "--python_out=.", source])
            # Prepend suppression markers and append "# fmt: on".
            with open(output, "r+") as src:
                new_src_content = (
                    "# flake8: noqa"
                    + os.linesep
                    + "# fmt: off"
                    + os.linesep
                    + src.read()
                    + "# fmt: on"
                    + os.linesep
                )
                src.seek(0)
                src.write(new_src_content)
    # Run original Cython build_ext command
    build_ext.run(self)
def run(self):
    """Resolve 'numpy' include-dir placeholders, build, then record the
    Cython version used (copying it into build_lib when present)."""
    # For extensions that require 'numpy' in their include dirs,
    # replace 'numpy' with the actual paths
    np_include = get_numpy_include_path()
    for extension in self.extensions:
        if 'numpy' in extension.include_dirs:
            idx = extension.include_dirs.index('numpy')
            extension.include_dirs.insert(idx, np_include)
            extension.include_dirs.remove('numpy')
        self._check_cython_sources(extension)
    basecls.run(self)
    # Update cython_version.py if building with Cython
    try:
        cython_version = get_pkg_version_module(
            packagename, fromlist=['cython_version'])[0]
    except (AttributeError, ImportError):
        cython_version = 'unknown'
    if self.uses_cython and self.uses_cython != cython_version:
        package_dir = os.path.relpath(packagename)
        cython_py = os.path.join(package_dir, 'cython_version.py')
        with open(cython_py, 'w') as f:
            f.write('# Generated file; do not modify\n')
            f.write('cython_version = {0!r}\n'.format(self.uses_cython))
        if os.path.isdir(self.build_lib):
            # The build/lib directory may not exist if the build_py
            # command was not previously run, which may sometimes be
            # the case
            self.copy_file(cython_py,
                           os.path.join(self.build_lib, cython_py),
                           preserve_mode=False)
    invalidate_caches()
def run(self):
    """Detect host SIMD capabilities and add matching defines/flags.

    Chooses the widest available ISA (AVX512 > AVX[/AVX2] > SSE2[/SSE4.1])
    and appends both preprocessor defines and platform-appropriate
    compiler switches (MSVC ``/arch:*`` vs GCC/Clang ``-m*``).
    """
    import numpy
    import detect_simd
    self.include_dirs.append(numpy.get_include())
    simd = detect_simd.detect()
    if not self.define:
        self.define = []
    if simd['AVX512F'] == 1:
        self.define.append(('AVX512', '1'))
        if os.name == 'nt':
            self.extensions[0].extra_compile_args.append('/arch:AVX512')
        else:
            self.extensions[0].extra_compile_args.append('-mavx512f')
    elif simd['AVX'] == 1:
        self.define.append(('AVX', '1'))
        if simd['AVX2'] == 1:
            self.define.append(('AVX2', '1'))
            if os.name == 'nt':
                self.extensions[0].extra_compile_args.append('/arch:AVX2')
            else:
                self.extensions[0].extra_compile_args.append('-mavx2')
        # NOTE(review): on MSVC this can pass both /arch:AVX2 and
        # /arch:AVX -- confirm the later flag does not override the
        # intended one.
        if os.name == 'nt':
            self.extensions[0].extra_compile_args.append('/arch:AVX')
        else:
            self.extensions[0].extra_compile_args.append('-mavx')
    elif simd['SSE2'] == 1:
        self.define.append(('SSE2', '1'))
        if simd['SSE41']:
            self.define.append(('SSE41', '1'))
            # MSVC has no /arch switch for SSE4.1, hence gcc/clang only.
            if os.name != 'nt':
                self.extensions[0].extra_compile_args.append('-msse4.1')
        if os.name == 'nt':
            self.extensions[0].extra_compile_args.append('/arch:SSE2')
        else:
            self.extensions[0].extra_compile_args.append('-msse2')
    build_ext.run(self)
def run(self, *args, **kw):
    """On MSVC builds, generate .lib import libraries from bundled .def
    files and add the msinttypes headers before delegating to the
    standard build."""
    if self.compiler is None:
        self.compiler = distutils.ccompiler.get_default_compiler()
    if self.compiler == 'msvc':
        msvc = distutils.ccompiler.new_compiler(compiler='msvc',
                                                verbose=self.verbose,
                                                dry_run=self.dry_run,
                                                force=self.force)
        msvc.initialize()
        deps_lib = os.path.join('..', 'win32', 'deps', 'lib')
        for deffile in glob.glob(os.path.join(deps_lib, '*.def')):
            libfile = os.path.splitext(deffile)[0] + '.lib'
            # Regenerate only when the .def is newer than its .lib.
            if newer(deffile, libfile):
                msvc.spawn([msvc.lib, '/nologo', '/machine:x86',
                            '/out:' + libfile, '/def:' + deffile])
        # Also add the directory containing the msinttypes headers to the
        # include path (msinttypes supplies stdint.h for old MSVC).
        self.include_dirs.append(
            os.path.join('..', 'win32', 'deps', 'include', 'msinttypes'))
    return _build_ext.run(self, *args, **kw)
def run(self, *args, **kw):
    """Build MSVC import libraries from .def files, add msinttypes headers,
    then run the normal build."""
    if self.compiler is None:
        self.compiler = distutils.ccompiler.get_default_compiler()
    if self.compiler == "msvc":
        msvc = distutils.ccompiler.new_compiler(
            compiler="msvc",
            verbose=self.verbose,
            dry_run=self.dry_run,
            force=self.force,
        )
        msvc.initialize()
        def_pattern = os.path.join("..", "win32", "deps", "lib", "*.def")
        for deffile in glob.glob(def_pattern):
            libfile = os.path.splitext(deffile)[0] + ".lib"
            # Only regenerate a stale .lib.
            if newer(deffile, libfile):
                msvc.spawn([msvc.lib, "/nologo", "/machine:x86",
                            "/out:" + libfile, "/def:" + deffile])
        # Also add the directory containing the msinttypes headers to the
        # include path.
        # NOTE(review): this path starts with "." while the .def glob
        # above starts with ".." -- confirm which layout is intended.
        self.include_dirs.append(
            os.path.join(".", "win32", "deps", "include", "msinttypes"))
    return _build_ext.run(self, *args, **kw)
def run(self):
    """Run the 'configure' command first, then the normal extension build."""
    self.distribution.run_command('configure')
    return build_ext_c.run(self)
def run(self):
    """Build extensions, mapping platform errors to the local BuildFailed."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        # Translate to the package-local failure type.
        raise BuildFailed
def run(self):
    """Compile the bundled libocc library, then run the normal build."""
    build_libocc()
    build_ext.run(self)
def run(self):
    """Build the LBLRTM component before the normal extension build."""
    MakeLBLRTM()
    build_ext.run(self)
def run(self):
    """Remove stale .pyc files, then run the normal extension build."""
    cleanup_pycs()
    build_ext.run(self)
def run(self):
    """Fetch numpy as a build egg if needed, add its headers, then build."""
    self.distribution.fetch_build_eggs(['numpy'])
    import numpy as np
    self.include_dirs.append(np.get_include())
    build_ext.run(self)
def run(self):
    """Drive the CMake build first, then hand off to setuptools."""
    self._run_cmake()
    _build_ext.run(self)
def run(self):
    """Attempt the build; report platform failures via _unavailable."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        # Only the exception object is needed; type and traceback are
        # deliberately discarded.
        _etype, exc, _tb = sys.exc_info()
        self._unavailable(exc)
def run(self):
    """Delegate straight to the standard build_ext command."""
    build_ext.run(self)
def run(self):
    """Build extensions, then ensure pyrogen/__init__.py exists in build_lib."""
    build_ext.run(self)
    init_file = Path(self.build_lib) / 'pyrogen' / '__init__.py'
    # Create the marker file only if it is not already present.
    init_file.touch(exist_ok=True)
def run(self):
    """Clean the libocc build artifacts, then run the normal build."""
    build_libocc_clean()
    build_ext.run(self)
def run(self, *args, **kwargs):
    """Skip the extension build entirely when extensions are disabled."""
    if self.distribution.disable_ext:
        return
    build_ext.run(self, *args, **kwargs)