def run(self):
    """Compile the CUDA kernel with nvcc, then build the extension modules.

    Raises SystemError when nvcc cannot be located.
    """
    # Probe for nvcc by asking it for its version banner.
    nvcc_o = self._call(NVCC + ' -V')
    if nvcc_o is not None:
        # The version follows the literal text "release " in nvcc's output.
        nvcc_version = nvcc_o.split('release ')[-1].strip()
    else:
        raise SystemError("Nvidia's CUDA-compiler 'nvcc' can't be " \
                          "found.")
    print "Compiling CUDA module using nvcc %s..." % nvcc_version
    # Pick -m32/-m64 to match the interpreter's architecture.
    bits, linkage = platform.architecture()
    if bits == '32bit':
        bit_flag = ' -m32'
    elif bits == '64bit':
        bit_flag = ' -m64'
    else:
        print >>sys.stderr, "Can't detect platform, using 32bit"
        bit_flag = ' -m32'
    # NOTE(review): the doubled quotes around -fPIC collapse during Python
    # string concatenation, so the shell sees ``--compiler-options -fPIC``
    # (no quotes). nvcc accepts that form, but shell quoting was probably
    # the intent - confirm.
    nvcc_cmd = NVCC + bit_flag + ' -c -arch=sm_20 '\
        ' ./src/pml_cuda_kernel.cu' \
        ' --compiler-options ''-fPIC'''
    print "Executing '%s'" % nvcc_cmd
    subprocess.check_call(nvcc_cmd, shell=True)
    print "Building modules..."
    build_ext.run(self)
def run(self):
    """Abort with installation instructions when the NXP Reader library
    is missing; otherwise run the normal extension build."""
    have_library = all(
        os.path.isdir(p)
        for p in (nxprdlib_include_path, nxprdlib_link_path)
    )
    if not have_library:
        print('\nNXP Reader library not found. Install it using the DEB package from http://bit.ly/nxpreader, and the instructions included in the README for this repo.\n')
        sys.exit(1)
    # Run the rest of the build
    build_ext.run(self)
def run(self):
    """Make sure the native library exists and is compiled, then build.

    ``sdist`` normally downloads the library already; under ``develop``
    that step may not have happened, hence the explicit download here.
    """
    download_library(self)
    self.build_library()
    distutils_build_ext.run(self)
def run(self): print 'Create symlinks' if not os.path.exists('linkcheckerjs/node_modules'): os.symlink('../node_modules', 'linkcheckerjs/node_modules') if not os.path.exists('linkcheckerjs/jslib'): os.symlink('../jslib', 'linkcheckerjs/jslib') _build_ext.run(self)
def run(self):
    """Build oniguruma, then download and build jq from source, then run
    the normal extension build."""
    build_onig()
    # Start from a pristine tarball and source tree on every build.
    if os.path.exists(tarball_path):
        os.unlink(tarball_path)
    urlretrieve("https://github.com/stedolan/jq/archive/jq-1.5.tar.gz", tarball_path)
    if os.path.exists(jq_lib_dir):
        shutil.rmtree(jq_lib_dir)
    tarfile.open(tarball_path, "r:gz").extractall(path_in_dir("."))

    def command(args):
        # Run *args* inside the jq source tree, echoing a banner first.
        print("")
        print("#" * 15)
        print("# Executing: %s" % ' '.join(args))
        print("#" * 15)
        print("")
        subprocess.check_call(args, cwd=jq_lib_dir)

    # Propagate the deployment target so jq builds against the same SDK
    # as the Python interpreter.
    macosx_deployment_target = sysconfig.get_config_var("MACOSX_DEPLOYMENT_TARGET")
    if macosx_deployment_target:
        os.environ['MACOSX_DEPLOYMENT_TARGET'] = macosx_deployment_target
    command(["autoreconf", "-i"])
    command(["./configure", "CFLAGS=-fPIC", "--disable-maintainer-mode",
             "--with-oniguruma=%s/%s" % (os.getcwd(), onig_lib_dir)])
    command(["make"])
    build_ext.run(self)
def run(self):
    """Try to build the C accelerator; fall back to pure Python on failure."""
    try:
        build_ext.run(self)
    except (DistutilsPlatformError, FileNotFoundError):
        banner = "*" * 60
        print(banner)
        print("Cannot compile C accelerator module, use pure python version")
        print(banner)
def run(self):
    """Build extensions; on a platform error, print it and warn instead
    of aborting the install."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError as exc:
        print(exc)
        detail = "There was an issue with your platform configuration - see above."
        print(self.warning_message % ("Extension modules", detail))
def run(self):
    """Build the extension modules, then copy the OpenSSL shared
    libraries to their destination paths."""
    build_ext.run(self)
    for source_dll, destination in self.openssl_dlls:
        self.copy_file(source_dll, destination)
def run(self):
    """Fetch and unpack the QPBO sources, then run the extension build.

    Downloading on every build is not the nicest way, but it keeps the
    tarball out of the repository.
    """
    urllib.urlretrieve("http://pub.ist.ac.at/~vnk/software/QPBO-v1.3.src.tar.gz",
                       "QPBO-v1.3.src.tar.gz")
    # Fix: close the archive explicitly instead of leaking the handle.
    tfile = tarfile.open("QPBO-v1.3.src.tar.gz", 'r:gz')
    try:
        tfile.extractall('.')
    finally:
        tfile.close()
    build_ext.run(self)
def run(self):
    """Generate the ANTLR parser for OrderlyJSON, then build extensions."""
    # NOTE(review): ``True or`` makes this condition unconditionally true,
    # so the parser regenerates even under --dry-run; looks like leftover
    # debugging - confirm before removing.
    if True or not self.dry_run:
        antlr3 = None
        # First, try locally-hosted antlr3
        if os.path.exists('antlr-3.1.3.jar'):
            antlr3 = find_executable('java')
            if antlr3 is not None:
                antlr3 = [antlr3, '-cp', 'antlr-3.1.3.jar', 'org.antlr.Tool']
        # Then, try to find system-provided one
        if antlr3 is None:
            antlr3 = find_executable('antlr3')
            if antlr3 is None:
                raise RuntimeError("antlr3 (>= 3.1 but < 3.2) is required")
            antlr3 = [antlr3]
        # TODO: antlr3 jar and python runtime version check?
        source_file = os.path.join('orderlyjson', 'OrderlyJSON.g')
        # In-place builds emit the generated parser into the source tree;
        # otherwise into the build directory.
        if self.inplace:
            target_dir = 'orderlyjson'
        else:
            target_dir = os.path.join(self.build_lib, 'orderlyjson')
        tokens_file = os.path.join(target_dir, 'OrderlyJSON.tokens')
        self.mkpath(target_dir)
        self.spawn(antlr3 + ['-fo', target_dir, source_file])
        # The .tokens file is a generator artifact we do not want to ship.
        os.unlink(tokens_file)
    build_ext.run(self)
def run(self):
    """Build extensions, downgrading platform errors to a warning."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        reason = sys.exc_info()[1]
        sys.stdout.write('%s\n' % str(reason))
        warnings.warn("Extension modules: There was an issue with your platform configuration - see above.")
def run (self):
    """Configure the extensions and if successful, build them."""
    ## The current building process will probably not work on
    ## non-posix systems.
    ## If anybody knows how to do it, please go ahead and remove this.
    if os.name != 'posix':
        print("!! The acceleration library is not available for your platform.\n!! You should consider switching to Linux (or some other Posix) Platform.")
        return
    if self.configure():
        print("Compiling the pyFormex acceleration library")
        _build_ext.run(self)
        print("Compiling the pyFormex postabq converter")
        # NOTE: ``commands`` is Python-2 only; the Makefile does the work.
        cmd = "cd pyformex/lib;make postabq"
        sta, out = commands.getstatusoutput(cmd)
        print(out)
    else:
        # Fixed typo in the user-facing message: "onhow" -> "on how".
        print("""
Some files required to compile the accelerator library were not
found on your system. Installation will be continued, and pyFormex
will run without the library, but some operations on large data sets
may run slowly.
See the manual or the website for information on how to install the missing
files.
""")
def run(self):
    """Build the bundled zlib and bzip2 libraries, then the extensions."""
    # zlib is only configured once; the Makefile existence test guards it.
    zlib_cmd = ('cd ' + zlibdir + ' && ( test -f Makefile || bash'
                ' ./configure --shared ) && make libz.a')
    bzip2_cmd = 'cd ' + bzip2dir + ' && make -f Makefile-libbz2_so all'
    call(zlib_cmd, shell=True)
    call(bzip2_cmd, shell=True)
    _build_ext.run(self)
def run(self):
    """Configure and build the C extensions; warn instead of aborting
    when the platform cannot compile them."""
    try:
        self._setup_extensions()
        build_ext.run(self)
    except DistutilsPlatformError as platform_err:
        sys.stderr.write('%s\n' % str(platform_err))
        warnings.warn(self.error_message % "C extensions.")
def run(self):
    """Log the detected Hadoop/Java environment, build the extensions,
    then build each Java library."""
    log.info("hadoop_home: %r" % (HADOOP_HOME,))
    log.info("hadoop_version: '%s'" % HADOOP_VERSION_INFO)
    log.info("java_home: %r" % (JAVA_HOME,))
    build_ext.run(self)
    for java_lib in self.java_libs:
        self.__build_java_lib(java_lib)
def run(self):
    """Run prerequisite commands, import configure-generated environment
    variables, build the extensions, and undo any compiler monkey-patch.

    The CC monkey-patch is restored in a ``finally`` so a failed build
    cannot leave distutils' config vars corrupted.
    """
    # Run the commands that this one depends on (i.e. build_configure)
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)

    class unmodified:
        pass  # sentinel: distinguishes "never patched" from any real CC value
    orig_cc = unmodified
    try:
        # Set environment variables generated by the configure script
        if os.path.exists("buildenv"):
            # Fix: open *before* entering the try so a failed open() can
            # no longer raise NameError on f.close() in the finally clause.
            f = open("buildenv", "r")
            try:
                for line in f.readlines():
                    if line.startswith("#") or not line.strip():
                        continue
                    k, v = line.split("=", 1)
                    k, v = k.strip(), v.strip()
                    os.environ[k] = v
            finally:
                f.close()
        # Python 2.1 and 2.2 don't respect the CC environment variable by
        # default. Monkey-patch it.  (A ``with`` statement is avoided on
        # purpose: this code still targets pre-2.5 interpreters.)
        if sys.version_info < (2, 3, 'final', 0) and os.environ.get('CC'):
            distutils.sysconfig.get_config_vars()  # populates distutils.sysconfig._config_vars
            orig_cc = distutils.sysconfig._config_vars['CC']
            distutils.sysconfig._config_vars['CC'] = os.environ['CC']
        # Build the extension modules
        build_ext.run(self)
    finally:
        if orig_cc is not unmodified:
            # Undo monkey-patch
            distutils.sysconfig._config_vars['CC'] = orig_cc
def run(self):
    """Register the NumPy-dependent extensions when NumPy is available,
    then run the normal extension build."""
    if not check_dependencies_once():
        return
    # add software that requires NumPy to install
    if is_Numpy_installed():
        import numpy
        numpy_include_dir = numpy.get_include()
        numpy_extensions = [
            Extension('Bio.Cluster.cluster',
                      ['Bio/Cluster/clustermodule.c', 'Bio/Cluster/cluster.c'],
                      include_dirs=[numpy_include_dir]),
            Extension('Bio.KDTree._CKDTree',
                      ["Bio/KDTree/KDTree.c", "Bio/KDTree/KDTreemodule.c"],
                      include_dirs=[numpy_include_dir]),
            Extension('Bio.Motif._pwm',
                      ["Bio/Motif/_pwm.c"],
                      include_dirs=[numpy_include_dir]),
        ]
        self.extensions.extend(numpy_extensions)
    build_ext.run(self)
def run(self):
    """Run the command"""
    # Nothing to do when neither the MySQL C API extension nor the
    # protobuf-based extension was requested.
    if not self.with_mysql_capi and not self.with_mysqlxpb_cext:
        return
    if os.name == 'nt':
        for ext in self.extensions:
            # Add Protobuf include and library dirs
            if ext.name == "_mysqlxpb" and self.with_mysqlxpb_cext:
                ext.include_dirs.append(self.with_protobuf_include_dir)
                ext.library_dirs.append(self.with_protobuf_lib_dir)
                ext.libraries.append("libprotobuf")
                # Use the multithread, static version of the run-time library
                ext.extra_compile_args.append("/MT")
            # Add extra compile args
            if self.extra_compile_args:
                ext.extra_compile_args.extend(self.extra_compile_args.split())
            # Add extra link args
            if self.extra_link_args:
                ext.extra_link_args.extend(self.extra_link_args.split())
        if self.with_mysqlxpb_cext:
            self.run_protoc()
        build_ext.run(self)
    else:
        # Non-Windows: stub out build_extensions so build_ext.run() only
        # prepares the compiler, patch the compiler, then run the real
        # extension build by hand.
        self.real_build_extensions = self.build_extensions
        self.build_extensions = lambda: None
        build_ext.run(self)
        self.fix_compiler()
        if self.with_mysqlxpb_cext:
            self.run_protoc()
        self.real_build_extensions()
def run(self):
    """Runs the build extension."""
    compiler = new_compiler(compiler=self.compiler)
    if compiler.compiler_type == "msvc":
        self.define = [("UNICODE", "")]
    else:
        # Run the configure script and echo everything from the
        # "configure:" marker onwards.
        output = self._RunCommand("sh configure --disable-shared-libs")
        echoing = False
        for raw_line in output.split("\n"):
            stripped = raw_line.rstrip()
            if stripped == "configure:":
                echoing = True
            if echoing:
                print(stripped)
        self.define = [
            ("HAVE_CONFIG_H", ""),
            ("LOCALEDIR", "\"/usr/share/locale\""),
        ]
    build_ext.run(self)
def run(self):
    """Build extensions; when compilation fails, diagnose a possibly
    missing libxml2 before re-raising."""
    try:
        _build_ext.run(self)  # old-style class in Py2
    except CompileError as err:
        print('Compile failed: %s' % err)
        if not seems_to_have_libxml2():
            print_libxml_error()
        raise
def run(self): print "running scons" cmd = ["./lib/build.sh"] commons.run_command(cmd) #run_command("./lib/gcc-so.sh") #run_command("cd /home/fjammes/src/misc/distutils_swig_scons") # self.run_command("ls") build_ext.run(self)
def run(self):
    """Run the config step, then build with the detected ``restrict``
    keyword injected as a preprocessor macro."""
    self.run_command("config")
    config_cmd = self.get_finalized_command("config")
    restrict_macro = ('restrict', config_cmd.restrict)
    if self.define is None:
        self.define = [restrict_macro]
    else:
        self.define += [restrict_macro]
    build_ext.run(self)
def run(self):
    """Build extensions, reporting unavailability on platform errors."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        # sys.exc_info()[1] keeps compatibility with both Python 2.5
        # and 3.x, which is needed in setup.py.
        self._unavailable(sys.exc_info()[1])
def run(self):
    """ Distutils calls this method to run the command """
    from Cython.Build import cythonize
    import numpy

    # Provides all of our build options
    config = self.distribution.get_command_obj('configure')
    config.run()

    defs_file = localpath('h5py', 'defs.pyx')
    func_file = localpath('h5py', 'api_functions.txt')
    config_file = localpath('h5py', 'config.pxi')

    # Rebuild low-level defs if missing or stale
    if not op.isfile(defs_file) or os.stat(func_file).st_mtime > os.stat(defs_file).st_mtime:
        print("Executing api_gen rebuild of defs")
        api_gen.run()

    # Rewrite config.pxi file if needed
    if not op.isfile(config_file) or config.rebuild_required:
        with open(config_file, 'wb') as f:
            # MPI4PY_V2 gates mpi4py >= 1.3.2 API differences.
            if config.mpi:
                import mpi4py
                from distutils.version import StrictVersion
                v2 = StrictVersion(mpi4py.__version__) > StrictVersion("1.3.1")
            else:
                v2 = False
            s = """\
# This file is automatically generated by the h5py setup script.  Don't modify.

DEF MPI = %(mpi)s
DEF MPI4PY_V2 = %(mpi4py_v2)s
DEF HDF5_VERSION = %(version)s
DEF SWMR_MIN_HDF5_VERSION = (1,9,178)
DEF VDS_MIN_HDF5_VERSION = (1,9,233)
DEF COMPLEX256_SUPPORT = %(complex256_support)s
"""
            s %= {
                'mpi': bool(config.mpi),
                'mpi4py_v2': bool(v2),
                'version': tuple(int(x) for x in config.hdf5_version.split('.')),
                'complex256_support': hasattr(numpy, 'complex256')
            }
            # config.pxi is opened in binary mode, so encode explicitly.
            s = s.encode('utf-8')
            f.write(s)

    # Run Cython
    print("Executing cythonize()")
    self.extensions = cythonize(self._make_extensions(config),
                                force=config.rebuild_required or self.force)
    self.check_rerun_cythonize()

    # Perform the build
    build_ext.run(self)

    # Mark the configuration as built
    config.reset_rebuild()
def run(self):
    """Build extensions; platform errors become a printed warning."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        err = sys.exc_info()[1]
        sys.stdout.write('%s\n' % err)
        detail = ("There was an issue with your "
                  "platform configuration - see above.")
        warn(self.warning_message % ("Extension modules", detail))
def run(self):
    """Compile the CUDA object file with nvcc, link it into the extension,
    build, and copy the Python wrapper into the build tree.

    Raises:
        RuntimeError: if nvcc exits with a non-zero status.
    """
    self.mkpath(self.build_temp)
    nvcc_cmd = ('nvcc --compiler-options="-fPIC" -m64 -arch=sm_20 '
                '-o %s/cufft_core.o -c cufft_core.cu' % (self.build_temp))
    p = subprocess.Popen(nvcc_cmd, stdout=subprocess.PIPE, shell=True)
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        # Fix: include the command and captured output instead of a bare
        # ``raise RuntimeError`` so build failures are diagnosable.
        raise RuntimeError('nvcc failed (exit %d): %s\n%s'
                           % (p.returncode, nvcc_cmd, stdout))
    self.extensions[0].extra_objects.append('%s/cufft_core.o' % (self.build_temp))
    build_ext.run(self)
    self.copy_file('cufft.py', self.build_lib + '/cufft.py')
def run(self):
    """Build the extensions and native library files, then restrict
    ``package_data``.

    Overriding ``package_data`` here ensures the (large) library file is
    included only in binary builds, never in source builds.
    """
    build_ext.run(self)
    build_library_files(self.dry_run)
    from llvmlite.utils import get_library_files
    self.distribution.package_data = {"llvmlite.binding": get_library_files()}
def run(self):
    """Append NumPy's include directory, then run the stock build."""
    # numpy is imported lazily: its headers are only needed at build time.
    import numpy
    self.include_dirs.append(numpy.get_include())
    build_ext.run(self)
def run(self):
    """Build extensions, then the FFI library, then restrict package data."""
    build_ext.run(self)
    ffi_build_script = os.path.join(here_dir, 'ffi', 'build.py')
    spawn([sys.executable, ffi_build_script], dry_run=self.dry_run)
    # HACK: this makes sure the library file (which is large) is only
    # included in binary builds, not source builds.
    self.distribution.package_data = {
        "llvmlite.binding": get_library_files(),
    }
def run(self):
    """Build extensions, turning any failure into a printed warning."""
    try:
        build_ext.run(self)
    except Exception:
        exc = sys.exc_info()[1]
        sys.stdout.write('%s\n' % str(exc))
        detail = ("There was an issue with "
                  "your platform configuration"
                  " - see above.")
        warnings.warn(self.warning_message % ("Extension modules", detail))
def run(self):
    """Translate distutils platform errors into ``BuildFailed``."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        raise BuildFailed()
def run(self, *args, **kwargs):
    """Delegate straight to the base class implementation."""
    return _build_ext.run(self, *args, **kwargs)
def run(self):
    """Configure and build the bundled net-snmp library, copy its shared
    objects into the target dirs, build the extensions, and fix up the
    install names on macOS."""

    def _patch():
        # Apply the OpenSSL 1.1.0 compatibility patch (idempotently) to
        # the net-snmp source tree.
        # NOTE(review): _patch is defined here but never invoked anywhere
        # in this method - confirm whether the call was lost.
        if PLATFORM == "linux" and openssl_gt_1_1_0:
            print(
                '>>>>>>>>>>> OpenSSL version > 1.1.0, checking if already patched'
            )
            patchcmd = ["patch", "-p1", "--ignore-whitespace"]
            # Dry-run first to detect an already-applied patch.
            patchcheck = patchcmd + ["-N", "--dry-run", "--silent"]
            try:
                patch = open("{}/patches/openssl-1.1.0.patch".format(
                    NETSNMP_SRC_PATH))
                check_call(patchcheck, cwd=NETSNMP_SRC_PATH, stdin=patch)
            except CalledProcessError:
                print('>>>>>>>>>>> Patch already applied, skipping')
                return
            print('>>>>>>>>>>> Patch not applied, applying')
            try:
                patch = open("{}/patches/openssl-1.1.0.patch".format(
                    NETSNMP_SRC_PATH))
                check_call(patchcmd, cwd=NETSNMP_SRC_PATH, stdin=patch)
            except CalledProcessError:
                sys.exit(
                    '>>>>>>>>>>> OpenSSL version 1.1.0 patch failed, aborting'
                )

    # Baseline ./configure arguments shared by all platforms.
    configureargs = "--with-defaults --with-default-snmp-version=2 --with-sys-contact=root@localhost " \
                    "--with-logfile=/var/log/snmpd.log " \
                    "--with-persistent-directory=/var/net-snmp --with-sys-location=unknown " \
                    "--without-rpm"
    featureflags = "--enable-reentrant --disable-debugging --disable-embedded-perl " \
                   "--without-perl-modules --enable-static=no --disable-snmpv1 --disable-applications " \
                   "--disable-manuals --with-libs=-lpthread"
    if PLATFORM == 'linux':
        configureargs += " --build={0}-unknown-linux-gnu --host={0}-unknown-linux-gnu ".format(
            MACHINE)
    else:
        # Non-Linux platforms build without OpenSSL and without the agent.
        configureargs += " --without-openssl"
        featureflags += " --disable-agent --disable-mibs"
    configurecmd = "./configure {0} {1}".format(configureargs,
                                                featureflags).split(' ')
    # These two options contain embedded spaces, so they are appended as
    # whole list items rather than going through the split above.
    configurecmd += [
        '--with-security-modules=usm tsm',
        '--with-out-transports=DTLSUDP TLSTCP'
    ]
    makecmd = ['make']
    print(">>>>>>>>>>> Configuring with: {0} in {1}...".format(
        ' '.join(configurecmd), NETSNMP_SRC_PATH))
    check_call(configurecmd, cwd=NETSNMP_SRC_PATH)
    print(">>>>>>>>>>> Building net-snmp library...")
    check_call(makecmd, cwd=NETSNMP_SRC_PATH)
    print(">>>>>>>>>>> Done building net-snmp library")
    print(">>>>>>>>>>> Copying shared objects")
    for path in self.target_dirs:
        for so_target in NETSNMP_SO_TARGETS:
            self.copy_file(NETSNMP_SO_PATH, '{0}/{1}'.format(path, so_target))
    build_ext.run(self)
    # https://medium.com/@donblas/fun-with-rpath-otool-and-install-name-tool-e3e41ae86172
    # https://jorgen.tjer.no/post/2014/05/20/dt-rpath-ld-and-at-rpath-dyld/
    if PLATFORM == 'darwin':
        # Rewrite each built extension to load the bundled library via
        # @rpath instead of the absolute /usr/local/lib path.
        for interface_so_path in self.get_outputs():
            install_name_tool_cmd = [
                'install_name_tool', '-change',
                os.path.join(os.path.sep, 'usr', 'local', 'lib',
                             NETSNMP_SO_FILENAME),
                os.path.join('@rpath', NETSNMP_SO_FILENAME),
                interface_so_path
            ]
            check_call(install_name_tool_cmd)
def run(self):
    """Build extensions, routing platform errors to the failure handler."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError as exc:
        self._failed(exc)
def run(self):
    """Run the configure command first, then delegate to the base build."""
    self.distribution.run_command('configure')
    return build_ext.run(self)
def run(self):
    """Cythonize every extension module in place, then build them."""
    from Cython.Build.Dependencies import cythonize
    modules = self.distribution.ext_modules
    # Slice-assign so the distribution keeps the same list object.
    modules[:] = cythonize(modules, language_level=3)
    du_build_ext.run(self)
def run(self):
    """Locate qpid-proton on the system via ``configure``, then build."""
    self.distribution.run_command('configure')
    build_ext.run(self)
def run(self):
    """Build the bundled libbwa via make, then build the extensions.

    Raises:
        subprocess.CalledProcessError: if the make invocation fails.
    """
    sys.stderr.write("building libbwa\n")
    # Fix: check_call instead of call - the return code was silently
    # ignored, so a failed native build surfaced only as an obscure
    # link/import error later.
    subprocess.check_call(["make", "-C", "./bwa", "-f", "../src/Makefile_bwa"])
    build_ext.run(self)
def run(self):
    """Run ``configure`` first, then build the extensions."""
    # The configure step checks versions up front, preventing confusing
    # undefined-constant errors during the build itself.
    self.distribution.run_command('configure')
    build_ext.run(self)
def run(self):
    """Run the extension build only when there are modules to build."""
    if self.portage_ext_modules:
        _build_ext.run(self)
def run(self):
    """Verify the processor is supported, then build the extensions."""
    check_processor()
    _build_ext.run(self)
def run(self):
    """Build extensions, reporting via ``_error`` on platform problems."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        self._error()
def run(self):
    """Invoke each extension's custom build hook, then the stock build."""
    for ext in self.extensions:
        ext.build_custom()
    build_ext.run(self)
    # Quirk preserved: distutils command run() methods normally return
    # None, but this one reports True.
    return True
def run(self):
    """Delegate to the base distutils build_ext implementation."""
    _build_ext.run(self)
def run(self):
    """Convert the known set of build errors into ``BuildFailed``."""
    try:
        build_ext.run(self)
    except run_errors:
        raise BuildFailed()
def run(self):
    """Build extensions, reporting unavailability on platform errors.

    Uses the ``except ... as`` syntax (Python 2.6+) instead of the
    legacy comma form, matching the style used by the other build
    commands in this codebase.
    """
    try:
        build_ext.run(self)
    except DistutilsPlatformError as x:
        self._unavailable(x)
def run(self):
    """Run the CMake build, then the distutils one."""
    self.cmake_build()
    # Explicit base-class call: _build_ext is an old-style class in 2.7,
    # so super() is not available here.
    _build_ext.run(self)
def run(self):
    """Build extensions; for in-place builds, also build liblearn into
    the package directory."""
    build_ext.run(self)
    if self.inplace:
        build_py = self.get_finalized_command('build_py')
        build_liblearn(build_py.get_package_dir(PACKAGE))
def run(self):
    """Run npm install and build the extensions, unless invoked during
    the docker pre-build."""
    # this is a hack to allow calling this file early in our docker build
    # to make use of caching
    if "PRETIX_DOCKER_BUILD" in os.environ:
        return
    npm_install()
    build_ext.run(self)
def run(self):
    """Sandwich the stock build between pre- and post-processing hooks."""
    self.pre_process()
    build_ext.run(self)
    self.post_process()
def run(self):
    """Print the traceback and raise ``BuildFailed`` on platform errors."""
    try:
        build_ext.run(self)
    except DistutilsPlatformError:
        traceback.print_exc()
        raise BuildFailed()
def run(self):
    """Map expected build-time errors onto ``BuildFailed``."""
    try:
        build_ext.run(self)
    except (DistutilsPlatformError, FileNotFoundError):
        raise BuildFailed()
def run(self):
    """Install the node dependencies, then build the extensions."""
    npm_install()
    build_ext.run(self)
def run(self):
    """Run the stock build, raising ``BuildFailed`` on platform errors."""
    try:
        build_ext.run(self)
    except errors.DistutilsPlatformError:
        raise BuildFailed()
def run(self):
    """ Distutils calls this method to run the command """
    from Cython.Build import cythonize
    import numpy

    # This allows ccache to recognise the files when pip builds in a temp
    # directory. It speeds up repeatedly running tests through tox with
    # ccache configured (CC="ccache gcc"). It should have no effect if
    # ccache is not in use.
    os.environ['CCACHE_BASEDIR'] = op.dirname(op.abspath(__file__))
    os.environ['CCACHE_NOHASHDIR'] = '1'

    # Provides all of our build options
    config = self.get_finalized_command('configure')
    config.run()

    defs_file = localpath('h5py', 'defs.pyx')
    func_file = localpath('h5py', 'api_functions.txt')
    config_file = localpath('h5py', 'config.pxi')

    # Rebuild low-level defs if missing or stale
    if not op.isfile(defs_file) or os.stat(func_file).st_mtime > os.stat(
            defs_file).st_mtime:
        print("Executing api_gen rebuild of defs")
        api_gen.run()

    # Rewrite config.pxi file if needed
    if not op.isfile(config_file) or config.rebuild_required:
        with open(config_file, 'wb') as f:
            s = """\
# This file is automatically generated by the h5py setup script.  Don't modify.

DEF MPI = %(mpi)s
DEF HDF5_VERSION = %(version)s
DEF SWMR_MIN_HDF5_VERSION = (1,9,178)
DEF VDS_MIN_HDF5_VERSION = (1,9,233)
DEF VOL_MIN_HDF5_VERSION = (1,11,5)
DEF COMPLEX256_SUPPORT = %(complex256_support)s
"""
            s %= {
                'mpi': bool(config.mpi),
                'version': tuple(int(x) for x in config.hdf5_version.split('.')),
                'complex256_support': hasattr(numpy, 'complex256'),
            }
            # config.pxi is opened in binary mode, so encode explicitly.
            s = s.encode('utf-8')
            f.write(s)

    # Run Cython
    print("Executing cythonize()")
    self.extensions = cythonize(self._make_extensions(config),
                                force=config.rebuild_required or self.force,
                                language_level=3)
    self.check_rerun_cythonize()

    # Perform the build
    build_ext.run(self)

    # Mark the configuration as built
    config.reset_rebuild()
def run(self):
    """Build extensions only when every dependency checks out."""
    if not check_dependencies_once():
        return
    build_ext.run(self)
def run(self):
    """Execute the build command."""
    module = self.distribution.ext_modules[0]
    # Build relative to the directory containing setup.py.
    base_dir = os.path.dirname(__file__)
    if base_dir:
        os.chdir(base_dir)
    # Source files collected here are removed from the libyara build.
    exclusions = []
    for define in self.define or []:
        module.define_macros.append(define)
    for library in self.libraries or []:
        module.libraries.append(library)
    building_for_windows = self.plat_name in ('win32', 'win-amd64')
    building_for_osx = 'macosx' in self.plat_name
    building_for_linux = 'linux' in self.plat_name
    building_for_freebsd = 'freebsd' in self.plat_name
    building_for_openbsd = 'openbsd' in self.plat_name  # need testing
    # Per-platform process-inspection backend and compiler settings.
    if building_for_linux:
        module.define_macros.append(('_GNU_SOURCE', '1'))
        module.define_macros.append(('USE_LINUX_PROC', '1'))
        module.extra_compile_args.append('-std=c99')
    elif building_for_windows:
        module.define_macros.append(('USE_WINDOWS_PROC', '1'))
        module.define_macros.append(('_CRT_SECURE_NO_WARNINGS', '1'))
        module.libraries.append('kernel32')
        module.libraries.append('advapi32')
        module.libraries.append('user32')
        module.libraries.append('crypt32')
        module.libraries.append('ws2_32')
    elif building_for_osx:
        module.define_macros.append(('_GNU_SOURCE', '1'))
        module.define_macros.append(('USE_MACH_PROC', '1'))
        module.extra_compile_args.append('-std=c99')
        # Common Homebrew/MacPorts install prefixes.
        module.include_dirs.append('/usr/local/opt/openssl/include')
        module.include_dirs.append('/opt/local/include')
        module.library_dirs.append('/opt/local/lib')
        module.include_dirs.append('/usr/local/include')
        module.library_dirs.append('/usr/local/lib')
    elif building_for_freebsd:
        module.define_macros.append(('_GNU_SOURCE', '1'))
        module.define_macros.append(('USE_FREEBSD_PROC', '1'))
        module.include_dirs.append('/opt/local/include')
        module.library_dirs.append('/opt/local/lib')
        module.include_dirs.append('/usr/local/include')
        module.library_dirs.append('/usr/local/lib')
    elif building_for_openbsd:
        module.define_macros.append(('_GNU_SOURCE', '1'))
        module.define_macros.append(('USE_OPENBSD_PROC', '1'))
        module.extra_compile_args.append('-std=c99')
        module.include_dirs.append('/opt/local/include')
        module.library_dirs.append('/opt/local/lib')
        module.include_dirs.append('/usr/local/include')
        module.library_dirs.append('/usr/local/lib')
    else:
        # Unknown platform: build without process inspection support.
        module.define_macros.append(('_GNU_SOURCE', '1'))
        module.define_macros.append(('USE_NO_PROC', '1'))
        module.extra_compile_args.append('-std=c99')
    # Feature-detect optional libc functions.
    if has_function('memmem'):
        module.define_macros.append(('HAVE_MEMMEM', '1'))
    if has_function('strlcpy'):
        module.define_macros.append(('HAVE_STRLCPY', '1'))
    if has_function('strlcat'):
        module.define_macros.append(('HAVE_STRLCAT', '1'))
    if self.enable_profiling:
        module.define_macros.append(('YR_PROFILING_ENABLED', '1'))
    if self.dynamic_linking:
        # Link against an installed libyara instead of compiling it in.
        module.libraries.append('yara')
    else:
        # Statically compile libyara; pick optional modules per flags.
        if not self.define or not ('HASH_MODULE', '1') in self.define:
            if (has_function('MD5_Init', libraries=['crypto']) and
                    has_function('SHA256_Init', libraries=['crypto'])):
                module.define_macros.append(('HASH_MODULE', '1'))
                module.define_macros.append(('HAVE_LIBCRYPTO', '1'))
                module.libraries.append('crypto')
            elif building_for_windows:
                module.define_macros.append(('HASH_MODULE', '1'))
                module.define_macros.append(('HAVE_WINCRYPT_H', '1'))
            else:
                exclusions.append('yara/libyara/modules/hash/hash.c')
        if self.enable_magic:
            module.define_macros.append(('MAGIC_MODULE', '1'))
            module.libraries.append('magic')
        else:
            exclusions.append('yara/libyara/modules/magic/magic.c')
        if self.enable_cuckoo:
            module.define_macros.append(('CUCKOO_MODULE', '1'))
            module.libraries.append('jansson')
        else:
            exclusions.append('yara/libyara/modules/cuckoo/cuckoo.c')
        if self.enable_dotnet:
            module.define_macros.append(('DOTNET_MODULE', '1'))
        else:
            exclusions.append('yara/libyara/modules/dotnet/dotnet.c')
        if self.enable_dex:
            module.define_macros.append(('DEX_MODULE', '1'))
        else:
            exclusions.append('yara/libyara/modules/dex/dex.c')
        if self.enable_macho:
            module.define_macros.append(('MACHO_MODULE', '1'))
        else:
            exclusions.append('yara/libyara/modules/macho/macho.c')
        # exclude pb_tests module
        exclusions.append('yara/libyara/modules/pb_tests/pb_tests.c')
        exclusions.append('yara/libyara/modules/pb_tests/pb_tests.pb-c.c')
        exclusions = [os.path.normpath(x) for x in exclusions]
        # Add every remaining libyara .c file to the extension sources.
        for directory, _, files in os.walk('yara/libyara/'):
            for x in files:
                x = os.path.normpath(os.path.join(directory, x))
                if x.endswith('.c') and x not in exclusions:
                    module.sources.append(x)
    build_ext.run(self)
def run(self):
    """Build extensions; any failure is reduced to a warning."""
    try:
        build_ext.run(self)
    except Exception as exc:
        warn(exc)
def run(self):
    """Cythonize sources first, then run the normal extension build."""
    self.run_command('build_cython')
    build_ext.run(self)
def run(self):
    """Execute the build command."""
    module = self.distribution.ext_modules[0]
    # Build relative to the directory containing setup.py.
    base_dir = os.path.dirname(__file__)
    if base_dir:
        os.chdir(base_dir)
    # Source files collected here are removed from the libyara build.
    exclusions = []
    for define in self.define or []:
        module.define_macros.append(define)
    for library in self.libraries or []:
        module.libraries.append(library)
    building_for_windows = self.plat_name in ('win32', 'win-amd64')
    building_for_osx = 'macosx' in self.plat_name
    building_for_linux = 'linux' in self.plat_name
    # Per-platform process-inspection backend.
    if building_for_linux:
        module.sources.append('yara/libyara/proc/linux.c')
    if building_for_windows:
        module.sources.append('yara/libyara/proc/windows.c')
        module.define_macros.append(('_CRT_SECURE_NO_WARNINGS', '1'))
        module.libraries.append('kernel32')
        module.libraries.append('advapi32')
        module.libraries.append('user32')
        module.libraries.append('crypt32')
        module.libraries.append('ws2_32')
    if building_for_osx:
        module.sources.append('yara/libyara/proc/mach.c')
        # Common Homebrew/MacPorts install prefixes.
        module.include_dirs.append('/usr/local/opt/openssl/include')
        module.include_dirs.append('/opt/local/include')
        module.library_dirs.append('/opt/local/lib')
        module.include_dirs.append('/usr/local/include')
        module.library_dirs.append('/usr/local/lib')
    # Feature-detect optional libc functions.
    if has_function('memmem'):
        module.define_macros.append(('HAVE_MEMMEM', '1'))
    if has_function('strlcpy'):
        module.define_macros.append(('HAVE_STRLCPY', '1'))
    if has_function('strlcat'):
        module.define_macros.append(('HAVE_STRLCAT', '1'))
    if self.enable_profiling:
        module.define_macros.append(('PROFILING_ENABLED', '1'))
    if self.dynamic_linking:
        # Link against an installed libyara instead of compiling it in.
        module.libraries.append('yara')
    else:
        # Statically compile libyara; pick optional modules per flags.
        if not self.define or not ('HASH_MODULE', '1') in self.define:
            if (has_function('MD5_Init', libraries=['crypto']) and
                    has_function('SHA256_Init', libraries=['crypto'])):
                module.define_macros.append(('HASH_MODULE', '1'))
                module.define_macros.append(('HAVE_LIBCRYPTO', '1'))
                module.libraries.append('crypto')
            else:
                exclusions.append('yara/libyara/modules/hash.c')
        if self.enable_magic:
            module.define_macros.append(('MAGIC_MODULE', '1'))
            module.libraries.append('magic')
        else:
            exclusions.append('yara/libyara/modules/magic.c')
        if self.enable_cuckoo:
            module.define_macros.append(('CUCKOO_MODULE', '1'))
            module.libraries.append('jansson')
        else:
            exclusions.append('yara/libyara/modules/cuckoo.c')
        if self.enable_dotnet:
            module.define_macros.append(('DOTNET_MODULE', '1'))
        else:
            exclusions.append('yara/libyara/modules/dotnet.c')
        exclusions = [os.path.normpath(x) for x in exclusions]
        # Add every remaining libyara .c file except the proc backends,
        # which were already selected per-platform above.
        for directory, _, files in os.walk('yara/libyara/'):
            if directory != 'yara/libyara/proc':
                for x in files:
                    x = os.path.normpath(os.path.join(directory, x))
                    if x.endswith('.c') and x not in exclusions:
                        module.sources.append(x)
    build_ext.run(self)