def sanity_check_step(self):
    """Custom sanity check for SuiteSparse."""
    # determine shared library extension once, and reuse it (was computed twice before)
    shlib_ext = get_shared_lib_ext()

    # Make sure that SuiteSparse did NOT compile its own Metis
    if os.path.exists(os.path.join(self.installdir, 'lib', 'libmetis.%s' % shlib_ext)):
        raise EasyBuildError("SuiteSparse has compiled its own Metis. This will conflict with the Metis build."
                             " The SuiteSparse EasyBlock need to be updated!")

    libnames = ['AMD', 'BTF', 'CAMD', 'CCOLAMD', 'CHOLMOD', 'COLAMD', 'CXSparse', 'KLU',
                'LDL', 'RBio', 'SPQR', 'UMFPACK']
    # static libraries live in per-component subdirectories, e.g. AMD/lib/libamd.a
    libs = [os.path.join(x, 'lib', 'lib%s.a' % x.lower()) for x in libnames]

    # the CSparse sources are in a differently-named directory for versions before 4.0
    if LooseVersion(self.version) < LooseVersion('4.0'):
        csparse_dir = 'CSparse3'
    else:
        csparse_dir = 'CSparse'
    libs.append(os.path.join(csparse_dir, 'lib', 'libcsparse.a'))

    # Latest version of SuiteSparse also compiles shared library and put them in 'lib'
    if LooseVersion(self.version) >= LooseVersion('4.5.1'):
        libs += [os.path.join('lib', 'lib%s.%s' % (libname.lower(), shlib_ext)) for libname in libnames]

    custom_paths = {
        'files': libs,
        'dirs': ['MATLAB_Tools'],
    }

    super(EB_SuiteSparse, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Qt."""
    shlib_ext = get_shared_lib_ext()

    # figure out which core library should be checked for, depending on Qt version
    if LooseVersion(self.version) >= LooseVersion('4'):
        libversion = ''
        if LooseVersion(self.version) >= LooseVersion('5'):
            # Qt5 library names embed the major version, e.g. libQt5Core
            libversion = self.version.split('.')[0]
        libfile = os.path.join('lib', 'libQt%sCore.%s' % (libversion, shlib_ext))
    else:
        libfile = os.path.join('lib', 'libqt.%s' % shlib_ext)

    custom_paths = {
        'files': ['bin/moc', 'bin/qmake', libfile],
        'dirs': ['include', 'plugins'],
    }

    if self.cfg['check_qtwebengine']:
        # also check for QtWebEngine libraries, if requested
        for webengine_lib in ['WebEngine', 'WebEngineCore']:
            custom_paths['files'].append(os.path.join('lib', 'libQt%s%s.%s' % (libversion, webengine_lib, shlib_ext)))

    if LooseVersion(self.version) >= LooseVersion('4'):
        custom_paths['files'].append('bin/xmlpatterns')

    super(EB_Qt, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Trilinos."""

    # selection of libraries
    libs = ["Amesos", "Anasazi", "AztecOO", "Belos", "Epetra", "Galeri", "GlobiPack", "Ifpack", "Intrepid",
            "Isorropia", "Kokkos", "Komplex", "LOCA", "Mesquite", "ML", "Moertel", "MOOCHO", "NOX", "Pamgen",
            "RTOp", "Rythmos", "Sacado", "Shards", "Stratimikos", "Teuchos", "Tpetra", "Triutils", "Zoltan"]

    # filter out skipped extensions; use PEP8-preferred 'x not in y' rather than 'not x in y'
    libs = [lib for lib in libs if lib not in self.cfg['skip_exts']]

    # Teuchos was refactored in 11.2
    if LooseVersion(self.version) >= LooseVersion('11.2') and 'Teuchos' in libs:
        libs.remove('Teuchos')
        libs.extend(['teuchoscomm', 'teuchoscore', 'teuchosnumerics', 'teuchosparameterlist', 'teuchosremainder'])

    # Kokkos was refactored in 12.x, check for libkokkoscore.a rather than libkokkos.a
    if LooseVersion(self.version) >= LooseVersion('12') and 'Kokkos' in libs:
        libs.remove('Kokkos')
        libs.append('kokkoscore')

    # Get the library extension
    if self.cfg['shared_libs']:
        lib_ext = get_shared_lib_ext()
    else:
        lib_ext = "a"

    custom_paths = {
        'files': [os.path.join('lib', 'lib%s.%s' % (x.lower(), lib_ext)) for x in libs],
        'dirs': ['bin', 'include'],
    }

    super(EB_Trilinos, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self, custom_paths=None, use_new_libnames=None, check_launchers=True):
    """
    Custom sanity check for MPICH
    """
    shlib_ext = get_shared_lib_ext()
    if custom_paths is None:
        custom_paths = {}

    if use_new_libnames is None:
        # cfr. http://git.mpich.org/mpich.git/blob_plain/v3.1.1:/CHANGES
        # MPICH changed its library names sinceversion 3.1.1
        use_new_libnames = LooseVersion(self.version) >= LooseVersion("3.1.1")

    # Starting MPICH 3.1.1, libraries have been renamed
    # cf http://git.mpich.org/mpich.git/blob_plain/v3.1.1:/CHANGES
    if use_new_libnames:
        libnames = ["mpi", "mpicxx", "mpifort"]
    else:
        libnames = ["fmpich", "mpichcxx", "mpichf90", "mpich", "mpl", "opa"]

    binaries = ["mpicc", "mpicxx", "mpif77", "mpif90"]
    if check_launchers:
        binaries += ["mpiexec", "mpiexec.hydra", "mpirun"]

    # compiler wrappers + headers + static/shared libraries
    expected_files = [os.path.join("bin", binary) for binary in binaries]
    expected_files += [os.path.join("include", header) for header in ("mpi.h", "mpicxx.h", "mpif.h")]
    for libname in libnames:
        for ext in ("a", shlib_ext):
            expected_files.append(os.path.join("lib", "lib%s.%s" % (libname, ext)))

    custom_paths.setdefault("dirs", []).extend(["bin", "include", "lib"])
    custom_paths.setdefault("files", []).extend(expected_files)

    super(EB_MPICH, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check paths for ifort."""
    shlib_ext = get_shared_lib_ext()

    # location of binaries/libraries changed over the years
    binprefix = 'bin/intel64'
    libprefix = 'lib/intel64'
    if LooseVersion(self.version) >= LooseVersion('2011'):
        # 2011.x up to .3.174, and 2013_sp1 onwards, install binaries directly in 'bin'
        if LooseVersion(self.version) <= LooseVersion('2011.3.174') or LooseVersion(self.version) >= LooseVersion('2013_sp1'):
            binprefix = 'bin'
        else:
            libprefix = 'compiler/lib/intel64'

    bins = ['ifort']
    if LooseVersion(self.version) < LooseVersion('2013'):
        # idb is not shipped with ifort anymore in 2013.x versions (it is with icc though)
        bins.append('idb')

    # static + shared variants of the Fortran runtime & OpenMP libraries
    libs = []
    for libname in ['ifcore', 'iomp5']:
        libs.append('%s.a' % libname)
        libs.append('%s.%s' % (libname, shlib_ext))

    custom_paths = {
        'files': [os.path.join(binprefix, binfile) for binfile in bins] +
                 [os.path.join(libprefix, 'lib%s' % lib) for lib in libs],
        'dirs': [],
    }

    # make very sure that expected 'compilers_and_libraries_<VERSION>/linux' subdir is there for recent versions,
    # since we rely on it being there in make_module_req_guess
    if self.comp_libs_subdir:
        custom_paths['dirs'].append(self.comp_libs_subdir)

    custom_commands = ["which ifort"]

    IntelBase.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands)
def test_templating(self):
    """ test easyconfig templating """
    # input values that the templates below should be resolved with
    inp = {
        'name': 'PI',
        'version': '3.14',
        'namelower': 'pi',
        'cmd': 'tar xfvz %s',
    }
    # don't use any escaping insanity here, since it is templated itself
    self.contents = '\n'.join([
        'name = "%(name)s"',
        'version = "%(version)s"',
        'homepage = "http://google.com"',
        'description = "test easyconfig %%(name)s"',
        'toolchain = {"name":"dummy", "version": "dummy2"}',
        'source_urls = [(GOOGLECODE_SOURCE)]',
        'sources = [SOURCE_TAR_GZ, (SOURCELOWER_TAR_GZ, "%(cmd)s")]',
        'sanity_check_paths = {"files": [], "dirs": ["libfoo.%%s" %% SHLIB_EXT]}',
    ]) % inp
    self.prep()
    # validation is skipped at construction, then done explicitly below
    eb = EasyConfig(self.eb_file, validate=False, valid_stops=self.all_stops)
    eb.validate()
    # resolve all template values, e.g. %(name)s
    eb.generate_template_values()

    self.assertEqual(eb['description'], "test easyconfig PI")

    # TEMPLATE_CONSTANTS entries are tuples; map constant name (x[0]) to its value (x[1])
    const_dict = dict([(x[0], x[1]) for x in easyconfig.templates.TEMPLATE_CONSTANTS])

    # template constants must be resolved using the input values
    self.assertEqual(eb['sources'][0], const_dict['SOURCE_TAR_GZ'] % inp)
    self.assertEqual(eb['sources'][1][0], const_dict['SOURCELOWER_TAR_GZ'] % inp)
    # the '%s' in the extract command must survive templating untouched
    self.assertEqual(eb['sources'][1][1], 'tar xfvz %s')
    self.assertEqual(eb['source_urls'][0], const_dict['GOOGLECODE_SOURCE'] % inp)
    self.assertEqual(eb['sanity_check_paths']['dirs'][0], 'libfoo.%s' % get_shared_lib_ext())

    # test the escaping insanity here (ie all the crap we allow in easyconfigs)
    eb['description'] = "test easyconfig % %% %s% %%% %(name)s %%(name)s %%%(name)s %%%%(name)s"
    self.assertEqual(eb['description'], "test easyconfig % %% %s% %%% PI %(name)s %PI %%(name)s")
def sanity_check_step(self):
    """Custom sanity check for GATE."""
    # compare LooseVersion with LooseVersion rather than with a plain string,
    # for correct version ordering and consistency with other easyblocks
    if LooseVersion(self.version) >= LooseVersion('6.2'):
        subdir = ''
        extra_files = ["bin/gjm", "bin/gjs"]
        dirs = []
        if LooseVersion(self.version) < LooseVersion('7.0'):
            extra_files += ["Utilities/itkzlib/%s" % x for x in ['itk_zlib_mangle.h', 'zconf.h',
                                                                 'zlibDllConfig.h', 'zlib.h']]
            extra_files += ["Utilities/MetaIO/%s" % x for x in ['localMetaConfiguration.h', 'metaDTITube.h',
                                                                'metaImage.h', 'metaMesh.h', 'metaTubeGraph.h',
                                                                'metaUtils.h']]
    else:
        # pre-6.2 cluster tools live in a Geant4-system-specific subdirectory
        subdir = self.g4system
        extra_files = [
            os.path.join('cluster_tools', 'filemerger', 'bin', subdir, 'gjm'),
            os.path.join('cluster_tools', 'jobsplitter', 'bin', subdir, 'gjs'),
            'lib/libGate.%s' % get_shared_lib_ext(),
        ]
        dirs = ['benchmarks', 'examples']

    custom_paths = {
        'files': [os.path.join('bin', subdir, 'Gate')] + extra_files,
        'dirs': dirs,
    }
    super(EB_GATE, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for NEURON."""
    shlib_ext = get_shared_lib_ext()

    binpath = os.path.join(self.hostcpu, 'bin')
    # template for library paths: lib<name>.<ext> under the host-CPU-specific lib dir
    libpath = os.path.join(self.hostcpu, 'lib', 'lib%s.' + shlib_ext)

    binaries = ["bbswork.sh", "hel2mos1.sh", "ivoc", "memacs", "mkthreadsafe", "modlunit", "mos2nrn",
                "mos2nrn2.sh", "neurondemo", "nocmodl", "oc"]
    # hoc_ed is not included in the sources of 7.4. However, it is included in the binary distribution.
    # Nevertheless, the binary has a date old enough (June 2014, instead of November 2015 like all the
    # others) to be considered a mistake in the distribution
    if LooseVersion(self.version) < LooseVersion('7.4'):
        binaries.append("hoc_ed")

    custom_paths = {
        'files': [os.path.join(binpath, x) for x in binaries] +
                 [os.path.join(binpath, "nrn%s" % x) for x in ["gui", "iv", "iv_makefile", "ivmodl",
                                                               "mech_makefile", "oc", "oc_makefile", "ocmodl"]] +
                 [libpath % x for x in ["ivoc", "ivos", "memacs", "meschach", "neuron_gnu", "nrniv", "nrnmpi",
                                        "nrnoc", "nrnpython", "oc", "ocxt", "scopmath", "sparse13", "sundials"]],
        'dirs': ['include/nrn', 'share/nrn'],
    }
    super(EB_NEURON, self).sanity_check_step(custom_paths=custom_paths)

    try:
        fake_mod_data = self.load_fake_module()
    # use 'except ... as err' syntax, valid in Python 2.6+ and required in Python 3
    # (rather than Python-2-only 'except ..., err')
    except EasyBuildError as err:
        self.log.debug("Loading fake module failed: %s" % err)
def build_easyconfig_variables_dict():
    """Make a dictionary with all variables that can be used"""
    # currently only the shared library extension is exposed as a variable
    return {
        "shared_lib_ext": get_shared_lib_ext(),
    }
def sanity_check_step(self):
    """Custom sanity check for Boost."""
    shlib_ext = get_shared_lib_ext()

    lib_files = ['lib/libboost_system.%s' % shlib_ext]

    if self.cfg['boost_mpi']:
        lib_files.append('lib/libboost_mpi.%s' % shlib_ext)

    if get_software_root('Python'):
        # suffix of the Boost.Python library depends on the Python and Boost versions
        pyver_comps = get_software_version('Python').split('.')
        pymajorver = pyver_comps[0]
        pyminorver = pyver_comps[1]
        if int(pymajorver) >= 3:
            suffix = pymajorver
        elif LooseVersion(self.version) >= LooseVersion("1.67.0"):
            suffix = '%s%s' % (pymajorver, pyminorver)
        else:
            suffix = ''
        lib_files.append('lib/libboost_python%s.%s' % (suffix, shlib_ext))

    if self.cfg['boost_multi_thread']:
        lib_files.append('lib/libboost_thread-mt.%s' % shlib_ext)

    if self.cfg['boost_mpi'] and self.cfg['boost_multi_thread']:
        lib_files.append('lib/libboost_mpi-mt.%s' % shlib_ext)

    custom_paths = {
        'files': lib_files,
        'dirs': ['include/boost'],
    }
    super(EB_Boost, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for BamTools."""
    sharedlib_ext = get_shared_lib_ext()
    custom_paths = {
        'files': [
            "bin/bamtools",
            "include/shared/bamtools_global.h",
            "lib/libbamtools.a",
            "lib/libbamtools.%s" % sharedlib_ext
        ],
        'dirs': [
            "include/api",
            "docs"
        ]
    }
    # older versions ship the utils & JSON libraries as shared objects,
    # newer ones (>= 2.3.0) as static libraries
    if LooseVersion(self.version) < LooseVersion('2.3.0'):
        # Build environment changed:
        # https://github.com/pezmaster31/bamtools/commit/9cfa70bfe9cdf1b6adc06beb88246b45fdd6250a
        custom_paths['files'].extend([
            "lib/libbamtools-utils.%s" % sharedlib_ext,
            "lib/libjsoncpp.%s" % sharedlib_ext
        ])
    else:
        custom_paths['files'].extend([
            "lib/libbamtools-utils.a",
            "lib/libjsoncpp.a"
        ])
    super(EB_BamTools, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for PETSc"""
    # with a source install, everything lives under PETSc-specific subdirectories
    if self.cfg['sourceinstall']:
        prefix1 = self.petsc_subdir
        prefix2 = os.path.join(self.petsc_subdir, self.petsc_arch)
    else:
        prefix1 = ''
        prefix2 = ''

    libext = get_shared_lib_ext() if self.cfg['shared_libs'] else 'a'

    custom_paths = {
        'files': [os.path.join(prefix2, 'lib', 'libpetsc.%s' % libext)],
        'dirs': [os.path.join(prefix1, 'bin'), os.path.join(prefix1, 'include'),
                 os.path.join(prefix2, 'include')],
    }
    # location of the 'conf' directory changed in PETSc 3.6
    if LooseVersion(self.version) < LooseVersion('3.6'):
        custom_paths['dirs'].append(os.path.join(prefix2, 'conf'))
    else:
        custom_paths['dirs'].append(os.path.join(prefix2, 'lib', 'petsc', 'conf'))

    super(EB_PETSc, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self, custom_paths=None, use_new_libnames=None, check_launchers=True):
    """
    Custom sanity check for MPICH
    """
    shlib_ext = get_shared_lib_ext()
    if custom_paths is None:
        custom_paths = {}

    if use_new_libnames is None:
        # cfr. http://git.mpich.org/mpich.git/blob_plain/v3.1.1:/CHANGES
        # MPICH changed its library names sinceversion 3.1.1
        use_new_libnames = LooseVersion(self.version) >= LooseVersion('3.1.1')

    # Starting MPICH 3.1.1, libraries have been renamed
    # cf http://git.mpich.org/mpich.git/blob_plain/v3.1.1:/CHANGES
    old_libnames = ['fmpich', 'mpichcxx', 'mpichf90', 'mpich', 'mpl', 'opa']
    new_libnames = ['mpi', 'mpicxx', 'mpifort']
    libnames = new_libnames if use_new_libnames else old_libnames

    binaries = ['mpicc', 'mpicxx', 'mpif77', 'mpif90']
    if check_launchers:
        binaries.extend(['mpiexec', 'mpiexec.hydra', 'mpirun'])

    files = [os.path.join('bin', binary) for binary in binaries]
    files.extend(os.path.join('include', hdr) for hdr in ['mpi.h', 'mpicxx.h', 'mpif.h'])
    files.extend(os.path.join('lib', 'lib%s.%s' % (libname, ext))
                 for libname in libnames for ext in ['a', shlib_ext])

    custom_paths.setdefault('dirs', []).extend(['bin', 'include', 'lib'])
    custom_paths.setdefault('files', []).extend(files)

    super(EB_MPICH, self).sanity_check_step(custom_paths=custom_paths)
def install_step(self):
    """Install in non-standard path by passing PREFIX variable to make install."""
    self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
    super(EB_bzip2, self).install_step()

    # also build & install shared libraries, if desired
    if self.cfg['with_shared_libs']:
        cmd = "%s make -f Makefile-libbz2_so %s" % (self.cfg['prebuildopts'], self.cfg['buildopts'])
        run_cmd(cmd, log_all=True, simple=True)

        # copy shared libraries to <install dir>/lib
        shlib_ext = get_shared_lib_ext()
        libdir = os.path.join(self.installdir, 'lib')
        try:
            for lib in glob.glob('libbz2.%s.*' % shlib_ext):
                # only way to copy a symlink is to check for it,
                # cfr. http://stackoverflow.com/questions/4847615/copying-a-symbolic-link-in-python
                if os.path.islink(lib):
                    os.symlink(os.readlink(lib), os.path.join(libdir, lib))
                else:
                    shutil.copy2(lib, libdir)
        # 'except ... as err' syntax works on Python 2.6+ and Python 3,
        # unlike Python-2-only 'except ..., err'
        except OSError as err:
            raise EasyBuildError("Copying shared libraries to installation dir %s failed: %s", libdir, err)

        # create symlink libbz2.so >> libbz2.so.<version>
        try:
            cwd = os.getcwd()
            os.chdir(libdir)
            os.symlink('libbz2.%s.%s' % (shlib_ext, self.version), 'libbz2.%s' % shlib_ext)
            os.chdir(cwd)
        except OSError as err:
            raise EasyBuildError("Creating symlink for libbz2.so failed: %s", err)
def sanity_check_step(self):
    """Custom sanity check for Armadillo."""
    shlib_ext = get_shared_lib_ext()
    custom_paths = {
        'files': ['include/armadillo', 'lib/libarmadillo.%s' % shlib_ext],
        'dirs': ['include/armadillo_bits'],
    }
    super(EB_Armadillo, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check paths for IMPI."""
    # '64' suffix on bin/include/lib dirs, unless a 32-bit install was requested
    suff = ""
    if not self.cfg['m32']:
        suff = "64"

    mpi_mods = ['mpi.mod']
    if LooseVersion(self.version) > LooseVersion('4.0'):
        mpi_mods.extend(["mpi_base.mod", "mpi_constants.mod", "mpi_sizeofs.mod"])

    # directory layout changed with the 2019 releases
    if LooseVersion(self.version) >= LooseVersion('2019'):
        bin_dir = 'intel64/bin'
        include_dir = 'intel64/include'
        lib_dir = 'intel64/lib/release'
    else:
        bin_dir = 'bin%s' % suff
        include_dir = 'include%s' % suff
        lib_dir = 'lib%s' % suff
        mpi_mods.append("i_malloc.h")

    files = ["%s/mpi%s" % (bin_dir, comp) for comp in ["icc", "icpc", "ifort"]]
    files.extend("%s/mpi%s.h" % (include_dir, hdr) for hdr in ["cxx", "f", "", "o", "of"])
    files.extend("%s/%s" % (include_dir, mod) for mod in mpi_mods)
    files.append("%s/libmpi.%s" % (lib_dir, get_shared_lib_ext()))
    files.append("%s/libmpi.a" % lib_dir)

    custom_paths = {
        'files': files,
        'dirs': [],
    }
    super(EB_impi, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check paths for IPP."""
    shlib_ext = get_shared_lib_ext()

    dirs = [os.path.join('ipp', subdir) for subdir in ['bin', 'include', os.path.join('tools', 'intel64')]]
    # extra directories that are only present in older releases
    if LooseVersion(self.version) < LooseVersion('8.0'):
        dirs.append(os.path.join('compiler', 'lib', 'intel64'))
        dirs.append(os.path.join('ipp', 'interfaces', 'data-compression'))
    elif LooseVersion(self.version) < LooseVersion('9.0'):
        dirs.append(os.path.join('composerxe', 'lib', 'intel64'))

    # IPP domain libraries; several were dropped as of v9.0
    ipp_libs = ['cc', 'ch', 'core', 'cv', 'dc', 'i', 's', 'vm']
    if LooseVersion(self.version) < LooseVersion('9.0'):
        ipp_libs.extend(['ac', 'di', 'j', 'm', 'r', 'sc', 'vc'])

    # static + shared variant of each domain library
    files = []
    for lib in ipp_libs:
        for suffix in ['a', shlib_ext]:
            files.append(os.path.join('ipp', 'lib', 'intel64', 'libipp%s.%s' % (lib, suffix)))

    custom_paths = {
        'files': files,
        'dirs': dirs,
    }
    super(EB_ipp, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for ACML."""
    shlib_ext = get_shared_lib_ext()

    # extra header/library variants that are only shipped with pre-5.x releases
    if LooseVersion(self.version) < LooseVersion("5"):
        inc_extra = ['_mv', '_mv_m128']
        lib_extra = ['_mv']
    else:
        inc_extra = []
        lib_extra = []

    inc_files = []
    lib_files = []
    # one set of headers/libraries per variant: plain and multi-threaded ('_mp')
    for suff in ['', '_mp']:
        fp = "%s%s%s" % (self.basedir, suff, self.suffix)
        for inc in [''] + inc_extra:
            inc_files.append(os.path.join(fp, 'include', 'acml%s.h' % inc))
        for lib in [suff] + lib_extra:
            for ext in ['a', shlib_ext]:
                lib_files.append(os.path.join(fp, 'lib', 'libacml%s.%s' % (lib, ext)))

    custom_paths = {
        'files': ['util/cpuid.exe'] + inc_files + lib_files,
        'dirs': [],
    }
    super(EB_ACML, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check paths for ifort."""
    shlib_ext = get_shared_lib_ext()

    # figure out where binaries & libraries were installed, which changed across versions
    binprefix = 'bin/intel64'
    libprefix = 'lib/intel64'
    if LooseVersion(self.version) >= LooseVersion('2011'):
        if LooseVersion(self.version) <= LooseVersion('2011.3.174'):
            binprefix = 'bin'
        elif LooseVersion(self.version) >= LooseVersion('2013_sp1'):
            binprefix = 'bin'
        else:
            libprefix = 'compiler/lib/intel64'

    bins = ['ifort']
    if LooseVersion(self.version) < LooseVersion('2013'):
        # idb is not shipped with ifort anymore in 2013.x versions (it is with icc though)
        bins.append('idb')

    libs = []
    for libname in ['ifcore', 'iomp5']:
        libs.append('%s.a' % libname)
        libs.append('%s.%s' % (libname, shlib_ext))

    files = [os.path.join(binprefix, binfile) for binfile in bins]
    files.extend(os.path.join(libprefix, 'lib%s' % lib) for lib in libs)

    custom_paths = {
        'files': files,
        'dirs': [],
    }
    custom_commands = ["which ifort"]

    IntelBase.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands)
def sanity_check_step(self):
    """Custom sanity check for CUDA."""
    shlib_ext = get_shared_lib_ext()

    if LooseVersion(self.version) > LooseVersion("9"):
        versionfile = read_file(os.path.join(self.installdir, "version.txt"))
        # escape the version string: it contains '.' characters, which are regex
        # wildcards and could otherwise produce false positive matches
        if not re.search("Version %s$" % re.escape(self.version), versionfile):
            raise EasyBuildError("Unable to find the correct version (%s) in the version.txt file", self.version)

    chk_libdir = ["lib64"]
    # Versions higher than 6 do not provide 32 bit libraries
    if LooseVersion(self.version) < LooseVersion("6"):
        chk_libdir += ["lib"]

    culibs = ["cublas", "cudart", "cufft", "curand", "cusparse"]
    custom_paths = {
        # interpolate library names before joining paths, rather than %-formatting the joined result
        'files': [os.path.join("bin", x) for x in ["fatbinary", "nvcc", "nvlink", "ptxas"]] +
                 [os.path.join(x, "lib%s.%s" % (y, shlib_ext)) for x in chk_libdir for y in culibs],
        'dirs': ["include"],
    }

    if LooseVersion(self.version) < LooseVersion('7'):
        # Open64-based nvopencc compiler was dropped in CUDA 7
        custom_paths['files'].append(os.path.join('open64', 'bin', 'nvopencc'))
    if LooseVersion(self.version) >= LooseVersion('7'):
        custom_paths['files'].append(os.path.join("extras", "CUPTI", "lib64", "libcupti.%s" % shlib_ext))
        custom_paths['dirs'].append(os.path.join("extras", "CUPTI", "include"))

    super(EB_CUDA, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """
    Custom sanity check for netCDF
    """
    shlib_ext = get_shared_lib_ext()

    incs = ["netcdf.h"]
    libs = ["libnetcdf.%s" % shlib_ext, "libnetcdf.a"]
    # since v4.2, the non-C libraries have been split off in seperate extensions_step
    # see netCDF-Fortran and netCDF-C++
    if LooseVersion(self.version) < LooseVersion("4.2"):
        incs.extend("netcdf%s" % suffix for suffix in ["cpp.h", ".hh", ".inc", ".mod"])
        incs.extend(["ncvalues.h", "typesizes.mod"])
        libs.extend(["libnetcdf_c++.%s" % shlib_ext, "libnetcdff.%s" % shlib_ext,
                     "libnetcdf_c++.a", "libnetcdff.a"])

    binaries = ["bin/nc%s" % x for x in ["-config", "copy", "dump", "gen", "gen3"]]
    # libraries may be installed in either 'lib' or 'lib64' (tuple means: either path is OK)
    lib_paths = [("lib/%s" % lib, "lib64/%s" % lib) for lib in libs]
    headers = ["include/%s" % inc for inc in incs]

    custom_paths = {
        'files': binaries + lib_paths + headers,
        'dirs': [],
    }
    super(EB_netCDF, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for OpenBabel."""
    shlib_ext = get_shared_lib_ext()
    custom_paths = {
        'files': ['bin/babel', 'lib/libopenbabel.%s' % shlib_ext],
        'dirs': ['share/openbabel'],
    }
    super(EB_OpenBabel, self).sanity_check_step(custom_paths=custom_paths)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    shlib_ext = get_shared_lib_ext()

    # locate the compiled _tkinter extension module; there must be exactly one
    tkinter_so_pattern = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
    matches = glob.glob(tkinter_so_pattern)
    if len(matches) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", matches)
    self.tkinter_so_basename = os.path.basename(matches[0])

    # the Tkinter package directory is 'tkinter' with Python 3, 'lib-tk' with Python 2
    if LooseVersion(self.version) >= LooseVersion('3'):
        pkgdir = "tkinter"
    else:
        pkgdir = "lib-tk"
    tkparts = [pkgdir, os.path.join("lib-dynload", self.tkinter_so_basename)]

    # stash the Tkinter bits away, wipe the install dir, then restore only those bits
    copy([os.path.join(pylibdir, part) for part in tkparts], tmpdir)

    rmtree2(self.installdir)

    mkdir(pylibdir, parents=True)
    try:
        shutil.move(os.path.join(tmpdir, tkparts[0]), pylibdir)
        shutil.move(os.path.join(tmpdir, os.path.basename(tkparts[1])), pylibdir)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
def configure_step(self):
    """Custom configuration for OpenBabel: Python bindings and Eigen support, if possible."""
    # Use separate build directory
    self.cfg['separate_build_dir'] = True

    # use self.cfg.update() so options are separated from any pre-existing configopts,
    # rather than '+=' string concatenation which can glue options together without a space
    self.cfg.update('configopts', '-DENABLE_TESTS=ON')
    # Needs wxWidgets
    self.cfg.update('configopts', '-DBUILD_GUI=OFF')

    root_python = get_software_root('Python')
    if root_python and self.cfg['with_python_bindings']:
        self.log.info("Enabling Python bindings")
        self.with_python = True
        # short Python version, e.g. '2.7'
        shortpyver = '.'.join(get_software_version('Python').split('.')[:2])
        self.cfg.update('configopts', '-DPYTHON_BINDINGS=ON')
        shlib_ext = get_shared_lib_ext()
        self.cfg.update('configopts', '-DPYTHON_LIBRARY=%s/lib/libpython%s.%s' % (root_python, shortpyver, shlib_ext))
        self.cfg.update('configopts', '-DPYTHON_INCLUDE_DIR=%s/include/python%s' % (root_python, shortpyver))
    else:
        self.log.info("Not enabling Python bindings")

    root_eigen = get_software_root("Eigen")
    if root_eigen:
        self.log.info("Using Eigen")
        self.cfg.update('configopts', "-DEIGEN3_INCLUDE_DIR='%s/include'" % root_eigen)
    else:
        self.log.info("Not using Eigen")

    super(EB_OpenBabel, self).configure_step()
def configure_step(self):
    """Set some extra environment variables before configuring."""
    shlib_ext = get_shared_lib_ext()

    # make sure that required dependencies are loaded
    deps = ['Boost', 'Python', 'SWIG']
    depsdict = {}
    for dep in deps:
        deproot = get_software_root(dep)
        if not deproot:
            raise EasyBuildError("%s module not loaded?", dep)
        else:
            depsdict.update({dep: deproot})

    # SWIG version more recent than 2.0.4 have a regression
    # which causes problems with e.g. DOLFIN if UFC was built with it
    # fixed in 2.0.7? see https://bugs.launchpad.net/dolfin/+bug/996398
    # compare LooseVersion with LooseVersion (not with a plain string), for correct version ordering
    if LooseVersion(get_software_version('SWIG')) > LooseVersion('2.0.4'):
        raise EasyBuildError("Using bad version of SWIG, expecting swig <= 2.0.4. "
                             "See https://bugs.launchpad.net/dolfin/+bug/996398")

    # short Python version, e.g. '2.7' for '2.7.12'
    self.pyver = ".".join(get_software_version('Python').split(".")[:-1])

    self.cfg.update('configopts', "-DBoost_DIR=%s" % depsdict['Boost'])
    self.cfg.update('configopts', "-DBOOST_INCLUDEDIR=%s/include" % depsdict['Boost'])
    self.cfg.update('configopts', "-DBoost_DEBUG=ON -DBOOST_ROOT=%s" % depsdict['Boost'])
    self.cfg.update('configopts', '-DUFC_ENABLE_PYTHON:BOOL=ON')
    self.cfg.update('configopts', '-DSWIG_FOUND:BOOL=ON')
    python = depsdict['Python']
    self.cfg.update('configopts', '-DPYTHON_LIBRARY=%s/lib/libpython%s.%s' % (python, self.pyver, shlib_ext))
    self.cfg.update('configopts', '-DPYTHON_INCLUDE_PATH=%s/include/python%s' % (python, self.pyver))

    super(EB_UFC, self).configure_step()
def sanity_check_step(self):
    """Custom sanity check for Hadoop."""
    native_files = []
    if self.cfg['build_native_libs']:
        native_files.append('lib/native/libhadoop.%s' % get_shared_lib_ext())

    custom_paths = {
        'files': ['bin/hadoop'] + native_files,
        'dirs': ['etc', 'libexec'],
    }
    super(EB_Hadoop, self).sanity_check_step(custom_paths=custom_paths)

    fake_mod_data = self.load_fake_module(purge=True)
    # exit code is ignored, since this cmd exits with 1 if not all native libraries were found
    cmd = "hadoop checknative -a"
    out, _ = run_cmd(cmd, simple=False, log_all=False, log_ok=False)
    self.clean_up_fake_module(fake_mod_data)

    # check that each extra native library was picked up from the installation directory
    installdir = os.path.realpath(self.installdir)
    lib_src = os.path.join(installdir, 'lib', 'native')
    not_found = [native_lib for native_lib, _ in self.cfg['extra_native_libs']
                 if not re.search(r'%s: *true *%s' % (native_lib, lib_src), out)]
    if not_found:
        raise EasyBuildError("%s not found by 'hadoop checknative -a'.", ', '.join(not_found))
def sanity_check_step(self):
    """Custom sanity check for MATLAB."""
    shlib_ext = get_shared_lib_ext()
    files = ["bin/matlab", "bin/mcc", "bin/glnxa64/MATLAB", "bin/glnxa64/mcc",
             "runtime/glnxa64/libmwmclmcrrt.%s" % shlib_ext, "toolbox/local/classpath.txt"]
    custom_paths = {
        'files': files,
        'dirs': ["java/jar", "toolbox/compiler"],
    }
    super(EB_MATLAB, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Python."""
    shlib_ext = get_shared_lib_ext()

    try:
        fake_mod_data = self.load_fake_module()
    # 'except ... as err' syntax works with Python 2.6+ and Python 3,
    # unlike the Python-2-only 'except ..., err' form
    except EasyBuildError as err:
        raise EasyBuildError("Loading fake module failed: %s", err)
def sanity_check_step(self):
    """Custom sanity check for Ruby gems"""
    # major.minor version, used in include/lib directory names
    majver = '.'.join(self.version.split('.')[:2])
    binaries = ['erb', 'gem', 'irb', 'rake', 'rdoc', 'ri', 'ruby']
    custom_paths = {
        'files': ['bin/%s' % binary for binary in binaries] + ['lib/libruby.%s' % get_shared_lib_ext()],
        'dirs': ['include/ruby-%s.0' % majver, 'lib/pkgconfig', 'lib/ruby/%s.0' % majver, 'lib/ruby/gems'],
    }
    return super(EB_Ruby, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for libQGLViewer."""
    shlib_ext = get_shared_lib_ext()
    # tuples: the file may be found in either 'lib' or 'lib64'
    custom_paths = {
        'files': [('lib/libQGLViewer.prl', 'lib64/libQGLViewer.prl'),
                  ('lib/libQGLViewer.%s' % shlib_ext, 'lib64/libQGLViewer.%s' % shlib_ext)],
        'dirs': ['include/QGLViewer'],
    }
    # actually run the sanity check; without this call, custom_paths was built but never used
    super(EB_libQGLViewer, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for bzip2."""
    libs = ['lib/libbz2.a']
    # shared libraries are only there if they were built (see install_step)
    if self.cfg['with_shared_libs']:
        shlib_ext = get_shared_lib_ext()
        libs.append('lib/libbz2.%s.%s' % (shlib_ext, self.version))
        libs.append('lib/libbz2.%s' % shlib_ext)

    binaries = ['bin/b%s' % suffix
                for suffix in ['unzip2', 'zcat', 'zdiff', 'zgrep', 'zip2', 'zip2recover', 'zmore']]

    custom_paths = {
        'files': binaries + ['include/bzlib.h'] + libs,
        'dirs': [],
    }
    super(EB_bzip2, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check paths for icc."""
    # location of binaries/libraries changed over the years
    binprefix = 'bin/intel64'
    libprefix = 'lib/intel64'
    if LooseVersion(self.version) >= LooseVersion('2011'):
        if LooseVersion(self.version) <= LooseVersion('2011.3.174'):
            binprefix = 'bin'
        elif LooseVersion(self.version) >= LooseVersion('2013_sp1'):
            binprefix = 'bin'
        else:
            libprefix = 'compiler/lib/intel64'

    binfiles = ['icc', 'icpc']
    if LooseVersion(self.version) < LooseVersion('2014'):
        binfiles.append('idb')

    sanity_check_files = [os.path.join(binprefix, binfile) for binfile in binfiles]
    shlib_ext = get_shared_lib_ext()
    for lib in ['iomp5.a', 'iomp5.%s' % shlib_ext]:
        sanity_check_files.append(os.path.join(libprefix, 'lib%s' % lib))

    if LooseVersion(self.version) > LooseVersion('2015'):
        sanity_check_files.append('include/omp.h')

    custom_paths = {
        'files': sanity_check_files,
        'dirs': [],
    }

    # make very sure that expected 'compilers_and_libraries_<VERSION>/linux' subdir is there for recent versions,
    # since we rely on it being there in make_module_req_guess
    if self.comp_libs_subdir:
        custom_paths['dirs'].append(self.comp_libs_subdir)

    custom_commands = ["which icc"]

    super(EB_icc, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    stash_dir = tempfile.mkdtemp(dir=self.builddir)

    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    so_pattern = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + get_shared_lib_ext())
    hits = glob.glob(so_pattern)
    if len(hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", hits)
    self.tkinter_so_basename = os.path.basename(hits[0])

    # Tkinter package dir is 'tkinter' for Python 3, 'lib-tk' for Python 2
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    # stash the Tkinter pieces, wipe the installation, then restore only what is needed
    copy([os.path.join(pylibdir, part) for part in tkparts], stash_dir)
    rmtree2(self.installdir)
    mkdir(pylibdir, parents=True)
    try:
        shutil.move(os.path.join(stash_dir, tkparts[0]), pylibdir)
        shutil.move(os.path.join(stash_dir, os.path.basename(tkparts[1])), pylibdir)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
def configure_step(self):
    """Set some extra environment variables before configuring."""
    shlib_ext = get_shared_lib_ext()

    # make sure that required dependencies are loaded
    deps = ['Boost', 'Python', 'SWIG']
    depsdict = {}
    for dep in deps:
        deproot = get_software_root(dep)
        if not deproot:
            raise EasyBuildError("%s module not loaded?", dep)
        else:
            depsdict.update({dep: deproot})

    # SWIG version more recent than 2.0.4 have a regression
    # which causes problems with e.g. DOLFIN if UFC was built with it
    # fixed in 2.0.7? see https://bugs.launchpad.net/dolfin/+bug/996398
    # wrap the right-hand side in LooseVersion too, so versions are ordered as versions
    # rather than compared against a plain string
    if LooseVersion(get_software_version('SWIG')) > LooseVersion('2.0.4'):
        raise EasyBuildError(
            "Using bad version of SWIG, expecting swig <= 2.0.4. "
            "See https://bugs.launchpad.net/dolfin/+bug/996398")

    # short Python version (all but the last component), e.g. '2.7'
    self.pyver = ".".join(get_software_version('Python').split(".")[:-1])

    self.cfg.update('configopts', "-DBoost_DIR=%s" % depsdict['Boost'])
    self.cfg.update('configopts', "-DBOOST_INCLUDEDIR=%s/include" % depsdict['Boost'])
    self.cfg.update('configopts', "-DBoost_DEBUG=ON -DBOOST_ROOT=%s" % depsdict['Boost'])
    self.cfg.update('configopts', '-DUFC_ENABLE_PYTHON:BOOL=ON')
    self.cfg.update('configopts', '-DSWIG_FOUND:BOOL=ON')
    python = depsdict['Python']
    self.cfg.update(
        'configopts',
        '-DPYTHON_LIBRARY=%s/lib/libpython%s.%s' % (python, self.pyver, shlib_ext))
    self.cfg.update(
        'configopts',
        '-DPYTHON_INCLUDE_PATH=%s/include/python%s' % (python, self.pyver))

    super(EB_UFC, self).configure_step()
def sanity_check_step(self):
    """Custom sanity check for ELPA."""
    custom_paths = {
        'dirs': ['lib/pkgconfig', 'bin'],
    }

    shlib_ext = get_shared_lib_ext()

    # every built variant (MPI x OpenMP) gets its own headers, Fortran module and libraries
    mpi_variants = [False]
    if self.cfg['with_mpi']:
        mpi_variants.append(True)
    omp_variants = [False]
    if self.cfg['with_openmp']:
        omp_variants.append(True)

    files = []
    for with_mpi in mpi_variants:
        mpi_suff = '' if with_mpi else '_onenode'
        for with_omp in omp_variants:
            omp_suff = '_openmp' if with_omp else ''
            files.append('include/elpa%s%s-%s/elpa/elpa.h' % (mpi_suff, omp_suff, self.version))
            files.append('include/elpa%s%s-%s/modules/elpa.mod' % (mpi_suff, omp_suff, self.version))
            files.append('lib/libelpa%s%s.a' % (mpi_suff, omp_suff))
            if self.cfg['with_shared']:
                files.append('lib/libelpa%s%s.%s' % (mpi_suff, omp_suff, shlib_ext))

    # deduplicate while preserving order
    custom_paths['files'] = nub(files)

    super(EB_ELPA, self).sanity_check_step(custom_paths=custom_paths)
def install_step(self): """ Custom install step for GROMACS; figure out where libraries were installed to. """ # Skipping if CUDA is enabled and the current iteration is double precision if self.is_double_precision_cuda_build(): self.log.info("skipping install step") else: # run 'make install' in parallel since it involves more compilation self.cfg.update('installopts', "-j %s" % self.cfg['parallel']) super(EB_GROMACS, self).install_step() # the GROMACS libraries get installed in different locations (deeper subdirectory), # depending on the platform; # this is determined by the GNUInstallDirs CMake module; # rather than trying to replicate the logic, we just figure out where the library was placed if self.cfg['build_shared_libs']: self.libext = get_shared_lib_ext() else: self.libext = 'a' if LooseVersion(self.version) < LooseVersion('5.0'): libname = 'libgmx*.%s' % self.libext else: libname = 'libgromacs*.%s' % self.libext for libdir in ['lib', 'lib64']: if os.path.exists(os.path.join(self.installdir, libdir)): for subdir in [libdir, os.path.join(libdir, '*')]: libpaths = glob.glob(os.path.join(self.installdir, subdir, libname)) if libpaths: self.lib_subdir = os.path.dirname(libpaths[0])[len(self.installdir)+1:] self.log.info("Found lib subdirectory that contains %s: %s", libname, self.lib_subdir) break if not self.lib_subdir: raise EasyBuildError("Failed to determine lib subdirectory in %s", self.installdir) # Reset installopts etc for the benefit of the gmxapi extension self.cfg['installopts'] = self.orig_installopts
def sanity_check_step(self):
    """Custom sanity check for Mesa."""
    shlib_ext = get_shared_lib_ext()

    if LooseVersion(self.version) >= LooseVersion('20.0'):
        # Mesa 20.x installs a reduced set of headers
        header_files = [os.path.join('include', 'EGL', hdr) for hdr in ('eglmesaext.h', 'eglextchromium.h')]
        header_files.append(os.path.join('include', 'GL', 'osmesa.h'))
        header_files.append(os.path.join('include', 'GL', 'internal', 'dri_interface.h'))
    else:
        gl_headers = ['glext.h', 'gl_mangle.h', 'glx.h', 'osmesa.h', 'gl.h', 'glxext.h', 'glx_mangle.h']
        gles_headers = [('GLES', 'gl.h'), ('GLES2', 'gl2.h'), ('GLES3', 'gl3.h')]
        header_files = [os.path.join('include', 'GL', hdr) for hdr in gl_headers]
        header_files += [os.path.join('include', subdir, hdr) for (subdir, hdr) in gles_headers]

    custom_paths = {
        'files': [os.path.join('lib', 'libOSMesa.%s' % shlib_ext)] + header_files,
        'dirs': [os.path.join('include', 'GL', 'internal')],
    }

    if self.swr_arches:
        # every SWR architecture that was enabled yields a separate backend library
        custom_paths['files'] += [
            os.path.join('lib', 'libswr%s.%s' % (arch.upper(), shlib_ext)) for arch in self.swr_arches
        ]

    super(EB_Mesa, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for PETSc"""
    # for source installs, files live under <petsc_subdir>[/<petsc_arch>] rather than the top level
    if self.cfg['sourceinstall']:
        prefix1 = self.petsc_subdir
        prefix2 = os.path.join(self.petsc_subdir, self.petsc_arch)
    else:
        prefix1 = ""
        prefix2 = ""

    libext = get_shared_lib_ext() if self.cfg['shared_libs'] else "a"

    custom_paths = {
        'files': [os.path.join(prefix2, "lib", "libpetsc.%s" % libext)],
        'dirs': [
            os.path.join(prefix1, "bin"),
            os.path.join(prefix2, "conf"),
            os.path.join(prefix1, "include"),
            os.path.join(prefix2, "include"),
        ],
    }
    super(EB_PETSc, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """
    Custom sanity check for HDF5
    """
    # note: the 'usempi' lookup also yields a falsy value when the toolchain has no MPI support at all
    if self.toolchain.options.get('usempi', None):
        extra_binaries = ["h5perf", "h5pcc", "h5pfc", "ph5diff"]
    else:
        extra_binaries = ["h5cc", "h5fc"]

    h5binaries = ["2gif", "c++", "copy", "debug", "diff", "dump", "import", "jam", "ls", "mkgrp",
                  "perf_serial", "redeploy", "repack", "repart", "stat", "unjam"]
    binaries = ["h5%s" % suff for suff in h5binaries]
    binaries.append("gif2h5")
    binaries.extend(extra_binaries)

    shlib_ext = get_shared_lib_ext()
    lib_suffixes = ['', '_cpp', '_fortran', '_hl_cpp', '_hl', 'hl_fortran']
    libs = ["libhdf5%s.%s" % (suff, shlib_ext) for suff in lib_suffixes]

    custom_paths = {
        'files': [os.path.join("bin", binary) for binary in binaries] +
                 [os.path.join("lib", lib) for lib in libs],
        'dirs': ['include'],
    }
    super(EB_HDF5, self).sanity_check_step(custom_paths=custom_paths)
def det_flexiblas_backend_libs():
    """Determine list of paths to FlexiBLAS backend libraries."""
    # example output for 'flexiblas list':
    #     System-wide (config directory):
    #             OPENBLAS
    #                     library = libflexiblas_openblas.so
    out, _ = run_cmd("flexiblas list", simple=False, trace=False)

    shlib_ext = get_shared_lib_ext()
    lib_regex = re.compile(r'library = (?P<lib>lib.*\.%s)' % shlib_ext, re.M)

    # assumption: a FlexiBLAS library name like 'libflexiblas_openblas.so'
    # maps directly onto a backend library named 'libopenblas.so'
    return ['lib' + name.replace('libflexiblas_', '') for name in lib_regex.findall(out)]
def sanity_check_step(self):
    """Custom sanity check for Qt."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    if version >= LooseVersion('4'):
        # as of Qt5, the major version is embedded in library names (e.g. libQt5Core)
        libversion = self.version.split('.')[0] if version >= LooseVersion('5') else ''
        libfile = os.path.join('lib', 'libQt%sCore.%s' % (libversion, shlib_ext))
    else:
        libfile = os.path.join('lib', 'libqt.%s' % shlib_ext)

    custom_paths = {
        'files': [libfile],
        'dirs': ['bin', 'include', 'plugins'],
    }
    super(EB_Qt, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for MRtrix."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    if version >= LooseVersion('0.3'):
        libso = 'libmrtrix.%s' % shlib_ext
    else:
        # older releases embed the version (with dots replaced by underscores) in the library name
        libso = 'libmrtrix-%s.%s' % ('_'.join(self.version.split('.')), shlib_ext)

    custom_paths = {
        'files': [os.path.join('lib', libso)],
        'dirs': ['bin'],
    }

    custom_commands = []
    if version >= LooseVersion('3.0'):
        # MRtrix 3.x ships a Python package that should be importable
        custom_commands.append("python -c 'import mrtrix3'")

    super(EB_MRtrix, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def sanity_check_step(self):
    """Custom sanity check for NEURON."""
    shlib_ext = get_shared_lib_ext()

    # binaries and libraries are installed in a host-CPU-specific subdirectory
    binpath = os.path.join(self.hostcpu, 'bin')
    libpath = os.path.join(self.hostcpu, 'lib', 'lib%s.' + shlib_ext)

    binaries = ["bbswork.sh", "hel2mos1.sh", "hoc_ed", "ivoc", "memacs", "mkthreadsafe",
                "modlunit", "mos2nrn", "mos2nrn2.sh", "neurondemo", "nocmodl", "oc"]
    nrn_binaries = ["gui", "iv", "iv_makefile", "ivmodl", "mech_makefile", "oc", "oc_makefile", "ocmodl"]
    libs = ["ivoc", "ivos", "memacs", "meschach", "neuron_gnu", "nrniv", "nrnmpi", "nrnoc",
            "nrnpython", "oc", "ocxt", "scopmath", "sparse13", "sundials"]

    custom_paths = {
        'files': [os.path.join(binpath, x) for x in binaries] +
                 [os.path.join(binpath, "nrn%s" % x) for x in nrn_binaries] +
                 [libpath % x for x in libs],
        'dirs': ['include/nrn', 'share/nrn'],
    }

    super(EB_NEURON, self).sanity_check_step(custom_paths=custom_paths)

    try:
        fake_mod_data = self.load_fake_module()
    # bug fix: 'except EasyBuildError, err' is Python 2-only syntax (SyntaxError on Python 3);
    # use 'as err', consistent with the other easyblocks in this file
    except EasyBuildError as err:
        # best-effort: failure to load the fake module is only logged, not fatal
        self.log.debug("Loading fake module failed: %s" % err)
def sanity_check_step(self):
    """Custom sanity check for OpenCV."""
    opencv_bins = ['annotation', 'createsamples', 'traincascade', 'interactive-calibration', 'version',
                   'visualisation']

    libfile = 'libopencv_core.%s' % get_shared_lib_ext()
    custom_paths = {
        'files': [os.path.join('bin', 'opencv_%s' % x) for x in opencv_bins] + [os.path.join('lib64', libfile)],
        'dirs': ['include', self.pylibdir],
    }
    # IPP-enabled builds also install the static IPP ICV library
    if 'WITH_IPP=ON' in self.cfg['configopts']:
        custom_paths['files'].append(os.path.join('lib', 'libippicv.a'))

    custom_commands = []
    if get_software_root('Python'):
        custom_commands.append("python -c 'import cv2'")

    # bug fix: custom_commands was constructed but never passed on, so the
    # 'import cv2' check was silently skipped; pass it to the parent sanity check
    super(EB_OpenCV, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def sanity_check_step(self):
    """Custom sanity check paths for ifort."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    # default locations; adjusted below for specific version ranges
    binprefix = 'bin/intel64'
    libprefix = 'lib/intel64'
    if version >= LooseVersion('2011'):
        if version <= LooseVersion('2011.3.174'):
            binprefix = 'bin'
        elif version >= LooseVersion('2013_sp1'):
            binprefix = 'bin'
        else:
            libprefix = 'compiler/lib/intel64'

    bins = ['ifort']
    if version < LooseVersion('2013'):
        # idb is not shipped with ifort anymore in 2013.x versions (it is with icc though)
        bins.append('idb')

    # both static and shared variants of the Fortran runtime and OpenMP libraries
    libs = []
    for libname in ('ifcore', 'iomp5'):
        libs.append('%s.a' % libname)
        libs.append('%s.%s' % (libname, shlib_ext))

    custom_paths = {
        'files': [os.path.join(binprefix, binary) for binary in bins] +
                 [os.path.join(libprefix, 'lib%s' % lib) for lib in libs],
        'dirs': [],
    }

    # make very sure that expected 'compilers_and_libraries_<VERSION>/linux' subdir is there for recent versions,
    # since we rely on it being there in make_module_req_guess
    if self.comp_libs_subdir:
        custom_paths['dirs'].append(self.comp_libs_subdir)

    custom_commands = ["which ifort"]

    IntelBase.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands)
def configure_step(self):
    """Custom configure procedure for OpenBabel."""
    # always enable the test suite; never build the GUI (it needs wxWidgets)
    self.cfg['configopts'] += "-DENABLE_TESTS=ON " + "-DBUILD_GUI=OFF "

    py_root = get_software_root('Python')
    if py_root and self.cfg['with_python_bindings']:
        self.log.info("Enabling Python bindings")
        self.with_python = True
        self.cfg.update('configopts', '-DPYTHON_BINDINGS=ON')

        if LooseVersion(self.version) >= LooseVersion('3.0.0'):
            self.log.info("Enabling SWIG")
            self.cfg.update('configopts', '-DRUN_SWIG=ON')

        # locate the Python include subdir and libpython*.so for this Python version
        pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
        inc_matches = glob.glob(os.path.join(py_root, 'include', 'python%s*' % pyshortver))
        shlib_ext = get_shared_lib_ext()
        lib_matches = glob.glob(os.path.join(py_root, 'lib', 'libpython%s*.%s' % (pyshortver, shlib_ext)))

        # exactly one match for each is required, otherwise we can't configure unambiguously
        if len(inc_matches) == 1 and len(lib_matches) == 1:
            self.cfg.update('configopts', '-DPYTHON_INCLUDE_DIR=%s' % inc_matches[0])
            self.cfg.update('configopts', '-DPYTHON_LIBRARY=%s' % lib_matches[0])
        else:
            raise EasyBuildError("Failed to isolate Python include subdir and/or libpython*.so path: %s, %s",
                                 inc_matches, lib_matches)
    else:
        self.log.info("Not enabling Python bindings")

    eigen_root = get_software_root("Eigen")
    if eigen_root:
        self.log.info("Using Eigen")
        self.cfg['configopts'] += "-DEIGEN3_INCLUDE_DIR='%s/include' " % eigen_root
    else:
        self.log.info("Not using Eigen")

    super(EB_OpenBabel, self).configure_step()
def sanity_check_step(self):
    """Custom sanity check for AOCC, based on sanity check for Clang."""
    shlib_ext = get_shared_lib_ext()

    tools = ['clang', 'clang++', 'flang', 'lld', 'llvm-ar', 'llvm-as', 'llvm-config',
             'llvm-link', 'llvm-nm', 'llvm-symbolizer', 'opt', 'scan-build', 'scan-view']
    files = ['bin/%s' % tool for tool in tools]
    files += [
        'include/clang-c/Index.h',
        'include/llvm-c/Core.h',
        # resource headers live in a clang-version-specific subdirectory
        'lib/clang/%s/include/omp.h' % self.clangversion,
        'lib/clang/%s/include/stddef.h' % self.clangversion,
        'lib/libclang.%s' % shlib_ext,
        'lib/libomp.%s' % shlib_ext,
    ]
    custom_paths = {
        'files': files,
        'dirs': ['include/llvm', 'lib/clang/%s/lib' % self.clangversion, 'lib32'],
    }

    custom_commands = [
        "clang --help",
        "clang++ --help",
        "clang-%s --help" % LooseVersion(self.clangversion).version[0],
        "clang-cpp --help",
        "flang --help",
        "llvm-config --cxxflags",
    ]

    super(EB_AOCC, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def sanity_check_step(self):
    """Custom sanity check for CUDA."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    chk_libdir = ["lib64", "lib"]
    culibs = ["cublas", "cudart", "cufft", "curand", "cusparse"]

    bins = [os.path.join("bin", b) for b in ("fatbinary", "nvcc", "nvlink", "ptxas")]
    libs = [os.path.join(libdir, "lib%s.%s" % (culib, shlib_ext)) for libdir in chk_libdir for culib in culibs]

    custom_paths = {
        'files': bins + libs,
        'dirs': ["include"],
    }

    # Samples moved to https://github.com/nvidia/cuda-samples
    if LooseVersion('5') < version < LooseVersion('11.6'):
        custom_paths['files'].append(os.path.join('samples', 'Makefile'))

    if version < LooseVersion('7'):
        custom_paths['files'].append(os.path.join('open64', 'bin', 'nvopencc'))
    if version >= LooseVersion('7'):
        # CUPTI profiling support ships separately under extras/ as of CUDA 7
        custom_paths['files'].append(os.path.join("extras", "CUPTI", "lib64", "libcupti.%s" % shlib_ext))
        custom_paths['dirs'].append(os.path.join("extras", "CUPTI", "include"))

    # Just a subset of files are checked, since the whole list is likely to change,
    # and irrelevant in most cases anyway
    if os.path.exists(os.path.join(self.installdir, 'pkgconfig')):
        pc_files = ['cublas.pc', 'cudart.pc', 'cuda.pc']
        custom_paths['files'].extend(os.path.join('pkgconfig', pc) for pc in pc_files)

    super(EB_CUDA, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Clang."""
    shlib_ext = get_shared_lib_ext()

    tools = ["clang", "clang++", "llvm-ar", "llvm-nm", "llvm-as", "opt", "llvm-link",
             "llvm-config", "llvm-symbolizer"]
    files = ["bin/%s" % tool for tool in tools]
    files += [
        "include/llvm-c/Core.h",
        "include/clang-c/Index.h",
        "lib/libclang.%s" % shlib_ext,
        # resource headers live in a version-specific subdirectory
        "lib/clang/%s/include/stddef.h" % self.version,
    ]
    custom_paths = {
        'files': files,
        'dirs': ["include/clang", "include/llvm", "lib/clang/%s/lib" % self.version],
    }

    if self.cfg['static_analyzer']:
        custom_paths['files'] += ["bin/scan-build", "bin/scan-view"]

    if self.cfg["usepolly"]:
        custom_paths['files'].append("lib/LLVMPolly.%s" % shlib_ext)
        custom_paths['dirs'].append("include/polly")

    if LooseVersion(self.version) >= LooseVersion('3.8'):
        # the OpenMP runtime is bundled as of Clang 3.8
        custom_paths['files'] += ["lib/libomp.%s" % shlib_ext, "lib/clang/%s/include/omp.h" % self.version]

    super(EB_Clang, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Check for main library files for MXNet"""
    shlib_ext = get_shared_lib_ext()
    custom_paths = {
        'files': ['lib/libmxnet.a', 'lib/libmxnet.%s' % shlib_ext],
        'dirs': [],
    }
    super(EB_MXNet, self).sanity_check_step(custom_paths=custom_paths)

    # for the extension we are doing the loading of the fake module ourself
    try:
        fake_mod_data = self.load_fake_module()
    except EasyBuildError as err:
        raise EasyBuildError("Loading fake module failed: %s", err)

    if not self.py_ext.sanity_check_step():
        raise EasyBuildError("The sanity check for the Python bindings failed")

    # the R bindings use the (lowercased) software name as module name
    self.r_ext.options['modulename'] = self.name.lower()
    if not self.r_ext.sanity_check_step():
        raise EasyBuildError("The sanity check for the R bindings failed")

    self.clean_up_fake_module(fake_mod_data)
def sanity_check_step(self):
    """Custom sanity check for CGAL."""
    shlib_ext = get_shared_lib_ext()

    libs = []
    for suffix in ['', '_Core']:
        libs.append(os.path.join('lib64', 'libCGAL%s.%s' % (suffix, shlib_ext)))

    dirs = [os.path.join('include', 'CGAL')]
    if LooseVersion(self.version) >= LooseVersion('4.12'):
        # CMake config files are installed under lib64/cmake as of CGAL 4.12
        dirs.append(os.path.join('lib64', 'cmake', 'CGAL'))
    else:
        dirs.append(os.path.join('lib64', 'CGAL'))

    scripts = ['create_cmake_script', 'make_macosx_app']
    custom_paths = {
        'files': [os.path.join('bin', 'cgal_%s' % script) for script in scripts] + libs,
        'dirs': dirs,
    }
    super(EB_CGAL, self).sanity_check_step(custom_paths=custom_paths)
def install_step(self):
    """Install in non-standard path by passing PREFIX variable to make install."""
    self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
    super(EB_bzip2, self).install_step()

    # also build & install shared libraries, if desired
    if self.cfg['with_shared_libs']:
        # shared libraries require a separate make run with bzip2's dedicated makefile
        cmd = "%s make -f Makefile-libbz2_so %s" % (self.cfg['prebuildopts'], self.cfg['buildopts'])
        run_cmd(cmd, log_all=True, simple=True)

        # copy shared libraries to <install dir>/lib
        # NOTE(review): the glob is relative, so this assumes the current working directory
        # is still the build directory where the libraries were produced — confirm
        shlib_ext = get_shared_lib_ext()
        libdir = os.path.join(self.installdir, 'lib')
        try:
            for lib in glob.glob('libbz2.%s.*' % shlib_ext):
                # only way to copy a symlink is to check for it,
                # cfr. http://stackoverflow.com/questions/4847615/copying-a-symbolic-link-in-python
                if os.path.islink(lib):
                    os.symlink(os.readlink(lib), os.path.join(libdir, lib))
                else:
                    shutil.copy2(lib, libdir)
        except OSError as err:
            raise EasyBuildError("Copying shared libraries to installation dir %s failed: %s", libdir, err)

        # create symlink libbz2.so >> libbz2.so.1.0.6
        # (a relative symlink, created from inside libdir so the link target has no path prefix)
        try:
            cwd = os.getcwd()
            os.chdir(libdir)
            os.symlink('libbz2.%s.%s' % (shlib_ext, self.version), 'libbz2.%s' % shlib_ext)
            os.chdir(cwd)
        except OSError as err:
            raise EasyBuildError("Creating symlink for libbz2.so failed: %s", err)
def sanity_check_step(self):
    """Custom sanity check for CUDA."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    # Versions higher than 6 do not provide 32 bit libraries
    chk_libdir = ["lib64"]
    if version < LooseVersion("6"):
        chk_libdir.append("lib")

    extra_files = []
    if version < LooseVersion('7'):
        extra_files.append('open64/bin/nvopencc')

    culibs = ["cublas", "cudart", "cufft", "curand", "cusparse"]
    bins = ["bin/%s" % b for b in ["fatbinary", "nvcc", "nvlink", "ptxas"]]
    libs = ["%s/lib%s.%s" % (libdir, culib, shlib_ext) for libdir in chk_libdir for culib in culibs]

    custom_paths = {
        'files': bins + extra_files + libs,
        'dirs': ["include"],
    }

    super(EB_CUDA, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Trilinos."""
    # selection of libraries
    libs = ["Amesos", "Anasazi", "AztecOO", "Belos", "Epetra", "Galeri", "GlobiPack", "Ifpack",
            "Intrepid", "Isorropia", "Kokkos", "Komplex", "LOCA", "Mesquite", "ML", "Moertel",
            "MOOCHO", "NOX", "Pamgen", "RTOp", "Rythmos", "Sacado", "Shards", "Stratimikos",
            "Teuchos", "Tpetra", "Triutils", "Zoltan"]

    libs = [l for l in libs if l not in self.cfg['skip_exts']]

    # Teuchos was refactored in 11.2
    if LooseVersion(self.version) >= LooseVersion('11.2') and 'Teuchos' in libs:
        libs.remove('Teuchos')
        libs.extend(['teuchoscomm', 'teuchoscore', 'teuchosnumerics', 'teuchosparameterlist',
                     'teuchosremainder'])

    # Kokkos was refactored in 12.x, check for libkokkoscore.a rather than libkokkos.a
    if LooseVersion(self.version) >= LooseVersion('12') and 'Kokkos' in libs:
        libs.remove('Kokkos')
        libs.append('kokkoscore')

    # libgaleri was split into libgaleri-epetra & libgaleri-xpetra
    # bug fix: guard with a membership check (consistent with Teuchos/Kokkos above),
    # otherwise list.remove() raises ValueError when Galeri is listed in skip_exts
    if LooseVersion(self.version) >= LooseVersion('12.6') and 'Galeri' in libs:
        libs.remove('Galeri')
        libs.extend(['galeri-epetra', 'galeri-xpetra'])

    # Get the library extension
    if self.cfg['shared_libs']:
        lib_ext = get_shared_lib_ext()
    else:
        lib_ext = 'a'

    custom_paths = {
        'files': [os.path.join('lib', 'lib%s.%s' % (l.lower(), lib_ext)) for l in libs],
        'dirs': ['bin', 'include']
    }

    super(EB_Trilinos, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for FlexiBLAS."""
    shlib_ext = get_shared_lib_ext()

    libs = []
    # top-level libraries in lib/
    for suffix in ('', '_api', '_mgmt'):
        libs.append(os.path.join('lib', 'libflexiblas%s.%s' % (suffix, shlib_ext)))

    # backend + hook libraries in lib/flexiblas/
    backend_names = self.blas_libs + ['hook_dummy', 'hook_profile']
    if self.cfg['enable_lapack']:
        backend_names.append('fallback_lapack')
    for name in backend_names:
        libs.append(os.path.join('lib', 'flexiblas', 'libflexiblas_%s.%s' % (name.lower(), shlib_ext)))

    # header files in include/flexiblas/
    header_names = ['flexiblas_api.h', 'flexiblas_mgmt.h', 'cblas.h']
    if self.cfg['enable_lapack']:
        header_names.append('lapack.h')
    includes = [os.path.join('include', 'flexiblas', hdr) for hdr in header_names]

    custom_paths = {
        'files': [os.path.join('bin', 'flexiblas'), os.path.join('etc', 'flexiblasrc')] + includes + libs,
        'dirs': [os.path.join('etc', 'flexiblasrc.d'), os.path.join('share', 'man')],
    }

    custom_commands = ["flexiblas --help", "flexiblas list"]
    # every configured BLAS backend must be reported by 'flexiblas list'
    custom_commands.extend("flexiblas list | grep %s" % blas_lib.upper() for blas_lib in self.blas_libs)

    super(EB_FlexiBLAS, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def sanity_check_step(self):
    """Custom sanity check for ATLAS."""
    libs = ["atlas", "cblas", "f77blas", "lapack", "ptcblas", "ptf77blas"]

    # static libraries are always built; shared ones only on request
    lib_files = ["lib/lib%s.a" % lib for lib in libs]
    if self.cfg['sharedlibs']:
        shlib_ext = get_shared_lib_ext()
        lib_files += ["lib/lib%s.%s" % (lib, shlib_ext) for lib in libs]

    custom_paths = {
        'files': ["include/%s" % hdr for hdr in ["cblas.h", "clapack.h"]] + lib_files,
        'dirs': ["include/atlas"],
    }

    super(EB_ATLAS, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for ELPA."""
    custom_paths = {
        'dirs': ['lib/pkgconfig', 'bin'],
    }

    shlib_ext = get_shared_lib_ext()

    # ELPA uses the following naming scheme:
    # "onenode" suffix: no MPI support
    # "openmp" suffix: OpenMP support
    mpi_suff = '' if self.toolchain.options.get('usempi', None) else '_onenode'

    expected_files = []
    for with_omp in nub([False, self.toolchain.options.get('openmp', False)]):
        omp_suff = '_openmp' if with_omp else ''
        variant = mpi_suff + omp_suff
        expected_files.append('include/elpa%s-%s/elpa/elpa.h' % (variant, self.version))
        expected_files.append('include/elpa%s-%s/modules/elpa.mod' % (variant, self.version))
        expected_files.append('lib/libelpa%s.a' % variant)
        if self.cfg['with_shared']:
            expected_files.append('lib/libelpa%s.%s' % (variant, shlib_ext))

    custom_paths['files'] = expected_files

    super(EB_ELPA, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Libint."""
    shlib_ext = get_shared_lib_ext()

    if LooseVersion(self.version) >= LooseVersion('2.0'):
        # Libint 2.x installs its headers under include/libint2
        files = ['lib/libint2.a', 'lib/libint2.%s' % shlib_ext, 'include/libint2/libint2.h']
    else:
        files = [
            'include/libint/libint.h',
            'include/libint/hrr_header.h',
            'include/libint/vrr_header.h',
            'lib/libint.a',
            'lib/libint.%s' % shlib_ext,
        ]

    custom_paths = {
        'files': files,
        'dirs': [],
    }
    super(EB_Libint, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Qt."""
    shlib_ext = get_shared_lib_ext()
    version = LooseVersion(self.version)

    if version >= LooseVersion('4'):
        # as of Qt5, the major version is embedded in library names (e.g. libQt5Core)
        libversion = self.version.split('.')[0] if version >= LooseVersion('5') else ''
        libfile = os.path.join('lib', 'libQt%sCore.%s' % (libversion, shlib_ext))
    else:
        libfile = os.path.join('lib', 'libqt.%s' % shlib_ext)

    custom_paths = {
        'files': ['bin/moc', 'bin/qmake', libfile],
        'dirs': ['include', 'plugins'],
    }

    if self.cfg['check_qtwebengine']:
        # QtWebEngine is only built on systems with a sufficiently recent glibc
        glibc_version = get_glibc_version()
        if LooseVersion(glibc_version) > LooseVersion("2.16"):
            webengine_libs = ['libQt%s%s.%s' % (libversion, lib, shlib_ext)
                              for lib in ['WebEngine', 'WebEngineCore']]
            custom_paths['files'].extend(os.path.join('lib', lib) for lib in webengine_libs)
        else:
            self.log.debug("Skipping check for qtwebengine, since it requires a more recent glibc.")

    if version >= LooseVersion('4'):
        custom_paths['files'].append('bin/xmlpatterns')

    super(EB_Qt, self).sanity_check_step(custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for CGAL."""
    shlib_ext = get_shared_lib_ext()

    # libraries may land in either 'lib' or 'lib64', so use tuples of alternative paths
    lib_dirs = ('lib', 'lib64')
    libs = []
    for suffix in ['', '_Core']:
        libs.append(tuple(os.path.join(d, 'libCGAL%s.%s' % (suffix, shlib_ext)) for d in lib_dirs))

    if LooseVersion(self.version) > LooseVersion('4.12'):
        dirs = ['include/CGAL', ('lib/cmake/CGAL', 'lib64/cmake/CGAL')]
    else:
        dirs = ['include/CGAL', ('lib/CGAL', 'lib64/CGAL')]

    scripts = ['create_cmake_script', 'make_macosx_app']
    custom_paths = {
        'files': ['bin/cgal_%s' % script for script in scripts] + libs,
        'dirs': dirs,
    }
    super(EB_CGAL, self).sanity_check_step(custom_paths=custom_paths)