def test_pythonpackage_det_pylibdir(self):
    """Test det_pylibdir function from pythonpackage.py."""
    from easybuild.easyblocks.generic.pythonpackage import det_pylibdir

    # exercise default, platform-specific and explicit-python-command variants
    candidates = [
        det_pylibdir(),
        det_pylibdir(plat_specific=True),
        det_pylibdir(python_cmd=sys.executable),
    ]
    for libdir in candidates:
        is_valid = libdir.startswith('lib') and '/python' in libdir and libdir.endswith('site-packages')
        self.assertTrue(is_valid)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    # temporary stash location for the Tkinter bits, inside the build dir
    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    # False: the _tkinter*.so file is still in Python's lib-dynload dir at this point
    self.tkinter_so_basename = self.get_tkinter_so_basename(False)
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    pylibdir = os.path.join(self.installdir, det_pylibdir())
    # tkparts are relative to lib/pythonX.Y (the parent of site-packages)
    copy([os.path.join(os.path.dirname(pylibdir), x) for x in tkparts], tmpdir)

    # wipe the full Python installation, then restore only the Tkinter pieces
    remove_dir(self.installdir)

    move_file(os.path.join(tmpdir, tkparts[0]), os.path.join(pylibdir, tkparts[0]))
    move_file(os.path.join(tmpdir, self.tkinter_so_basename), os.path.join(pylibdir, self.tkinter_so_basename))
def configure_step(self):
    """Custom configuration procedure for NEURON."""
    # support for distributed simulations, if requested
    if self.cfg['paranrn']:
        self.cfg.update('configopts', '--with-paranrn')

    # point configure at InterViews when it is available as a dependency
    interviews_root = get_software_root('InterViews')
    if interviews_root:
        self.cfg.update('configopts', "--with-iv=%s" % interviews_root)
    else:
        self.cfg.update('configopts', "--without-iv")

    # enable Python as alternative interpreter when Python is a dependency
    python_root = get_software_root('Python')
    if python_root:
        self.with_python = True
        self.cfg.update('configopts', "--with-nrnpython=%s/bin/python" % python_root)

    # query config.guess for the host CPU type (first dash-separated field)
    (out, ec) = run_cmd("./config.guess", simple=False)
    self.hostcpu = out.split('\n')[0].split('-')[0]
    self.log.debug("Determined host CPU type as %s" % self.hostcpu)

    # remember the Python lib dir for later steps
    self.pylibdir = det_pylibdir()

    # complete configuration with configure_method of parent
    super(EB_NEURON, self).configure_step()
def make_module_extra(self):
    """Set PYTHONPATH"""
    txt = super(EB_Tkinter, self).make_module_extra()
    # the _tkinter bits live in the parent dir of site-packages, so that's what goes on $PYTHONPATH
    tkinter_path = os.path.dirname(det_pylibdir())
    txt += self.module_generator.prepend_paths('PYTHONPATH', tkinter_path)
    return txt
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    # temporary stash location for the Tkinter bits
    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    # lib/pythonX.Y, i.e. the parent dir of site-packages
    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    shlib_ext = get_shared_lib_ext()
    # locate the versioned _tkinter shared object in lib-dynload
    tkinter_so = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
    tkinter_so_hits = glob.glob(tkinter_so)
    if len(tkinter_so_hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", tkinter_so_hits)
    self.tkinter_so_basename = os.path.basename(tkinter_so_hits[0])
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    # stash the Tkinter bits aside, wipe the full Python install, then move them back
    copy([os.path.join(pylibdir, x) for x in tkparts], tmpdir)

    rmtree2(self.installdir)
    mkdir(pylibdir, parents=True)

    try:
        shutil.move(os.path.join(tmpdir, tkparts[0]), pylibdir)
        shutil.move(os.path.join(tmpdir, os.path.basename(tkparts[1])), pylibdir)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
def prepare_step(self, *args, **kwargs):
    """Prepare for installing bundle of Python packages."""
    super(Bundle, self).prepare_step(*args, **kwargs)

    python_root = get_software_root('Python')
    if python_root is None:
        raise EasyBuildError("Python not included as dependency!")

    # when system Python is used, the first 'python' command in $PATH will not be $EBROOTPYTHON/bin/python,
    # since $EBROOTPYTHON is set to just 'Python' in that case
    # (see handling of allow_system_deps in EasyBlock.prepare_step)
    if which('python') == os.path.join(python_root, 'bin', 'python'):
        # if we're using a proper Python dependency, let det_pylibdir use 'python' like it does by default
        python_cmd = None
    else:
        # since det_pylibdir will use 'python' by default as command to determine Python lib directory,
        # we need to intervene when the system Python is used, by specifying version requirements
        # to pick_python_cmd so the right 'python' command is used;
        # if we're using the system Python and no Python version requirements are specified,
        # use major/minor version of Python being used in this EasyBuild session (as we also do in PythonPackage)
        req_py_majver = self.cfg['req_py_majver']
        if req_py_majver is None:
            req_py_majver = sys.version_info[0]
        req_py_minver = self.cfg['req_py_minver']
        if req_py_minver is None:
            req_py_minver = sys.version_info[1]

        python_cmd = pick_python_cmd(req_maj_ver=req_py_majver, req_min_ver=req_py_minver)

    # remember the Python lib dir for use by later steps
    self.pylibdir = det_pylibdir(python_cmd=python_cmd)
def sanity_check_step(self, *args, **kwargs):
    """Custom sanity check for numpy."""
    # determine pylibdir on the fly via the currently active 'python' command rather than
    # using self.pylibdir; required for installs spanning multiple Python versions (multi_deps)
    custom_paths = {
        'files': [],
        'dirs': [det_pylibdir()],
    }

    custom_commands = []
    if LooseVersion(self.version) < LooseVersion("1.10"):
        # _dotblas is required for decent performance of numpy.dot(), but only there in numpy 1.9.x and older
        custom_commands.append("python -c 'import numpy.core._dotblas'")
    else:
        # generic check to verify numpy v1.10.x and up was built against a CBLAS-enabled library
        # cfr. https://github.com/numpy/numpy/issues/6675#issuecomment-162601149
        blas_check_stmts = [
            "import sys",
            "import numpy",
            "blas_ok = 'HAVE_CBLAS' in dict(numpy.__config__.blas_opt_info['define_macros'])",
            "sys.exit((1, 0)[blas_ok])",
        ]
        custom_commands.append('python -c "%s"' % '; '.join(blas_check_stmts))

    return super(EB_numpy, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def make_module_extra(self):
    """Custom extra module file entries for QScintilla."""
    txt = super(EB_QScintilla, self).make_module_extra()
    # Python bindings were also installed when Python is among the dependencies
    if get_software_root('Python'):
        txt += self.module_generator.prepend_paths('PYTHONPATH', [det_pylibdir()])
    return txt
def prepare_step(self, *args, **kwargs):
    """Prepare environment for installing OpenCV."""
    super(EB_OpenCV, self).prepare_step(*args, **kwargs)

    self.pylibdir = det_pylibdir()

    ippicv_tarballs = glob.glob(os.path.join(self.builddir, 'ippicv*.tgz'))
    if ippicv_tarballs:
        if len(ippicv_tarballs) != 1:
            raise EasyBuildError("Found multiple ippicv*.tgz source tarballs in %s: %s",
                                 self.builddir, ippicv_tarballs)

        # drop the ippicv tarball where OpenCV's build system expects to find it:
        # 3rdparty/ippicv/downloads/linux-<md5sum>/
        tarball = ippicv_tarballs[0]
        md5sum = compute_checksum(tarball, checksum_type='md5')
        target_subdir = os.path.join('3rdparty', 'ippicv', 'downloads', 'linux-%s' % md5sum)
        copy([tarball], os.path.join(self.cfg['start_dir'], target_subdir))

        self.cfg.update('configopts', '-DWITH_IPP=ON')
def install_step(self):
    """Custom install procedure for QScintilla."""
    super(EB_QScintilla, self).install_step()

    # also install Python bindings if Python is included as a dependency
    python = get_software_root('Python')
    if python:
        # the Python bindings are configured/built from the 'Python' subdirectory of the sources
        pydir = os.path.join(self.cfg['start_dir'], 'Python')
        try:
            os.chdir(pydir)
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", pydir, err)

        # apparently this directory has to be there
        qsci_sipdir = os.path.join(self.installdir, 'share', 'sip', 'PyQt4')
        mkdir(qsci_sipdir, parents=True)

        # install destination, relative to installdir: <site-packages>/PyQt4
        pylibdir = os.path.join(det_pylibdir(), 'PyQt4')

        pyqt = get_software_root('PyQt')
        if pyqt is None:
            raise EasyBuildError("Failed to determine PyQt installation prefix, PyQt not included as dependency?")

        cfgopts = [
            '--destdir %s' % os.path.join(self.installdir, pylibdir),
            '--qsci-sipdir %s' % qsci_sipdir,
            '--qsci-incdir %s' % os.path.join(self.installdir, 'include'),
            '--qsci-libdir %s' % os.path.join(self.installdir, 'lib'),
            '--pyqt-sipdir %s' % os.path.join(pyqt, 'share', 'sip', 'PyQt4'),
            '--apidir %s' % os.path.join(self.installdir, 'qsci', 'api', 'python'),
            '--no-stubs',
        ]
        run_cmd("python configure.py %s" % ' '.join(cfgopts))

        # reuse the parent's build & install steps to build/install the bindings
        super(EB_QScintilla, self).build_step()
        super(EB_QScintilla, self).install_step()

        # symlink PyQt4's own modules next to the installed Qsci bindings
        # (everything except PyQt's __init__.py*, which would shadow ours)
        target_dir = os.path.join(self.installdir, pylibdir)
        pyqt_pylibdir = os.path.join(pyqt, pylibdir)
        try:
            os.chdir(target_dir)
            for entry in [x for x in os.listdir(pyqt_pylibdir) if not x.startswith('__init__.py')]:
                symlink(os.path.join(pyqt_pylibdir, entry), os.path.join(target_dir, entry))
        except OSError as err:
            raise EasyBuildError("Failed to symlink PyQt Python bindings in %s: %s", target_dir, err)

        # also requires empty __init__.py file to ensure Python modules can be imported from this location
        write_file(os.path.join(target_dir, '__init__.py'), '')
def sanity_check_step(self):
    """Custom sanity check for QScintilla."""
    # the shared library carries a Qt-version-specific suffix as of QScintilla 2.10
    if LooseVersion(self.version) < LooseVersion('2.10'):
        qsci_lib = 'libqscintilla2'
    elif self.pyqt_pkg_name == 'PyQt5':
        qsci_lib = 'libqscintilla2_qt5'
    else:
        qsci_lib = 'libqscintilla2_qt4'

    custom_paths = {
        'files': [os.path.join('lib', qsci_lib + '.' + get_shared_lib_ext())],
        'dirs': ['data', os.path.join('include', 'Qsci'), 'trans'],
    }

    custom_commands = []
    # Python bindings were also installed when Python is among the dependencies
    if get_software_root('Python'):
        custom_paths['dirs'].extend([
            os.path.join(det_pylibdir(), self.pyqt_pkg_name),
            os.path.join('qsci', 'api', 'python'),
            os.path.join('share', 'sip', self.pyqt_pkg_name),
        ])
        custom_commands.append("python -c 'import %s.Qsci'" % self.pyqt_pkg_name)

    super(EB_QScintilla, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def make_module_extra(self):
    """Add bin dirs and lib dirs and set CPLEX_HOME and CPLEXDIR"""
    txt = super(EB_CPLEX, self).make_module_extra()

    # avoid failing miserably under --module-only --force
    if os.path.exists(self.installdir):
        cwd = change_dir(self.installdir)
        # glob relative to installdir so the resulting paths are relative (module paths)
        bins = glob.glob(os.path.join('*', 'bin', 'x86-64*'))
        libs = glob.glob(os.path.join('*', 'lib', 'x86-64*', '*pic'))
        change_dir(cwd)
    else:
        bins = []
        libs = []

    # fix: pass the lists directly, the '[path for path in ...]' copies were pointless
    txt += self.module_generator.prepend_paths('PATH', bins)
    # bin dirs are deliberately included in $LD_LIBRARY_PATH as well, preserving original behavior
    txt += self.module_generator.prepend_paths('LD_LIBRARY_PATH', bins + libs)

    txt += self.module_generator.set_environment('CPLEX_HOME', os.path.join(self.installdir, 'cplex'))
    txt += self.module_generator.set_environment('CPLEXDIR', os.path.join(self.installdir, 'cplex'))

    if self.with_python:
        if self.multi_python:
            # rely on $EBPYTHONPREFIXES when multiple Python versions are involved
            txt += self.module_generator.prepend_paths(EBPYTHONPREFIXES, '')
        else:
            txt += self.module_generator.prepend_paths('PYTHONPATH', [det_pylibdir()])

    self.log.debug("make_module_extra added %s" % txt)
    return txt
def prepare_step(self, *args, **kwargs):
    """Prepare environment for installing OpenCV."""
    super(EB_OpenCV, self).prepare_step(*args, **kwargs)

    # remember the Python lib dir for use by later steps
    self.pylibdir = det_pylibdir()

    if get_cpu_architecture() == X86_64:
        # IPP are Intel's Integrated Performance Primitives - so only make sense on X86_64
        ippicv_tgz = glob.glob(os.path.join(self.builddir, 'ippicv*.tgz'))
        if ippicv_tgz:
            if len(ippicv_tgz) == 1:
                # copy ippicv tarball in the right place
                # expected location is 3rdparty/ippicv/downloads/linux-<md5sum>/
                ippicv_tgz = ippicv_tgz[0]
                ippicv_tgz_md5 = compute_checksum(ippicv_tgz, checksum_type='md5')
                target_subdir = os.path.join('3rdparty', 'ippicv', 'downloads', 'linux-%s' % ippicv_tgz_md5)
                copy([ippicv_tgz], os.path.join(self.cfg['start_dir'], target_subdir))

                self.cfg.update('configopts', '-DWITH_IPP=ON')

                # for recent OpenCV 3.x versions (and newer), we must also specify the download location
                # to prevent that the ippicv tarball is re-downloaded
                if LooseVersion(self.version) >= LooseVersion('3.4.4'):
                    self.cfg.update('configopts', '-DOPENCV_DOWNLOAD_PATH=%s' % self.builddir)
            else:
                raise EasyBuildError("Found multiple ippicv*.tgz source tarballs in %s: %s",
                                     self.builddir, ippicv_tgz)
def prepare_step(self, *args, **kwargs):
    """Prepare for installing bundle of Python packages."""
    super(Bundle, self).prepare_step(*args, **kwargs)

    # a Python dependency is mandatory for a bundle of Python packages
    python_root = get_software_root('Python')
    if python_root is None:
        raise EasyBuildError("Python not included as dependency!")

    self.pylibdir = det_pylibdir()
def make_module_extra(self):
    """Custom extra module file entries for Gurobi."""
    txt = super(EB_Gurobi, self).make_module_extra()

    # point Gurobi at its installation prefix and license file
    env_vars = [
        ('GUROBI_HOME', self.installdir),
        ('GRB_LICENSE_FILE', os.path.join(self.installdir, 'gurobi.lic')),
    ]
    for key, value in env_vars:
        txt += self.module_generator.set_environment(key, value)

    # Python bindings go on $PYTHONPATH when Python is among the dependencies
    if get_software_root('Python'):
        txt += self.module_generator.prepend_paths('PYTHONPATH', det_pylibdir())

    return txt
def sanity_check_step(self, *args, **kwargs):
    """Custom sanity check for scipy."""
    # determine pylibdir with the currently active 'python' command rather than self.pylibdir;
    # important for installations covering multiple Python versions (via multi_deps)
    custom_paths = {'files': [], 'dirs': [det_pylibdir()]}
    return super(EB_scipy, self).sanity_check_step(custom_paths=custom_paths)
def make_module_extra(self):
    """Custom extra module file entries for QScintilla."""
    txt = super(EB_QScintilla, self).make_module_extra()

    if get_software_root('Python'):
        multi_deps = self.cfg['multi_deps']
        if multi_deps and 'Python' in multi_deps:
            # multiple Python versions: rely on $EBPYTHONPREFIXES instead of a fixed path
            txt += self.module_generator.prepend_paths('EBPYTHONPREFIXES', '')
        else:
            txt += self.module_generator.prepend_paths('PYTHONPATH', [det_pylibdir()])

    return txt
def install_step(self):
    """Custom install procedure for QScintilla."""
    super(EB_QScintilla, self).install_step()

    # also install Python bindings if Python is included as a dependency
    python = get_software_root('Python')
    if python:
        # bindings are configured/built from the 'Python' subdirectory of the sources
        pydir = os.path.join(self.cfg['start_dir'], 'Python')
        try:
            os.chdir(pydir)
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", pydir, err)

        # apparently this directory has to be there
        qsci_sipdir = os.path.join(self.installdir, 'share', 'sip', 'PyQt4')
        mkdir(qsci_sipdir, parents=True)

        # install destination relative to installdir: <site-packages>/PyQt4
        pylibdir = os.path.join(det_pylibdir(), 'PyQt4')

        pyqt = get_software_root('PyQt')
        if pyqt is None:
            raise EasyBuildError("Failed to determine PyQt installation prefix, PyQt not included as dependency?")

        cfgopts = [
            '--destdir %s' % os.path.join(self.installdir, pylibdir),
            '--qsci-sipdir %s' % qsci_sipdir,
            '--qsci-incdir %s' % os.path.join(self.installdir, 'include'),
            '--qsci-libdir %s' % os.path.join(self.installdir, 'lib'),
            '--pyqt-sipdir %s' % os.path.join(pyqt, 'share', 'sip', 'PyQt4'),
            '--apidir %s' % os.path.join(self.installdir, 'qsci', 'api', 'python'),
            '--no-stubs',
        ]
        run_cmd("python configure.py %s" % ' '.join(cfgopts))

        # reuse the parent's build & install steps for the bindings themselves
        super(EB_QScintilla, self).build_step()
        super(EB_QScintilla, self).install_step()

        # symlink PyQt4's modules next to the Qsci bindings (all except PyQt's __init__.py*)
        target_dir = os.path.join(self.installdir, pylibdir)
        pyqt_pylibdir = os.path.join(pyqt, pylibdir)
        try:
            os.chdir(target_dir)
            for entry in [x for x in os.listdir(pyqt_pylibdir) if not x.startswith('__init__.py')]:
                symlink(os.path.join(pyqt_pylibdir, entry), os.path.join(target_dir, entry))
        except OSError as err:
            raise EasyBuildError("Failed to symlink PyQt Python bindings in %s: %s", target_dir, err)

        # also requires empty __init__.py file to ensure Python modules can be imported from this location
        write_file(os.path.join(target_dir, '__init__.py'), '')
def sanity_check_step(self):
    """Custom sanity check for Python."""
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkinter = 'tkinter'
    else:
        tkinter = 'Tkinter'
    custom_commands = ["python -c 'import %s'" % tkinter]

    # fix: dropped unused 'shlib_ext = get_shared_lib_ext()' local (dead code),
    # and use os.path.join rather than '%s/%s' string formatting for the path
    # _tkinter*.so lives in lib/pythonX.Y, the parent dir of site-packages
    pylibdir = os.path.dirname(det_pylibdir())
    custom_paths = {
        'files': [os.path.join(pylibdir, self.tkinter_so_basename)],
        'dirs': ['lib'],
    }
    super(EB_Python, self).sanity_check_step(custom_commands=custom_commands, custom_paths=custom_paths)
def make_module_extra(self):
    """Custom variables for OpenBabel module."""
    txt = super(EB_OpenBabel, self).make_module_extra()

    if self.with_python:
        # since OpenBabel 2.4.0 the Python bindings land under
        # ${PREFIX}/lib/pythonX.Y/site-packages rather than plain ${PREFIX}/lib
        if LooseVersion(self.version) < LooseVersion('2.4'):
            ob_pythonpath = 'lib'
        else:
            ob_pythonpath = det_pylibdir()
        txt += self.module_generator.prepend_paths('PYTHONPATH', [ob_pythonpath])

    # OpenBabel's own plugin/data locations
    babel_libdir = os.path.join(self.installdir, 'lib', 'openbabel', self.version)
    txt += self.module_generator.set_environment('BABEL_LIBDIR', babel_libdir)
    babel_datadir = os.path.join(self.installdir, 'share', 'openbabel', self.version)
    txt += self.module_generator.set_environment('BABEL_DATADIR', babel_datadir)

    return txt
def sanity_check_step(self):
    """Custom sanity check for QScintilla."""
    # the shared library gained a '_qt4' suffix as of QScintilla 2.10
    if LooseVersion(self.version) < LooseVersion('2.10'):
        qsci_lib = 'libqscintilla2'
    else:
        qsci_lib = 'libqscintilla2_qt4'

    shared_lib = os.path.join('lib', qsci_lib + '.' + get_shared_lib_ext())
    custom_paths = {
        'files': [shared_lib],
        'dirs': [
            'data',
            os.path.join('include', 'Qsci'),
            os.path.join(det_pylibdir(), 'PyQt4'),
            os.path.join('qsci', 'api', 'python'),
            os.path.join('share', 'sip', 'PyQt4'),
            'trans',
        ],
    }
    custom_commands = ["python -c 'import PyQt4.Qsci'"]

    super(EB_QScintilla, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def get_tkinter_so_basename(self, in_final_dir):
    """Return the basename of the (single) _tkinter*.so shared object."""
    site_packages = os.path.join(self.installdir, det_pylibdir())
    shlib_ext = get_shared_lib_ext()
    if in_final_dir:
        # The build has already taken place so the file will have been moved into the final pylibdir
        pattern = os.path.join(site_packages, '_tkinter*.' + shlib_ext)
    else:
        # still in Python's lib-dynload dir, which is a sibling of site-packages
        pattern = os.path.join(os.path.dirname(site_packages), 'lib-dynload', '_tkinter*.' + shlib_ext)

    hits = glob.glob(pattern)
    if len(hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", hits)

    return os.path.basename(hits[0])
def sanity_check_step(self):
    """Custom sanity check for Python."""
    # the Tkinter package was renamed to 'tkinter' in Python 3
    tkinter = 'tkinter' if LooseVersion(self.version) >= LooseVersion('3') else 'Tkinter'
    custom_commands = ["python -c 'import %s'" % tkinter]

    # the _tkinter shared object lives in lib/pythonX.Y, the parent dir of site-packages
    tkinter_so_path = os.path.join(os.path.dirname(det_pylibdir()), self.tkinter_so_basename)
    custom_paths = {
        'files': [tkinter_so_path],
        'dirs': ['lib'],
    }
    super(EB_Python, self).sanity_check_step(custom_commands=custom_commands, custom_paths=custom_paths)
def sanity_check_step(self):
    """Custom sanity check for Python."""
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) < LooseVersion('3'):
        tkinter = 'Tkinter'
    else:
        tkinter = 'tkinter'
    custom_commands = ["python -c 'import %s'" % tkinter]

    shlib_ext = get_shared_lib_ext()
    # _tkinter.so is expected in lib/pythonX.Y, the parent dir of site-packages
    pylibdir = os.path.dirname(det_pylibdir())
    custom_paths = {
        'files': ['%s/_tkinter.%s' % (pylibdir, shlib_ext)],
        'dirs': ['lib'],
    }
    super(EB_Python, self).sanity_check_step(custom_commands=custom_commands, custom_paths=custom_paths)
def prepare_step(self, *args, **kwargs):
    """Prepare environment for installing OpenCV."""
    super(EB_OpenCV, self).prepare_step(*args, **kwargs)

    self.pylibdir = det_pylibdir()

    hits = glob.glob(os.path.join(self.builddir, 'ippicv*.tgz'))
    if hits:
        if len(hits) != 1:
            raise EasyBuildError("Found multiple ippicv*.tgz source tarballs in %s: %s", self.builddir, hits)

        # place the ippicv tarball where the OpenCV build expects it,
        # i.e. 3rdparty/ippicv/downloads/linux-<md5sum>/
        tarball = hits[0]
        checksum = compute_checksum(tarball, checksum_type='md5')
        subdir = os.path.join('3rdparty', 'ippicv', 'downloads', 'linux-%s' % checksum)
        copy([tarball], os.path.join(self.cfg['start_dir'], subdir))

        self.cfg.update('configopts', '-DWITH_IPP=ON')
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    # temporary stash location for the Tkinter bits
    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    # lib/pythonX.Y, the parent dir of site-packages
    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    shlib_ext = get_shared_lib_ext()
    # locate the versioned _tkinter shared object in lib-dynload
    tkinter_so = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
    tkinter_so_hits = glob.glob(tkinter_so)
    if len(tkinter_so_hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", tkinter_so_hits)
    self.tkinter_so_basename = os.path.basename(tkinter_so_hits[0])
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    # stash Tkinter bits aside, wipe the full Python install, then move them back
    copy([os.path.join(pylibdir, x) for x in tkparts], tmpdir)

    rmtree2(self.installdir)
    mkdir(pylibdir, parents=True)

    try:
        shutil.move(os.path.join(tmpdir, tkparts[0]), pylibdir)
        shutil.move(os.path.join(tmpdir, os.path.basename(tkparts[1])), pylibdir)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
def sanity_check_step(self):
    """Custom sanity check for QScintilla."""
    # the shared library gained a '_qt4' suffix as of QScintilla 2.10
    if LooseVersion(self.version) < LooseVersion('2.10'):
        qsci_lib = 'libqscintilla2'
    else:
        qsci_lib = 'libqscintilla2_qt4'

    custom_paths = {
        'files': [os.path.join('lib', qsci_lib + '.' + get_shared_lib_ext())],
        'dirs': ['data', os.path.join('include', 'Qsci'), 'trans'],
    }

    custom_commands = []
    # Python bindings were also installed when Python is among the dependencies
    python = get_software_root('Python')
    if python:
        extra_dirs = [
            os.path.join(det_pylibdir(), 'PyQt4'),
            os.path.join('qsci', 'api', 'python'),
            os.path.join('share', 'sip', 'PyQt4'),
        ]
        custom_paths['dirs'] += extra_dirs
        custom_commands = ["python -c 'import PyQt4.Qsci'"]

    super(EB_QScintilla, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    # temporary stash location for the Tkinter bits
    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    # lib/pythonX.Y, the parent dir of site-packages
    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    # NOTE(review): hardcoded Python 2 layout ('lib-tk', unversioned '_tkinter.so');
    # presumably this variant predates Python 3 support — confirm before reuse
    tkparts = ["lib-tk", "lib-dynload/_tkinter.so"]
    # stash Tkinter bits aside, wipe the full Python install, then move them back
    copy([os.path.join(pylibdir, x) for x in tkparts], tmpdir)

    rmtree2(self.installdir)
    mkdir(pylibdir, parents=True)

    try:
        shutil.move(os.path.join(tmpdir, tkparts[0]), pylibdir)
        shutil.move(os.path.join(tmpdir, os.path.basename(tkparts[1])), pylibdir)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    # temporary stash location for the Tkinter bits
    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    # lib/pythonX.Y, the parent dir of site-packages
    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    shlib_ext = get_shared_lib_ext()
    # locate the versioned _tkinter shared object in lib-dynload
    tkinter_so = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
    tkinter_so_hits = glob.glob(tkinter_so)
    if len(tkinter_so_hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", tkinter_so_hits)
    self.tkinter_so_basename = os.path.basename(tkinter_so_hits[0])
    # the Tkinter package was renamed to 'tkinter' in Python 3
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    # stash Tkinter bits aside, wipe the full Python install, then move them back
    copy([os.path.join(pylibdir, x) for x in tkparts], tmpdir)

    remove_dir(self.installdir)

    move_file(os.path.join(tmpdir, tkparts[0]), os.path.join(pylibdir, tkparts[0]))
    tkinter_so = os.path.basename(tkparts[1])
    move_file(os.path.join(tmpdir, tkinter_so), os.path.join(pylibdir, tkinter_so))
def install_step(self):
    """Custom build, test & install procedure for Amber."""

    # unset $LIBS since it breaks the build
    env.unset_env_vars(['LIBS'])

    # define environment variables for MPI, BLAS/LAPACK & dependencies
    mklroot = get_software_root('imkl')
    openblasroot = get_software_root('OpenBLAS')
    if mklroot:
        env.setvar('MKL_HOME', mklroot)
    elif openblasroot:
        lapack = os.getenv('LIBLAPACK')
        if lapack is None:
            raise EasyBuildError("LIBLAPACK (from OpenBLAS) not found in environment.")
        else:
            # $GOTO is used here to pass the OpenBLAS link flags to the build
            env.setvar('GOTO', lapack)

    mpiroot = get_software_root(self.toolchain.MPI_MODULE_NAME[0])
    if mpiroot and self.toolchain.options.get('usempi', None):
        env.setvar('MPI_HOME', mpiroot)
        self.with_mpi = True
        if self.toolchain.mpi_family() == toolchain.INTELMPI:
            self.mpi_option = '-intelmpi'
        else:
            self.mpi_option = '-mpi'

    # options shared by every configure invocation below
    common_configopts = [self.cfg['configopts'], '--no-updates']

    if get_software_root('X11') is None:
        common_configopts.append('-noX11')

    if self.name == 'Amber' and self.cfg['static']:
        common_configopts.append('-static')

    netcdfroot = get_software_root('netCDF')
    if netcdfroot:
        common_configopts.extend(["--with-netcdf", netcdfroot])

    netcdf_fort_root = get_software_root('netCDF-Fortran')
    if netcdf_fort_root:
        common_configopts.extend(["--with-netcdf-fort", netcdf_fort_root])

    pythonroot = get_software_root('Python')
    if pythonroot:
        common_configopts.extend(["--with-python", os.path.join(pythonroot, 'bin', 'python')])

        self.pylibdir = det_pylibdir()
        # prepend install-dir pylibdir so Amber's Python pieces are importable during build/test
        pythonpath = os.environ.get('PYTHONPATH', '')
        env.setvar('PYTHONPATH', os.pathsep.join([os.path.join(self.installdir, self.pylibdir), pythonpath]))

    comp_fam = self.toolchain.comp_family()
    if comp_fam == toolchain.INTELCOMP:
        comp_str = 'intel'
    elif comp_fam == toolchain.GCC:
        comp_str = 'gnu'
    else:
        raise EasyBuildError("Don't know how to compile with compiler family '%s' -- check EasyBlock?", comp_fam)

    # The NAB compiles need openmp flag
    if self.toolchain.options.get('openmp', None):
        env.setvar('CUSTOMBUILDFLAGS', self.toolchain.get_flag('openmp'))

    # compose list of build targets: (configure flag, test target) pairs
    build_targets = [('', 'test')]

    if self.with_mpi:
        build_targets.append((self.mpi_option, 'test.parallel'))
        # hardcode to 4 MPI processes, minimal required to run all tests
        env.setvar('DO_PARALLEL', 'mpirun -np 4')

    cudaroot = get_software_root('CUDA')
    if cudaroot:
        env.setvar('CUDA_HOME', cudaroot)
        self.with_cuda = True
        build_targets.append(('-cuda', 'test.cuda'))
        if self.with_mpi:
            build_targets.append(("-cuda %s" % self.mpi_option, 'test.cuda_parallel'))

    # make freshly installed libs findable while tests run
    ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')
    env.setvar('LD_LIBRARY_PATH', os.pathsep.join([os.path.join(self.installdir, 'lib'), ld_lib_path]))

    # configure + build + test + clean, once per build variant (serial/MPI/CUDA/...)
    for flag, testrule in build_targets:
        # configure
        cmd = "%s ./configure %s" % (self.cfg['preconfigopts'], ' '.join(common_configopts + [flag, comp_str]))
        (out, _) = run_cmd(cmd, log_all=True, simple=False)

        # build in situ using 'make install'
        # note: not 'build'
        super(EB_Amber, self).install_step()

        # test
        if self.cfg['runtest']:
            run_cmd("make %s" % testrule, log_all=True, simple=False)

        # clean, overruling the normal 'build'
        run_cmd("make clean")
def configure_step(self):
    """
    Configure VMD for building.
    """
    # make sure required dependencies are available
    deps = {}
    for dep in ['FLTK', 'Mesa', 'netCDF', 'Python', 'Tcl', 'Tk']:
        deps[dep] = get_software_root(dep)
        if deps[dep] is None:
            raise EasyBuildError("Required dependency %s is missing", dep)

    # optional dependencies
    for dep in ['ACTC', 'CUDA', 'OptiX']:
        deps[dep] = get_software_root(dep)

    # specify Tcl/Tk locations & libraries
    tclinc = os.path.join(deps['Tcl'], 'include')
    tcllib = os.path.join(deps['Tcl'], 'lib')
    env.setvar('TCL_INCLUDE_DIR', tclinc)
    env.setvar('TCL_LIBRARY_DIR', tcllib)

    env.setvar('TK_INCLUDE_DIR', os.path.join(deps['Tk'], 'include'))
    env.setvar('TK_LIBRARY_DIR', os.path.join(deps['Tk'], 'lib'))

    tclshortver = '.'.join(get_software_version('Tcl').split('.')[:2])
    self.cfg.update('buildopts', 'TCLLDFLAGS="-ltcl%s"' % tclshortver)

    # Python locations
    pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
    env.setvar('PYTHON_INCLUDE_DIR', os.path.join(deps['Python'], 'include/python%s' % pyshortver))
    pylibdir = det_pylibdir()
    python_libdir = os.path.join(deps['Python'], os.path.dirname(pylibdir))
    env.setvar('PYTHON_LIBRARY_DIR', python_libdir)

    # numpy include location, easiest way to determine it is via numpy.get_include()
    # fix: use print(...) so this also works with Python 3 ('print x' is a syntax error there)
    out, ec = run_cmd("python -c 'import numpy; print(numpy.get_include())'", simple=False)
    if ec:
        raise EasyBuildError("Failed to determine numpy include directory: %s", out)
    else:
        env.setvar('NUMPY_INCLUDE_DIR', out.strip())

    # compiler commands
    self.cfg.update('buildopts', 'CC="%s"' % os.getenv('CC'))
    self.cfg.update('buildopts', 'CCPP="%s"' % os.getenv('CXX'))

    # source tarballs contains a 'plugins' and 'vmd-<version>' directory
    vmddir = os.path.join(self.cfg['start_dir'], '%s-%s' % (self.name.lower(), self.version))

    # plugins need to be built first (see http://www.ks.uiuc.edu/Research/vmd/doxygen/compiling.html)
    change_dir(os.path.join(self.cfg['start_dir'], 'plugins'))
    # NOTE(review): TCLLIB uses '-F%s' rather than '-L%s' — looks odd, but kept as-is; verify upstream
    cmd = "make LINUXAMD64 TCLLIB='-F%s' TCLINC='-I%s' %s" % (tcllib, tclinc, self.cfg['buildopts'])
    run_cmd(cmd, log_all=True, simple=False)

    # create plugins distribution
    plugindir = os.path.join(vmddir, 'plugins')
    env.setvar('PLUGINDIR', plugindir)
    self.log.info("Generating VMD plugins in %s", plugindir)
    run_cmd("make distrib %s" % self.cfg['buildopts'], log_all=True, simple=False)

    # explicitely mention whether or not we're building with CUDA/OptiX support
    if deps['CUDA']:
        self.log.info("Building with CUDA %s support", get_software_version('CUDA'))
        if deps['OptiX']:
            self.log.info("Building with Nvidia OptiX %s support", get_software_version('OptiX'))
        else:
            self.log.warn("Not building with Nvidia OptiX support!")
    else:
        self.log.warn("Not building with CUDA nor OptiX support!")

    # see http://www.ks.uiuc.edu/Research/vmd/doxygen/configure.html
    # LINUXAMD64: Linux 64-bit
    # LP64: build VMD as 64-bit binary
    # IMD: enable support for Interactive Molecular Dynamics (e.g. to connect to NAMD for remote simulations)
    # PTHREADS: enable support for POSIX threads
    # COLVARS: enable support for collective variables (related to NAMD/LAMMPS)
    # NOSILENT: verbose build command
    self.cfg.update('configopts', "LINUXAMD64 LP64 IMD PTHREADS COLVARS NOSILENT")

    # add additional configopts based on available dependencies
    for key in deps:
        if deps[key]:
            if key == 'Mesa':
                self.cfg.update('configopts', "OPENGL MESA")
            elif key == 'OptiX':
                self.cfg.update('configopts', "LIBOPTIX")
            elif key == 'Python':
                self.cfg.update('configopts', "PYTHON NUMPY")
            else:
                self.cfg.update('configopts', key.upper())

    # configure for building with Intel compilers specifically
    if self.toolchain.comp_family() == toolchain.INTELCOMP:
        self.cfg.update('configopts', 'ICC')

    # specify install location using environment variables
    env.setvar('VMDINSTALLBINDIR', os.path.join(self.installdir, 'bin'))
    env.setvar('VMDINSTALLLIBRARYDIR', os.path.join(self.installdir, 'lib'))

    # configure in vmd-<version> directory
    change_dir(vmddir)
    run_cmd("%s ./configure %s" % (self.cfg['preconfigopts'], self.cfg['configopts']))

    # change to 'src' subdirectory, ready for building
    change_dir(os.path.join(vmddir, 'src'))
def test_pythonpackage_det_pylibdir(self):
    """Test det_pylibdir function from pythonpackage.py."""
    from easybuild.easyblocks.generic.pythonpackage import det_pylibdir

    pylibdirs = (det_pylibdir(), det_pylibdir(plat_specific=True), det_pylibdir(python_cmd=sys.executable))
    for pylibdir in pylibdirs:
        # each variant must yield a lib*/python*/.../site-packages style path
        ok = pylibdir.startswith('lib')
        ok = ok and '/python' in pylibdir
        ok = ok and pylibdir.endswith('site-packages')
        self.assertTrue(ok)
def install_step(self): """Custom install procedure for QScintilla.""" super(EB_QScintilla, self).install_step() # also install Python bindings if Python is included as a dependency python = get_software_root('Python') if python: pydir = os.path.join(self.cfg['start_dir'], 'Python') try: os.chdir(pydir) except OSError as err: raise EasyBuildError("Failed to change to %s: %s", pydir, err) # apparently this directory has to be there qsci_sipdir = os.path.join(self.installdir, 'share', 'sip', self.pyqt_pkg_name) mkdir(qsci_sipdir, parents=True) pylibdir = os.path.join(det_pylibdir(), self.pyqt_pkg_name) pyshortver = '.'.join( get_software_version('Python').split('.')[:2]) sip_incdir = find_glob_pattern( os.path.join(self.pyqt_root, 'include', 'python%s*' % pyshortver), False) # in case PyQt5's sip was installed in directories that are specific to each version of python # as could happen with multi_deps pyqt_sipdir = find_glob_pattern( os.path.join(self.pyqt_root, 'share', 'python%s*' % pyshortver, 'site-packages', 'sip', self.pyqt_pkg_name), False) # fall back to a single sipdir if not pyqt_sipdir: pyqt_sipdir = os.path.join(self.pyqt_root, 'share', 'sip', self.pyqt_pkg_name) cfgopts = [ '--destdir %s' % os.path.join(self.installdir, pylibdir), '--qsci-sipdir %s' % qsci_sipdir, '--qsci-incdir %s' % os.path.join(self.installdir, 'include'), '--qsci-libdir %s' % os.path.join(self.installdir, 'lib'), '--pyqt-sipdir %s' % pyqt_sipdir, '--apidir %s' % os.path.join(self.installdir, 'qsci', 'api', 'python'), '--no-stubs', ] if sip_incdir: cfgopts += ['--sip-incdir %s' % sip_incdir] if LooseVersion(self.version) >= LooseVersion('2.10.7'): cfgopts.append('--no-dist-info') # This flag was added in version 2.11 if LooseVersion(self.version) >= LooseVersion('2.11'): cfgopts.append("--pyqt=%s" % self.pyqt_pkg_name) run_cmd("python configure.py %s" % ' '.join(cfgopts)) super(EB_QScintilla, self).build_step() super(EB_QScintilla, self).install_step() target_dir = 
os.path.join(self.installdir, pylibdir) pyqt_pylibdir = os.path.join(self.pyqt_root, pylibdir) try: os.chdir(target_dir) for entry in [ x for x in os.listdir(pyqt_pylibdir) if not x.startswith('__init__.py') ]: symlink(os.path.join(pyqt_pylibdir, entry), os.path.join(target_dir, entry)) except OSError as err: raise EasyBuildError( "Failed to symlink PyQt Python bindings in %s: %s", target_dir, err) # also requires empty __init__.py file to ensure Python modules can be imported from this location write_file(os.path.join(target_dir, '__init__.py'), '')
def configure_step(self): """ Configure VMD for building. """ # make sure required dependencies are available deps = {} for dep in ['FLTK', 'Mesa', 'netCDF', 'Python', 'Tcl', 'Tk']: deps[dep] = get_software_root(dep) if deps[dep] is None: raise EasyBuildError("Required dependency %s is missing", dep) # optional dependencies for dep in ['ACTC', 'CUDA', 'OptiX']: deps[dep] = get_software_root(dep) # specify Tcl/Tk locations & libraries tclinc = os.path.join(deps['Tcl'], 'include') tcllib = os.path.join(deps['Tcl'], 'lib') env.setvar('TCL_INCLUDE_DIR', tclinc) env.setvar('TCL_LIBRARY_DIR', tcllib) env.setvar('TK_INCLUDE_DIR', os.path.join(deps['Tk'], 'include')) env.setvar('TK_LIBRARY_DIR', os.path.join(deps['Tk'], 'lib')) tclshortver = '.'.join(get_software_version('Tcl').split('.')[:2]) self.cfg.update('buildopts', 'TCLLDFLAGS="-ltcl%s"' % tclshortver) # Netcdf locations netcdfinc = os.path.join(deps['netCDF'], 'include') netcdflib = os.path.join(deps['netCDF'], 'lib') # Python locations pymajver = get_software_version('Python').split('.')[0] out, ec = run_cmd( "python -c 'import sysconfig; print(sysconfig.get_path(\"include\"))'", simple=False) if ec: raise EasyBuildError("Failed to determine Python include path: %s", out) else: env.setvar('PYTHON_INCLUDE_DIR', out.strip()) pylibdir = det_pylibdir() python_libdir = os.path.join(deps['Python'], os.path.dirname(pylibdir)) env.setvar('PYTHON_LIBRARY_DIR', python_libdir) out, ec = run_cmd("python%s-config --libs" % pymajver, simple=False) if ec: raise EasyBuildError("Failed to determine Python library name: %s", out) else: env.setvar('PYTHON_LIBRARIES', out.strip()) # numpy include location, easiest way to determine it is via numpy.get_include() out, ec = run_cmd( "python -c 'import numpy; print(numpy.get_include())'", simple=False) if ec: raise EasyBuildError( "Failed to determine numpy include directory: %s", out) else: env.setvar('NUMPY_INCLUDE_DIR', out.strip()) # compiler commands self.cfg.update('buildopts', 
'CC="%s"' % os.getenv('CC')) self.cfg.update('buildopts', 'CCPP="%s"' % os.getenv('CXX')) # plugins need to be built first (see http://www.ks.uiuc.edu/Research/vmd/doxygen/compiling.html) change_dir(os.path.join(self.builddir, 'plugins')) cmd = ' '.join([ 'make', 'LINUXAMD64', "TCLINC='-I%s'" % tclinc, "TCLLIB='-L%s'" % tcllib, "TCLLDFLAGS='-ltcl%s'" % tclshortver, "NETCDFINC='-I%s'" % netcdfinc, "NETCDFLIB='-L%s'" % netcdflib, self.cfg['buildopts'], ]) run_cmd(cmd, log_all=True, simple=False) # create plugins distribution plugindir = os.path.join(self.vmddir, 'plugins') env.setvar('PLUGINDIR', plugindir) self.log.info("Generating VMD plugins in %s", plugindir) run_cmd("make distrib %s" % self.cfg['buildopts'], log_all=True, simple=False) # explicitely mention whether or not we're building with CUDA/OptiX support if deps['CUDA']: self.log.info("Building with CUDA %s support", get_software_version('CUDA')) if deps['OptiX']: self.log.info("Building with Nvidia OptiX %s support", get_software_version('OptiX')) else: self.log.warn("Not building with Nvidia OptiX support!") else: self.log.warn("Not building with CUDA nor OptiX support!") # see http://www.ks.uiuc.edu/Research/vmd/doxygen/configure.html # LINUXAMD64: Linux 64-bit # LP64: build VMD as 64-bit binary # IMD: enable support for Interactive Molecular Dynamics (e.g. 
to connect to NAMD for remote simulations) # PTHREADS: enable support for POSIX threads # COLVARS: enable support for collective variables (related to NAMD/LAMMPS) # NOSILENT: verbose build command # FLTK: enable the standard FLTK GUI # TK: enable TK to support extension GUI elements # OPENGL: enable OpenGL self.cfg.update( 'configopts', "LINUXAMD64 LP64 IMD PTHREADS COLVARS NOSILENT FLTK TK OPENGL", allow_duplicate=False) # add additional configopts based on available dependencies for key in deps: if deps[key]: if key == 'Mesa': self.cfg.update('configopts', "OPENGL MESA", allow_duplicate=False) elif key == 'OptiX': self.cfg.update('configopts', "LIBOPTIX", allow_duplicate=False) elif key == 'Python': self.cfg.update('configopts', "PYTHON NUMPY", allow_duplicate=False) else: self.cfg.update('configopts', key.upper(), allow_duplicate=False) # configure for building with Intel compilers specifically if self.toolchain.comp_family() == toolchain.INTELCOMP: self.cfg.update('configopts', 'ICC', allow_duplicate=False) # specify install location using environment variables env.setvar('VMDINSTALLBINDIR', os.path.join(self.installdir, 'bin')) env.setvar('VMDINSTALLLIBRARYDIR', os.path.join(self.installdir, 'lib')) # configure in vmd-<version> directory change_dir(self.vmddir) run_cmd("%s ./configure %s" % (self.cfg['preconfigopts'], self.cfg['configopts'])) # change to 'src' subdirectory, ready for building change_dir(os.path.join(self.vmddir, 'src'))
def install_step(self): """Build/install Xmipp using provided install.sh script.""" pylibdir = det_pylibdir() self.xmipp_pythonpaths = [ # location where Python packages will be installed by Xmipp installer pylibdir, 'protocols', os.path.join('libraries', 'bindings', 'python'), ] python_root = get_software_root('Python') if python_root: # extend $PYTHONPATH all_pythonpaths = [ os.path.join(self.installdir, p) for p in self.xmipp_pythonpaths ] # required so packages installed as extensions in Pythpn dep are picked up all_pythonpaths.append(os.path.join(python_root, pylibdir)) all_pythonpaths.append(os.environ.get('PYTHONPATH', '')) env.setvar('PYTHONPATH', os.pathsep.join(all_pythonpaths)) # location where Python packages will be installed by Xmipp installer must exist already (setuptools) mkdir(os.path.join(self.installdir, pylibdir), parents=True) # put dummy xmipp_python script in place if Python is used as a dependency bindir = os.path.join(self.installdir, 'bin') mkdir(bindir) xmipp_python = os.path.join(bindir, 'xmipp_python') xmipp_python_script_body = '\n'.join([ '#!/bin/sh', '%s/bin/python "$@"' % python_root, ]) write_file(xmipp_python, xmipp_python_script_body) adjust_permissions(xmipp_python, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) pyshortver = '.'.join( get_software_version('Python').split('.')[:2]) # make sure Python.h and numpy header are found env.setvar( 'CPATH', os.pathsep.join([ os.path.join(python_root, 'include', 'python%s' % pyshortver), os.path.join(python_root, pylibdir, 'numpy', 'core', 'include'), os.environ.get('CPATH', ''), ])) cmd_opts = [] # disable (re)building of supplied dependencies dep_names = [dep['name'] for dep in self.cfg['dependencies']] for dep in [ 'FFTW', 'HDF5', ('libjpeg-turbo', 'jpeg'), ('LibTIFF', 'tiff'), 'matplotlib', 'Python', 'SQLite', 'Tcl', 'Tk' ]: if isinstance(dep, tuple): dep, opt = dep else: opt = dep.lower() # don't check via get_software_root, check listed dependencies directly (relevant for FFTW) if dep 
in dep_names: cmd_opts.append('--%s=false' % opt) # Python should also provide numpy/mpi4py if dep == 'Python': cmd_opts.extend(['--numpy=false', '--mpi4py=false']) if '--tcl=false' in cmd_opts and '--tk=false' in cmd_opts: cmd_opts.append('--tcl-tk=false') # patch install.sh script to inject configure options # setting $CONFIGURE_ARGS or using --configure-args doesn't work... for line in fileinput.input('install.sh', inplace=1, backup='.orig.eb'): line = re.sub(r"^CONFIGURE_ARGS.*$", 'CONFIGURE_ARGS="%s"' % self.cfg['configopts'], line) sys.stdout.write(line) cmd = './install.sh -j %s --unattended=true %s' % ( self.cfg['parallel'], ' '.join(cmd_opts)) out, _ = run_cmd(cmd, log_all=True, simple=False) if not re.search("Xmipp has been successfully compiled", out): raise EasyBuildError( "Xmipp installation did not complete successfully?")
def install_step(self): """Build/install Xmipp using provided install.sh script.""" pylibdir = det_pylibdir() self.xmipp_pythonpaths = [ # location where Python packages will be installed by Xmipp installer pylibdir, 'protocols', os.path.join('libraries', 'bindings', 'python'), ] python_root = get_software_root('Python') if python_root: # extend $PYTHONPATH all_pythonpaths = [os.path.join(self.installdir, p) for p in self.xmipp_pythonpaths] # required so packages installed as extensions in Pythpn dep are picked up all_pythonpaths.append(os.path.join(python_root, pylibdir)) all_pythonpaths.append(os.environ.get('PYTHONPATH', '')) env.setvar('PYTHONPATH', os.pathsep.join(all_pythonpaths)) # location where Python packages will be installed by Xmipp installer must exist already (setuptools) mkdir(os.path.join(self.installdir, pylibdir), parents=True) # put dummy xmipp_python script in place if Python is used as a dependency bindir = os.path.join(self.installdir, 'bin') mkdir(bindir) xmipp_python = os.path.join(bindir, 'xmipp_python') xmipp_python_script_body = '\n'.join([ '#!/bin/sh', '%s/bin/python "$@"' % python_root, ]) write_file(xmipp_python, xmipp_python_script_body) adjust_permissions(xmipp_python, stat.S_IXUSR|stat.S_IXGRP|stat.S_IXOTH) pyshortver = '.'.join(get_software_version('Python').split('.')[:2]) # make sure Python.h and numpy header are found env.setvar('CPATH', os.pathsep.join([ os.path.join(python_root, 'include', 'python%s' % pyshortver), os.path.join(python_root, pylibdir, 'numpy', 'core', 'include'), os.environ.get('CPATH', ''), ])) cmd_opts = [] # disable (re)building of supplied dependencies dep_names = [dep['name'] for dep in self.cfg['dependencies']] for dep in ['FFTW', 'HDF5', ('libjpeg-turbo', 'jpeg'), ('LibTIFF', 'tiff'), 'matplotlib', 'Python', 'SQLite', 'Tcl', 'Tk']: if isinstance(dep, tuple): dep, opt = dep else: opt = dep.lower() # don't check via get_software_root, check listed dependencies directly (relevant for FFTW) if dep in 
dep_names: cmd_opts.append('--%s=false' % opt) # Python should also provide numpy/mpi4py if dep == 'Python': cmd_opts.extend(['--numpy=false', '--mpi4py=false']) if '--tcl=false' in cmd_opts and '--tk=false' in cmd_opts: cmd_opts.append('--tcl-tk=false') # patch install.sh script to inject configure options # setting $CONFIGURE_ARGS or using --configure-args doesn't work... for line in fileinput.input('install.sh', inplace=1, backup='.orig.eb'): line = re.sub(r"^CONFIGURE_ARGS.*$", 'CONFIGURE_ARGS="%s"' % self.cfg['configopts'], line) sys.stdout.write(line) cmd = './install.sh -j %s --unattended=true %s' % (self.cfg['parallel'], ' '.join(cmd_opts)) out, _ = run_cmd(cmd, log_all=True, simple=False) if not re.search("Xmipp has been successfully compiled", out): raise EasyBuildError("Xmipp installation did not complete successfully?")
def sanity_check_step(self): """Custom sanity check for EasyBuild.""" # check whether easy-install.pth contains correct entries easy_install_pth = os.path.join(self.installdir, det_pylibdir(), 'easy-install.pth') if os.path.exists(easy_install_pth): easy_install_pth_txt = read_file(easy_install_pth) for pkg in self.easybuild_pkgs: if pkg == 'vsc-base': # don't include strict version check for vsc-base pkg_regex = re.compile(r"^\./%s" % pkg.replace('-', '_'), re.M) else: major_minor_version = '.'.join(self.version.split('.')[:2]) pkg_regex = re.compile(r"^\./%s-%s" % (pkg.replace('-', '_'), major_minor_version), re.M) if not pkg_regex.search(easy_install_pth_txt): raise EasyBuildError("Failed to find pattern '%s' in %s: %s", pkg_regex.pattern, easy_install_pth, easy_install_pth_txt) # list of dirs to check, by package # boolean indicates whether dir is expected to reside in Python lib/pythonX/site-packages dir subdirs_by_pkg = { 'easybuild-framework': [('easybuild/framework', True), ('easybuild/tools', True)], 'easybuild-easyblocks': [('easybuild/easyblocks', True)], 'easybuild-easyconfigs': [('easybuild/easyconfigs', False)], } if LooseVersion(self.version) >= LooseVersion('2.0'): subdirs_by_pkg.update({ 'vsc-base': [('vsc/utils', True)], }) # final list of directories to check, by setup tool # order matters, e.g. setuptools before distutils eb_dirs = OrderedDict() eb_dirs['setuptools'] = [] eb_dirs['distutils.core'] = flatten([x for x in subdirs_by_pkg.values()]) # determine setup tool (setuptools or distutils) setup_tool = None for tool in eb_dirs.keys(): self.log.debug("Trying %s.." 
% tool) try: exec "from %s import setup" % tool del setup setup_tool = tool break except ImportError: pass self.log.debug('setup_tool: %s' % setup_tool) # for a setuptools installation, we need to figure out the egg dirs since we don't know the individual package versions if setup_tool == 'setuptools': try: installed_dirs = os.listdir(os.path.join(self.installdir, self.pylibdir)) for (pkg, subdirs) in subdirs_by_pkg.items(): sel_dirs = [x for x in installed_dirs if x.startswith(pkg.replace('-', '_'))] if not len(sel_dirs) == 1: raise EasyBuildError("Failed to isolate installed egg dir for %s", pkg) for (subdir, _) in subdirs: # eggs always go in Python lib/pythonX/site-packages dir with setuptools eb_dirs['setuptools'].append((os.path.join(sel_dirs[0], subdir), True)) except OSError, err: raise EasyBuildError("Failed to determine sanity check dir paths: %s", err)
def configure_step(self): """Apply the necessary CMake config opts.""" if LooseVersion(self.version) < LooseVersion('19'): # Configuring Amber <19 is done in install step. return # CMake will search a previous install directory for Amber-compiled libs. We will therefore # manually remove the install directory prior to configuration. remove_dir(self.installdir) external_libs_list = [] mpiroot = get_software_root(self.toolchain.MPI_MODULE_NAME[0]) if mpiroot and self.toolchain.options.get('usempi', None): self.with_mpi = True self.cfg.update('configopts', '-DMPI=TRUE') if self.toolchain.options.get('openmp', None): self.cfg.update('configopts', '-DOPENMP=TRUE') cudaroot = get_software_root('CUDA') if cudaroot: self.with_cuda = True self.cfg.update('configopts', '-DCUDA=TRUE') if get_software_root('NCCL'): self.cfg.update('configopts', '-DNCCL=TRUE') external_libs_list.append('nccl') pythonroot = get_software_root('Python') if pythonroot: self.cfg.update('configopts', '-DDOWNLOAD_MINICONDA=FALSE') self.cfg.update( 'configopts', '-DPYTHON_EXECUTABLE=%s' % os.path.join(pythonroot, 'bin', 'python')) self.pylibdir = det_pylibdir() pythonpath = os.environ.get('PYTHONPATH', '') env.setvar( 'PYTHONPATH', os.pathsep.join( [os.path.join(self.installdir, self.pylibdir), pythonpath])) if get_software_root('FFTW'): external_libs_list.append('fftw') if get_software_root('netCDF'): external_libs_list.append('netcdf') if get_software_root('netCDF-Fortran'): external_libs_list.append('netcdf-fortran') if get_software_root('zlib'): external_libs_list.append('zlib') if get_software_root('Boost'): external_libs_list.append('boost') if get_software_root('PnetCDF'): external_libs_list.append('pnetcdf') # Force libs for available deps (see cmake/3rdPartyTools.cmake in Amber source for list of 3rd party libs) # This provides an extra layer of checking but should already be handled by TRUST_SYSTEM_LIBS=TRUE external_libs = ";".join(external_libs_list) self.cfg.update('configopts', 
"-DFORCE_EXTERNAL_LIBS='%s'" % external_libs) if get_software_root('FFTW') or get_software_root('imkl'): self.cfg.update('configopts', '-DUSE_FFT=TRUE') # Set standard compile options self.cfg.update('configopts', '-DCHECK_UPDATES=FALSE') self.cfg.update('configopts', '-DAPPLY_UPDATES=FALSE') self.cfg.update('configopts', '-DTRUST_SYSTEM_LIBS=TRUE') self.cfg.update('configopts', '-DCOLOR_CMAKE_MESSAGES=FALSE') # Amber recommend running the tests from the sources, rather than putting in installation dir # due to size. We handle tests under the install step self.cfg.update('configopts', '-DINSTALL_TESTS=FALSE') self.cfg.update('configopts', '-DCOMPILER=AUTO') # configure using cmake super(EB_Amber, self).configure_step()
def install_step(self): """Custom build, test & install procedure for Amber.""" # unset $LIBS since it breaks the build env.unset_env_vars(['LIBS']) # define environment variables for MPI, BLAS/LAPACK & dependencies mklroot = get_software_root('imkl') openblasroot = get_software_root('OpenBLAS') if mklroot: env.setvar('MKL_HOME', mklroot) elif openblasroot: lapack = os.getenv('LIBLAPACK') if lapack is None: raise EasyBuildError("LIBLAPACK (from OpenBLAS) not found in environment.") else: env.setvar('GOTO', lapack) mpiroot = get_software_root(self.toolchain.MPI_MODULE_NAME[0]) if mpiroot and self.toolchain.options.get('usempi', None): env.setvar('MPI_HOME', mpiroot) self.with_mpi = True if self.toolchain.mpi_family() == toolchain.INTELMPI: self.mpi_option = '-intelmpi' else: self.mpi_option = '-mpi' common_configopts = [self.cfg['configopts'], '--no-updates'] if get_software_root('X11') is None: common_configopts.append('-noX11') if self.name == 'Amber' and self.cfg['static']: common_configopts.append('-static') netcdfroot = get_software_root('netCDF') if netcdfroot: common_configopts.extend(["--with-netcdf", netcdfroot]) netcdf_fort_root = get_software_root('netCDF-Fortran') if netcdf_fort_root: common_configopts.extend(["--with-netcdf-fort", netcdf_fort_root]) pythonroot = get_software_root('Python') if pythonroot: common_configopts.extend(["--with-python", os.path.join(pythonroot, 'bin', 'python')]) self.pylibdir = det_pylibdir() pythonpath = os.environ.get('PYTHONPATH', '') env.setvar('PYTHONPATH', os.pathsep.join([os.path.join(self.installdir, self.pylibdir), pythonpath])) comp_fam = self.toolchain.comp_family() if comp_fam == toolchain.INTELCOMP: comp_str = 'intel' elif comp_fam == toolchain.GCC: comp_str = 'gnu' else: raise EasyBuildError("Don't know how to compile with compiler family '%s' -- check EasyBlock?", comp_fam) # The NAB compiles need openmp flag if self.toolchain.options.get('openmp', None): env.setvar('CUSTOMBUILDFLAGS', 
self.toolchain.get_flag('openmp')) # compose list of build targets build_targets = [('', 'test')] if self.with_mpi: build_targets.append((self.mpi_option, 'test.parallel')) # hardcode to 4 MPI processes, minimal required to run all tests env.setvar('DO_PARALLEL', 'mpirun -np 4') cudaroot = get_software_root('CUDA') if cudaroot: env.setvar('CUDA_HOME', cudaroot) self.with_cuda = True build_targets.append(('-cuda', 'test.cuda')) if self.with_mpi: build_targets.append(("-cuda %s" % self.mpi_option, 'test.cuda_parallel')) ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '') env.setvar('LD_LIBRARY_PATH', os.pathsep.join([os.path.join(self.installdir, 'lib'), ld_lib_path])) for flag, testrule in build_targets: # configure cmd = "%s ./configure %s" % (self.cfg['preconfigopts'], ' '.join(common_configopts + [flag, comp_str])) (out, _) = run_cmd(cmd, log_all=True, simple=False) # build in situ using 'make install' # note: not 'build' super(EB_Amber, self).install_step() # test if self.cfg['runtest']: run_cmd("make %s" % testrule, log_all=True, simple=False) # clean, overruling the normal 'build' run_cmd("make clean")