    def install_step(self):
        """Extend make install to make sure that the 'python' command is present."""
        super(EB_Python, self).install_step()

        python_binary_path = os.path.join(self.installdir, 'bin', 'python')
        if not os.path.isfile(python_binary_path):
            symlink(python_binary_path + self.pyshortver, python_binary_path)
Example #2
    def extract_step(self):
        """
        Prepare a combined MXNet source tree. Move all submodules
        to their right place.
        """
        # Extract everything into separate directories.
        super(EB_MXNet, self).extract_step()

        mxnet_dirs = glob.glob(os.path.join(self.builddir, '*mxnet-*'))
        if len(mxnet_dirs) == 1:
            self.mxnet_src_dir = mxnet_dirs[0]
            self.log.debug("MXNet dir is: %s", self.mxnet_src_dir)
        else:
            raise EasyBuildError("Failed to find/isolate MXNet source directory: %s", mxnet_dirs)

        for srcdir in [d for d in os.listdir(self.builddir) if d != os.path.basename(self.mxnet_src_dir)]:
            submodule, _, _ = srcdir.rpartition('-')
            newdir = os.path.join(self.mxnet_src_dir, submodule)
            olddir = os.path.join(self.builddir, srcdir)
            # first remove empty existing directory
            rmtree2(newdir)
            try:
                shutil.move(olddir, newdir)
            except IOError as err:
                raise EasyBuildError("Failed to move %s to %s: %s", olddir, newdir, err)

        # the nnvm submodule has dmlc-core as a submodule too. Let's put a symlink in place.
        newdir = os.path.join(self.mxnet_src_dir, "nnvm", "dmlc-core")
        olddir = os.path.join(self.mxnet_src_dir, "dmlc-core")
        rmtree2(newdir)
        symlink(olddir, newdir)
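
# A minimal standalone sketch (hypothetical directory names) of the name handling used
# above: extracted '<submodule>-<version>' directories are mapped onto '<submodule>'
# via rpartition, so dashes inside the submodule name itself are preserved.
for srcdir in ['dmlc-core-0.3.9', 'nnvm-1.0', 'ps-lite-20170328']:
    submodule, _, version = srcdir.rpartition('-')
    print('%s -> %s (version %s)' % (srcdir, submodule, version))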
Example #3
    def install_step(self):
        """Install using chimera.bin."""

        try:
            os.chdir(self.cfg['start_dir'])
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", self.cfg['start_dir'], err)

        # Chimera comes bundled with its dependencies, and follows a
        # UNIX file system layout with 'bin', 'include', 'lib', etc.  To
        # avoid conflicts with other modules, the Chimera module must
        # not add the 'bin', 'include', 'lib', etc. directories to PATH,
        # CPATH, LD_LIBRARY_PATH, etc.  We achieve this by installing
        # Chimera in a subdirectory (called 'chimera') instead of the
        # root directory.
        cmd = "./chimera.bin -q -d %s" % os.path.join(self.installdir,
                                                      'chimera')
        run_cmd(cmd, log_all=True, simple=True)

        # Create a symlink to the Chimera startup script; this symlink
        # will end up in PATH.  The startup script sets up the
        # environment, so that Chimera finds its dependencies.
        mkdir(os.path.join(self.installdir, 'bin'))
        symlink(os.path.join(self.installdir, 'chimera', 'bin', 'chimera'),
                os.path.join(self.installdir, 'bin', 'chimera'))
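
# A minimal standalone sketch of the layout created above (plain os/tempfile calls instead
# of EasyBuild's mkdir/symlink helpers, and a stand-in file for the real installer output):
# only bin/chimera is exposed at the top level, and it resolves to the bundled startup script.
import os
import tempfile

installdir = tempfile.mkdtemp(suffix='-chimera')  # hypothetical install prefix
os.makedirs(os.path.join(installdir, 'chimera', 'bin'))
open(os.path.join(installdir, 'chimera', 'bin', 'chimera'), 'w').close()  # stand-in startup script

os.makedirs(os.path.join(installdir, 'bin'))
os.symlink(os.path.join(installdir, 'chimera', 'bin', 'chimera'),
           os.path.join(installdir, 'bin', 'chimera'))
print(os.path.realpath(os.path.join(installdir, 'bin', 'chimera')))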
Example #4
def include_module_naming_schemes(tmpdir, paths):
    """Include module naming schemes at specified locations."""
    mns_path = os.path.join(tmpdir, 'included-module-naming-schemes')

    set_up_eb_package(mns_path, 'easybuild.tools.module_naming_scheme')

    mns_dir = os.path.join(mns_path, 'easybuild', 'tools', 'module_naming_scheme')

    allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py']
    for mns_module in allpaths:
        filename = os.path.basename(mns_module)
        target_path = os.path.join(mns_dir, filename)
        symlink(mns_module, target_path)

    included_mns = [x for x in os.listdir(mns_dir) if x not in ['__init__.py']]
    _log.debug("Included module naming schemes: %s", included_mns)

    # inject path into Python search path, and reload modules to get it 'registered' in sys.modules
    sys.path.insert(0, mns_path)

    # hard inject location to included module naming schemes into Python search path
    # only prepending to sys.path is not enough due to 'declare_namespace' in module_naming_scheme/__init__.py
    new_path = os.path.join(mns_path, 'easybuild', 'tools', 'module_naming_scheme')
    easybuild.tools.module_naming_scheme.__path__.insert(0, new_path)

    # sanity check: verify that included module naming schemes can be imported (from expected location)
    verify_imports([os.path.splitext(mns)[0] for mns in included_mns], 'easybuild.tools.module_naming_scheme', mns_dir)

    return mns_path
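
# A minimal, self-contained sketch of the __path__ injection pattern used above, with a
# hypothetical 'demo_pkg' package built on the fly (a regular package is used here for
# brevity; the point is the same: once the package object exists, prepending to sys.path
# alone is not enough, its __path__ must also be extended to pick up the extra location).
import importlib
import os
import sys
import tempfile

base = tempfile.mkdtemp()
pkg_dir = os.path.join(base, 'demo_pkg')
os.makedirs(pkg_dir)
open(os.path.join(pkg_dir, '__init__.py'), 'w').close()

extra = tempfile.mkdtemp()
extra_pkg_dir = os.path.join(extra, 'demo_pkg')
os.makedirs(extra_pkg_dir)
with open(os.path.join(extra_pkg_dir, 'plugin.py'), 'w') as fh:
    fh.write("NAME = 'included plugin'\n")

sys.path.insert(0, base)
import demo_pkg

# hard inject the extra location into the package's search path
demo_pkg.__path__.insert(0, extra_pkg_dir)
plugin = importlib.import_module('demo_pkg.plugin')
print(plugin.NAME)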
Example #5
    def extensions_step(self):
        """Build & Install both Python and R extension"""
        # we start with the python bindings
        self.py_ext.src = os.path.join(self.mxnet_src_dir, "python")
        change_dir(self.py_ext.src)

        self.py_ext.prerun()
        self.py_ext.run(unpack_src=False)
        self.py_ext.postrun()

        # next up, the R bindings
        self.r_ext.src = os.path.join(self.mxnet_src_dir, "R-package")
        change_dir(self.r_ext.src)
        mkdir("inst")
        symlink(os.path.join(self.installdir, "lib"), os.path.join("inst", "libs"))
        symlink(os.path.join(self.installdir, "include"), os.path.join("inst", "include"))

        # MXNet doesn't provide a list of its R dependencies by default
        write_file("NAMESPACE", R_NAMESPACE)
        change_dir(self.mxnet_src_dir)
        self.r_ext.prerun()
        # MXNet is just weird. To install the R extension, we have to:
        # - First install the extension like it is
        # - Let R export the extension again. By doing this, all the dependencies get
        #   correctly filled and some mappings are done
        # - Reinstall the exported version
        self.r_ext.run()
        run_cmd("R_LIBS=%s Rscript -e \"require(mxnet); mxnet:::mxnet.export(\\\"R-package\\\")\"" % self.installdir)
        change_dir(self.r_ext.src)
        self.r_ext.run()
        self.r_ext.postrun()
Example #6
    def install_step(self):
        """Extend make install to make sure that the 'python' command is present."""
        super(EB_Python, self).install_step()

        python_binary_path = os.path.join(self.installdir, 'bin', 'python')
        if not os.path.isfile(python_binary_path):
            pyver = '.'.join(self.version.split('.')[:2])
            symlink(python_binary_path + pyver, python_binary_path)
Example #7
    def install_step(self):
        """Extend make install to make sure that the 'python' command is present."""
        super(EB_Python, self).install_step()

        python_binary_path = os.path.join(self.installdir, 'bin', 'python')
        if not os.path.isfile(python_binary_path):
            symlink(python_binary_path + self.pyshortver, python_binary_path)

        if self.cfg['ebpythonprefixes']:
            write_file(os.path.join(self.installdir, self.pythonpath, 'sitecustomize.py'), SITECUSTOMIZE)
Example #8
def include_toolchains(tmpdir, paths):
    """Include toolchains and toolchain components at specified locations."""
    toolchains_path = os.path.join(tmpdir, 'included-toolchains')
    toolchain_subpkgs = ['compiler', 'fft', 'linalg', 'mpi']

    set_up_eb_package(toolchains_path, 'easybuild.toolchains', subpkgs=toolchain_subpkgs)

    tcs_dir = os.path.join(toolchains_path, 'easybuild', 'toolchains')

    allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py']
    for toolchain_module in allpaths:
        filename = os.path.basename(toolchain_module)

        parent_dir = os.path.basename(os.path.dirname(toolchain_module))

        # toolchain components are expected to be in a directory named according to the type of component
        if parent_dir in toolchain_subpkgs:
            target_path = os.path.join(tcs_dir, parent_dir, filename)
        else:
            target_path = os.path.join(tcs_dir, filename)

        if not os.path.exists(target_path):
            symlink(toolchain_module, target_path)

    included_toolchains = [x for x in os.listdir(tcs_dir) if x not in ['__init__.py'] + toolchain_subpkgs]
    _log.debug("Included toolchains: %s", included_toolchains)

    included_subpkg_modules = {}
    for subpkg in toolchain_subpkgs:
        included_subpkg_modules[subpkg] = [x for x in os.listdir(os.path.join(tcs_dir, subpkg)) if x != '__init__.py']
        _log.debug("Included toolchain %s components: %s", subpkg, included_subpkg_modules[subpkg])

    # inject path into Python search path, and reload modules to get it 'registered' in sys.modules
    sys.path.insert(0, toolchains_path)

    # reload toolchain modules and hard inject location to included toolchains into Python search path
    # only prepending to sys.path is not enough due to 'declare_namespace' in toolchains/*/__init__.py
    easybuild.toolchains.__path__.insert(0, os.path.join(toolchains_path, 'easybuild', 'toolchains'))
    for subpkg in toolchain_subpkgs:
        tcpkg = 'easybuild.toolchains.%s' % subpkg
        sys.modules[tcpkg].__path__.insert(0, os.path.join(toolchains_path, 'easybuild', 'toolchains', subpkg))

    # sanity check: verify that included toolchain modules can be imported (from expected location)
    verify_imports([os.path.splitext(mns)[0] for mns in included_toolchains], 'easybuild.toolchains', tcs_dir)
    for subpkg in toolchain_subpkgs:
        pkg = '.'.join(['easybuild', 'toolchains', subpkg])
        loc = os.path.join(tcs_dir, subpkg)
        verify_imports([os.path.splitext(tcmod)[0] for tcmod in included_subpkg_modules[subpkg]], pkg, loc)

    return toolchains_path
Example #9
    def install_step(self):
        """Custom install procedure for QScintilla."""

        super(EB_QScintilla, self).install_step()

        # also install Python bindings if Python is included as a dependency
        python = get_software_root('Python')
        if python:
            pydir = os.path.join(self.cfg['start_dir'], 'Python')
            try:
                os.chdir(pydir)
            except OSError as err:
                raise EasyBuildError("Failed to change to %s: %s", pydir, err)

            # apparently this directory has to be there
            qsci_sipdir = os.path.join(self.installdir, 'share', 'sip', 'PyQt4')
            mkdir(qsci_sipdir, parents=True)

            pylibdir = os.path.join(det_pylibdir(), 'PyQt4')

            pyqt = get_software_root('PyQt')
            if pyqt is None:
                raise EasyBuildError("Failed to determine PyQt installation prefix, PyQt not included as dependency?")

            cfgopts = [
                '--destdir %s' % os.path.join(self.installdir, pylibdir),
                '--qsci-sipdir %s' % qsci_sipdir,
                '--qsci-incdir %s' % os.path.join(self.installdir, 'include'),
                '--qsci-libdir %s' % os.path.join(self.installdir, 'lib'),
                '--pyqt-sipdir %s' % os.path.join(pyqt, 'share', 'sip', 'PyQt4'),
                '--apidir %s' % os.path.join(self.installdir, 'qsci', 'api', 'python'),
                '--no-stubs',
            ]
            run_cmd("python configure.py %s" % ' '.join(cfgopts))

            super(EB_QScintilla, self).build_step()
            super(EB_QScintilla, self).install_step()

            target_dir = os.path.join(self.installdir, pylibdir)
            pyqt_pylibdir = os.path.join(pyqt, pylibdir)
            try:
                os.chdir(target_dir)
                for entry in [x for x in os.listdir(pyqt_pylibdir) if not x.startswith('__init__.py')]:
                    symlink(os.path.join(pyqt_pylibdir, entry), os.path.join(target_dir, entry))
            except OSError as err:
                raise EasyBuildError("Failed to symlink PyQt Python bindings in %s: %s", target_dir, err)

            # also requires empty __init__.py file to ensure Python modules can be imported from this location
            write_file(os.path.join(target_dir, '__init__.py'), '')
Example #10
    def install_step(self):
        """
        Install using make install (for non-source installations),
        or by symlinking files (old versions, < 3).
        """
        if LooseVersion(self.version) >= LooseVersion("3"):
            if not self.cfg['sourceinstall']:
                super(EB_PETSc, self).install_step()

        else:  # old versions (< 3.x)

            for fn in ['petscconf.h', 'petscconfiginfo.h', 'petscfix.h', 'petscmachineinfo.h']:
                includedir = os.path.join(self.installdir, 'include')
                bmakedir = os.path.join(self.installdir, 'bmake', 'linux-gnu-c-opt')
                symlink(os.path.join(bmakedir, fn), os.path.join(includedir, fn))
Example #11
    def install_step(self):
        """Custom install procedure for MRtrix."""
        if LooseVersion(self.version) < LooseVersion('0.3'):
            cmd = "python build -verbose install=%s linkto=" % self.installdir
            run_cmd(cmd, log_all=True, simple=True, log_ok=True)

        elif LooseVersion(self.version) >= LooseVersion('3.0'):
            copy(os.path.join(self.builddir, 'bin'), self.installdir)
            copy(os.path.join(self.builddir, 'lib'), self.installdir)

        elif LooseVersion(self.version) >= LooseVersion('0.3.14'):
            copy(glob.glob(os.path.join(self.builddir, 'release', '*')), self.installdir)
            copy(os.path.join(self.builddir, 'scripts'), self.installdir)
            # some scripts expect 'release/bin' to be there, so we put a symlink in place
            symlink(self.installdir, os.path.join(self.installdir, 'release'))
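
# A minimal standalone sketch (temp prefix, plain os.symlink instead of EasyBuild's helper)
# of the self-referential 'release' symlink created above: it points back at the install
# prefix itself, so a path like 'release/bin' resolves to the real 'bin' directory.
import os
import tempfile

installdir = tempfile.mkdtemp(suffix='-mrtrix')  # hypothetical install prefix
os.makedirs(os.path.join(installdir, 'bin'))
os.symlink(installdir, os.path.join(installdir, 'release'))
print(os.path.realpath(os.path.join(installdir, 'release', 'bin')))  # resolves to <installdir>/bin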
Example #12
def include_easyblocks(tmpdir, paths):
    """Include generic and software-specific easyblocks found in specified locations."""
    easyblocks_path = os.path.join(tmpdir, 'included-easyblocks')

    set_up_eb_package(easyblocks_path, 'easybuild.easyblocks',
                      subpkgs=['generic'], pkg_init_body=EASYBLOCKS_PKG_INIT_BODY)

    easyblocks_dir = os.path.join(easyblocks_path, 'easybuild', 'easyblocks')

    allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py']
    for easyblock_module in allpaths:
        filename = os.path.basename(easyblock_module)

        # generic easyblocks are expected to be in a directory named 'generic'
        parent_dir = os.path.basename(os.path.dirname(easyblock_module))
        if parent_dir == 'generic':
            target_path = os.path.join(easyblocks_dir, 'generic', filename)
        else:
            target_path = os.path.join(easyblocks_dir, filename)

        symlink(easyblock_module, target_path)

    included_ebs = [x for x in os.listdir(easyblocks_dir) if x not in ['__init__.py', 'generic']]
    included_generic_ebs = [x for x in os.listdir(os.path.join(easyblocks_dir, 'generic')) if x != '__init__.py']
    _log.debug("Included generic easyblocks: %s", included_generic_ebs)
    _log.debug("Included software-specific easyblocks: %s", included_ebs)

    # prepend new location to Python search path
    sys.path.insert(0, easyblocks_path)

    # make sure easybuild.easyblocks(.generic) can be imported
    import easybuild.easyblocks
    import easybuild.easyblocks.generic

    # hard inject location to included (generic) easyblocks into Python search path
    # only prepending to sys.path is not enough due to 'declare_namespace' in easybuild/easyblocks/__init__.py
    new_path = os.path.join(easyblocks_path, 'easybuild', 'easyblocks')
    easybuild.easyblocks.__path__.insert(0, new_path)
    new_path = os.path.join(new_path, 'generic')
    easybuild.easyblocks.generic.__path__.insert(0, new_path)

    # sanity check: verify that included easyblocks can be imported (from expected location)
    for subdir, ebs in [('', included_ebs), ('generic', included_generic_ebs)]:
        pkg = '.'.join(['easybuild', 'easyblocks', subdir]).strip('.')
        loc = os.path.join(easyblocks_dir, subdir)
        verify_imports([os.path.splitext(eb)[0] for eb in ebs], pkg, loc)

    return easyblocks_path
Example #13
    def install_step(self):
        """Custom install procedure for MRtrix."""
        if LooseVersion(self.version) < LooseVersion('0.3'):
            cmd = "python build -verbose install=%s linkto=" % self.installdir
            run_cmd(cmd, log_all=True, simple=True, log_ok=True)

        elif LooseVersion(self.version) >= LooseVersion('3.0'):
            copy(os.path.join(self.builddir, 'bin'), self.installdir)
            copy(os.path.join(self.builddir, 'lib'), self.installdir)

        elif LooseVersion(self.version) >= LooseVersion('0.3.14'):
            copy(glob.glob(os.path.join(self.builddir, 'release', '*')),
                 self.installdir)
            copy(os.path.join(self.builddir, 'scripts'), self.installdir)
            # some scripts expect 'release/bin' to be there, so we put a symlink in place
            symlink(self.installdir, os.path.join(self.installdir, 'release'))
Example #14
    def install_step(self):
        """Installation of OpenSSL and SSL certificates"""
        super(EB_OpenSSL, self).install_step()

        # SSL certificates
        # OPENSSLDIR is already populated by the installation of OpenSSL
        # try to symlink system certificates in the empty 'certs' directory
        openssl_certs_dir = os.path.join(self.installdir, 'ssl', 'certs')

        if self.ssl_certs_dir:
            remove_dir(openssl_certs_dir)
            symlink(self.ssl_certs_dir, openssl_certs_dir)
        else:
            print_warning(
                "OpenSSL successfully installed without system SSL certificates. "
                "Some packages might experience limited functionality.")
Example #15
class EB_Chimera(PackedBinary):
    """Support for installing Chimera."""
    def extract_step(self, verbose=False):
        """Custom extraction of sources for Chimera: unpack installation file
        to obtain chimera.bin installer."""

        cmd = "unzip -d %s %s" % (self.builddir, self.src[0]['path'])
        run_cmd(cmd, log_all=True, simple=True)

    def install_step(self):
        """Install using chimera.bin."""

        try:
            os.chdir(self.cfg['start_dir'])
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s",
                                 self.cfg['start_dir'], err)

        # Chimera comes bundled with its dependencies, and follows a
        # UNIX file system layout with 'bin', 'include', 'lib', etc.  To
        # avoid conflicts with other modules, the Chimera module must
        # not add the 'bin', 'include', 'lib', etc. directories to PATH,
        # CPATH, LD_LIBRARY_PATH, etc.  We achieve this by installing
        # Chimera in a subdirectory (called 'chimera') instead of the
        # root directory.
        cmd = "./chimera.bin -q -d %s" % os.path.join(self.installdir,
                                                      'chimera')
        run_cmd(cmd, log_all=True, simple=True)

        # Create a symlink to the Chimera startup script; this symlink
        # will end up in PATH.  The startup script sets up the
        # environment, so that Chimera finds its dependencies.
        mkdir(os.path.join(self.installdir, 'bin'))
        symlink(os.path.join(self.installdir, 'chimera', 'bin', 'chimera'),
                os.path.join(self.installdir, 'bin', 'chimera'))
Example #16
    def install_step(self):
        """Custom install procedure for wxPython."""
        # wxPython configure, build, and install with one script
        preinst_opts = self.cfg['preinstallopts']
        INSTALL_CMD = "%(preinst_opts)s %(pycmd)s %(script)s --prefix=%(prefix)s"
        if LooseVersion(self.version) >= LooseVersion("4"):
            script = 'build.py'
            cmd = INSTALL_CMD % {
                'preinst_opts': preinst_opts,
                'pycmd': self.python_cmd,
                'script': script,
                'prefix': self.installdir,
            }
            # a plain install fails (it tries to install into the Python module itself), so build a wheel first and then install it
            cmd = cmd + " %s -v bdist_wheel" % self.wxflag
            run_cmd(cmd, log_all=True, simple=True)

            # get whether it is 35, 36, 37, 38, etc.
            pyver = det_python_version(self.python_cmd)
            pyver = pyver[0] + pyver[2]
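            # note: this character indexing assumes a single-digit minor version,
            # e.g. '3.8.2' -> '38'; for a '3.10.x' version it would yield '31'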

            cmd = "pip install --no-deps --prefix=%(prefix)s dist/wxPython-%(version)s-cp%(pyver)s*.whl" % {
                'prefix': self.installdir,
                'version': self.version,
                'pyver': pyver
            }
        else:
            script = os.path.join('wxPython', 'build-wxpython.py')
            cmd = INSTALL_CMD % {
                'preinst_opts': preinst_opts,
                'pycmd': self.python_cmd,
                'script': script,
                'prefix': self.installdir,
            }
            cmd = cmd + " --wxpy_installdir=%s --install" % self.installdir

        run_cmd(cmd, log_all=True, simple=True)

        # add symbolic links for libwx_*so.* files
        # (which are created automatically by 'build.py install', but not by 'pip install *.whl')
        if LooseVersion(self.version) >= LooseVersion("4"):
            wx_lib_dir = os.path.join(self.installdir, self.pylibdir, 'wx')
            cwd = change_dir(wx_lib_dir)
            lib_so_files = glob.glob('libwx*.so.*')
            for lib_so_file in lib_so_files:
                symlink(lib_so_file, lib_so_file[:-2])
            change_dir(cwd)
Example #17
def include_easyblocks(tmpdir, paths):
    """Include generic and software-specific easyblocks found in specified locations."""
    easyblocks_path = os.path.join(tmpdir, 'included-easyblocks')

    set_up_eb_package(easyblocks_path, 'easybuild.easyblocks',
                      subpkgs=['generic'], pkg_init_body=EASYBLOCKS_PKG_INIT_BODY)

    easyblocks_dir = os.path.join(easyblocks_path, 'easybuild', 'easyblocks')

    allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py']
    for easyblock_module in allpaths:
        filename = os.path.basename(easyblock_module)

        if is_software_specific_easyblock(easyblock_module):
            target_path = os.path.join(easyblocks_dir, filename)
        else:
            target_path = os.path.join(easyblocks_dir, 'generic', filename)

        if not os.path.exists(target_path):
            symlink(easyblock_module, target_path)

    included_ebs = [x for x in os.listdir(easyblocks_dir) if x not in ['__init__.py', 'generic']]
    included_generic_ebs = [x for x in os.listdir(os.path.join(easyblocks_dir, 'generic')) if x != '__init__.py']
    _log.debug("Included generic easyblocks: %s", included_generic_ebs)
    _log.debug("Included software-specific easyblocks: %s", included_ebs)

    # prepend new location to Python search path
    sys.path.insert(0, easyblocks_path)

    # make sure easybuild.easyblocks(.generic) can be imported
    import easybuild.easyblocks
    import easybuild.easyblocks.generic

    # hard inject location to included (generic) easyblocks into Python search path
    # only prepending to sys.path is not enough due to 'pkgutil.extend_path' in easybuild/easyblocks/__init__.py
    new_path = os.path.join(easyblocks_path, 'easybuild', 'easyblocks')
    easybuild.easyblocks.__path__.insert(0, new_path)
    new_path = os.path.join(new_path, 'generic')
    easybuild.easyblocks.generic.__path__.insert(0, new_path)

    # sanity check: verify that included easyblocks can be imported (from expected location)
    for subdir, ebs in [('', included_ebs), ('generic', included_generic_ebs)]:
        pkg = '.'.join(['easybuild', 'easyblocks', subdir]).strip('.')
        loc = os.path.join(easyblocks_dir, subdir)
        verify_imports([os.path.splitext(eb)[0] for eb in ebs], pkg, loc)

    return easyblocks_path
Example #18
    def post_install_step(self, *args, **kwargs):
        """
        Post-processing after installation: add symlinks for cc, c++, f77, f95
        """
        super(EB_GCC, self).post_install_step(*args, **kwargs)

        bindir = os.path.join(self.installdir, 'bin')
        for key in COMP_CMD_SYMLINKS:
            src = COMP_CMD_SYMLINKS[key]
            target = os.path.join(bindir, key)
            if os.path.exists(target):
                self.log.info("'%s' already exists in %s, not replacing it with symlink to '%s'",
                              key, bindir, src)
            elif os.path.exists(os.path.join(bindir, src)):
                symlink(src, target, use_abspath_source=False)
            else:
                raise EasyBuildError("Can't link '%s' to non-existing location %s", target, os.path.join(bindir, src))
Example #19
    def extract_step(self):
        """Extract Xmipp sources."""
        if LooseVersion(self.version) < LooseVersion('3.20.07'):
            # Xmipp < 3.20.07 assumes that everything is unpacked in a "src" dir
            # Xmipp >= 3.20.07 assumes that everything is unpacked in the "src" dir of Xmipp itself
            mkdir(self.srcdir)
            self.cfg.update('unpack_options', '--directory %s' % os.path.basename(self.srcdir))
        super(EB_Xmipp, self).extract_step()
        for module in self.xmipp_modules:
            if LooseVersion(self.version) >= LooseVersion('3.20.07') and module == 'xmipp':
                continue
            symlink('%s-%s' % (module, self.version), os.path.join(self.srcdir, module),
                    use_abspath_source=False)
Example #20
    def post_install_step(self, *args, **kwargs):
        """
        Post-processing after installation: add symlinks for cc, c++, f77, f95
        """
        super(EB_GCC, self).post_install_step(*args, **kwargs)

        bindir = os.path.join(self.installdir, 'bin')
        for key in COMP_CMD_SYMLINKS:
            src = COMP_CMD_SYMLINKS[key]
            target = os.path.join(bindir, key)
            if os.path.exists(target):
                self.log.info("'%s' already exists in %s, not replacing it with symlink to '%s'",
                              key, bindir, src)
            elif os.path.exists(os.path.join(bindir, src)):
                symlink(src, target, use_abspath_source=False)
            else:
                raise EasyBuildError("Can't link '%s' to non-existing location %s", target, os.path.join(bindir, src))
Example #21
    def set_as_default(self, module_folder_path, module_version):
        """
        Create a symlink named 'default' inside the package's module folder in order to set the default module version

        :param module_folder_path: module folder path, e.g. $HOME/easybuild/modules/all/Bison
        :param module_version: module version, e.g. 3.0.4
        """
        default_filepath = os.path.join(module_folder_path, 'default')

        if os.path.islink(default_filepath):
            link_target = resolve_path(default_filepath)
            remove_file(default_filepath)
            self.log.info("Removed default version marking from %s.", link_target)
        elif os.path.exists(default_filepath):
            raise EasyBuildError('Found an unexpected file named default in dir %s' % module_folder_path)

        symlink(module_version + self.MODULE_FILE_EXTENSION, default_filepath, use_abspath_source=False)
        self.log.info("Module default version file written to point to %s", default_filepath)
Example #22
    def install_step(self):
        """Create symlinks into arch-specific directories"""

        if self.cfg['parallel']:
            self.cfg.update('installopts', '-j %s' % self.cfg['parallel'])

        super(EB_PDT, self).install_step()

        # Link arch-specific directories into prefix
        arch_dir = find_arch_dir(self.installdir)
        self.log.info('Found %s as architecture specific directory. Creating symlinks...', arch_dir)
        for subdir in ('bin', 'lib'):
            src = os.path.join(arch_dir, subdir)
            dst = os.path.join(self.installdir, subdir)
            if os.path.lexists(dst):
                self.log.info('Skipping creation of symlink %s as it already exists', dst)
            else:
                symlink(os.path.relpath(src, self.installdir), dst, use_abspath_source=False)
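
# A minimal standalone sketch (temp prefix, hypothetical 'x86_64' arch directory, plain
# os calls) of the relative linking done above: the symlink target is computed with
# os.path.relpath, so 'bin' points at 'x86_64/bin' rather than at an absolute path.
import os
import tempfile

installdir = tempfile.mkdtemp(suffix='-pdt')
arch_bin = os.path.join(installdir, 'x86_64', 'bin')
os.makedirs(arch_bin)
os.symlink(os.path.relpath(arch_bin, installdir), os.path.join(installdir, 'bin'))
print(os.readlink(os.path.join(installdir, 'bin')))  # -> 'x86_64/bin'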
Example #23
    def set_as_default(self, module_folder_path, module_version):
        """
        Create a symlink named 'default' inside the package's module folder in order to set the default module version

        :param module_folder_path: module folder path, e.g. $HOME/easybuild/modules/all/Bison
        :param module_version: module version, e.g. 3.0.4
        """
        default_filepath = os.path.join(module_folder_path, 'default')

        if os.path.islink(default_filepath):
            link_target = resolve_path(default_filepath)
            remove_file(default_filepath)
            self.log.info("Removed default version marking from %s.", link_target)
        elif os.path.exists(default_filepath):
            raise EasyBuildError('Found an unexpected file named default in dir %s' % module_folder_path)

        symlink(module_version + self.MODULE_FILE_EXTENSION, default_filepath, use_abspath_source=False)
        self.log.info("Module default version file written to point to %s", default_filepath)
Example #24
    def make_module_step(self, fake=False):
        """Install .modulerc file."""
        modfile_path = self.module_generator.get_module_filepath(fake=fake)
        modulerc = os.path.join(os.path.dirname(modfile_path), self.module_generator.DOT_MODULERC)

        deps = self.cfg['dependencies']
        if len(deps) != 1:
            raise EasyBuildError("There should be exactly one dependency specified, found %d", len(deps))

        # names should match
        if self.name != deps[0]['name']:
            raise EasyBuildError("Name does not match dependency name: %s vs %s", self.name, deps[0]['name'])

        # ensure version to alias to is a prefix of the version of the dependency
        if not deps[0]['version'].startswith(self.version):
            raise EasyBuildError("Version is not a prefix of dependency version: %s vs %s",
                                 self.version, deps[0]['version'])

        alias_modname = deps[0]['short_mod_name']
        self.log.info("Adding module version alias for %s to %s", alias_modname, modulerc)

        # add symlink to wrapped module file when generating .modulerc in temporary directory (done during sanity check)
        # this is strictly required for Lmod 6.x, for which .modulerc and wrapped module file must be in same location
        if fake:
            wrapped_mod_path = self.modules_tool.modulefile_path(alias_modname)
            wrapped_mod_filename = os.path.basename(wrapped_mod_path)
            target = os.path.join(os.path.dirname(modulerc), wrapped_mod_filename)
            mkdir(os.path.dirname(target), parents=True)
            symlink(wrapped_mod_path, target)

        module_version_specs = {
            'modname': alias_modname,
            'sym_version': self.version,
            'version': deps[0]['version'],
        }
        self.module_generator.modulerc(module_version=module_version_specs, filepath=modulerc)

        if not fake:
            print_msg("created .modulerc file at %s" % modulerc, log=self.log)

        modpath = self.module_generator.get_modules_path(fake=fake)
        self.invalidate_module_caches(modpath)

        return modpath
Example #25
File: netpbm.py  Project: namjals/JSC
    def install_step(self):
        """Custom install step for netpbm."""
        # Preinstallation to a tmp directory. Can't install directly to installdir because the make command fails if the
        # directory already exists
        cmd = "make package pkgdir=%s/pkg" % self.builddir
        (out, _) = run_cmd(cmd, log_all=True, simple=False)

        # Move things to installdir
        copy(["%s/pkg/%s" % (self.builddir, x) for x in os.listdir("%s/pkg/" % self.builddir)], self.installdir)

        # Need to do manually the last bits of the installation
        configs = [
            ("%s/config_template" % self.installdir, "%s/bin/netpbm-config" % self.installdir),
            ("%s/pkgconfig_template" % self.installdir, "%s/lib/pkgconfig/netpbm.pc" % self.installdir)
        ]

        mkdir("%s/lib/pkgconfig" % self.installdir)
        for template, config_file in configs:
            move_file(template, config_file)
            for line in fileinput.input(config_file, inplace=1, backup='.orig'):
                if re.match(r"^@", line):
                    continue
                else:
                    line = re.sub(r'@VERSION@', 'Netpbm %s' % self.version, line)
                    line = re.sub(r'@DATADIR@', '%s/lib' % self.installdir, line)
                    line = re.sub(r'@LINKDIR@', '%s/lib' % self.installdir, line)
                    line = re.sub(r'@INCLUDEDIR@', '%s/include' % self.installdir, line)
                    line = re.sub(r'@BINDIR@', '%s/bin' % self.installdir, line)

                sys.stdout.write(line)

        adjust_permissions("%s/bin/netpbm-config" % self.installdir, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        move_file("%s/link/libnetpbm.a" % self.installdir, "%s/lib/libnetpbm.a" % self.installdir)
        symlink("%s/lib/libnetpbm.so.11" % self.installdir, "%s/lib/libnetpbm.so" % self.installdir)
        for f in os.listdir("%s/misc/" % self.installdir):
            move_file("%s/misc/%s" % (self.installdir, f), "%s/lib/%s" % (self.installdir, f))
        rmtree2("%s/misc/" % self.installdir)
        rmtree2("%s/link/" % self.installdir)

        headers = os.listdir("%s/include/netpbm" % self.installdir)
        for header in headers:
            symlink("%s/include/netpbm/%s" % (self.installdir, header), "%s/include/%s" % (self.installdir, header))

        return out
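
# A minimal standalone sketch (temp prefix, one stand-in header, plain os calls) of the
# header flattening done at the end above: every file under include/netpbm is also
# exposed directly under include/ via a symlink.
import os
import tempfile

prefix = tempfile.mkdtemp(suffix='-netpbm')
os.makedirs(os.path.join(prefix, 'include', 'netpbm'))
open(os.path.join(prefix, 'include', 'netpbm', 'pam.h'), 'w').close()  # stand-in header

for header in os.listdir(os.path.join(prefix, 'include', 'netpbm')):
    os.symlink(os.path.join(prefix, 'include', 'netpbm', header),
               os.path.join(prefix, 'include', header))
print(sorted(os.listdir(os.path.join(prefix, 'include'))))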
Example #26
    def install_step(self):
        """Extend make install to make sure that the 'python' command is present."""

        # avoid that pip (ab)uses $HOME/.cache/pip
        # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching
        env.setvar('XDG_CACHE_HOME', tempfile.gettempdir())
        self.log.info("Using %s as pip cache directory",
                      os.environ['XDG_CACHE_HOME'])

        super(EB_Python, self).install_step()

        python_binary_path = os.path.join(self.installdir, 'bin', 'python')
        if not os.path.isfile(python_binary_path):
            symlink(python_binary_path + self.pyshortver, python_binary_path)

        if self.cfg['ebpythonprefixes']:
            write_file(
                os.path.join(self.installdir, self.pythonpath,
                             'sitecustomize.py'), SITECUSTOMIZE)
Example #27
    def install_step(self):
        """
        Install using make install (for non-source installations),
        or by symlinking files (old versions, < 3).
        """
        if LooseVersion(self.version) >= LooseVersion("3"):
            if not self.cfg['sourceinstall']:
                super(EB_PETSc, self).install_step()

        else:  # old versions (< 3.x)

            for fn in [
                    'petscconf.h', 'petscconfiginfo.h', 'petscfix.h',
                    'petscmachineinfo.h'
            ]:
                includedir = os.path.join(self.installdir, 'include')
                bmakedir = os.path.join(self.installdir, 'bmake',
                                        'linux-gnu-c-opt')
                symlink(os.path.join(bmakedir, fn),
                        os.path.join(includedir, fn))
Example #28
    def prepare_step(self, *args, **kwargs):
        """Make sure that versioned CMake alias exists"""
        super(EB_PyTorch, self).prepare_step(*args, **kwargs)
        # PyTorch prefers 'cmake3' over 'cmake', but 'cmake3' usually does not exist
        cmake_root = get_software_root('CMake')
        cmake_version = get_software_version('CMake')
        if cmake_root and not os.path.isfile(os.path.join(cmake_root, 'bin', 'cmake3')):
            if cmake_version and cmake_version.split('.')[0] != '3':
                raise EasyBuildError('PyTorch requires CMake 3 but CMake %s was found', cmake_version)
            cmake_bin_dir = tempfile.mkdtemp(suffix='cmake-bin')
            self.log.warning('Creating symlink `cmake3` in %s to avoid PyTorch picking up a system CMake. '
                             'Reinstall the CMake module to avoid this!', cmake_bin_dir)
            symlink(os.path.join(cmake_root, 'bin', 'cmake'), os.path.join(cmake_bin_dir, 'cmake3'))
            env.setvar('PATH', "%s:%s" % (cmake_bin_dir, os.getenv('PATH')))
Example #29
def include_easyblocks(tmpdir, paths):
    """Include generic and software-specific easyblocks found in specified locations."""
    easyblocks_path = os.path.join(tmpdir, 'included-easyblocks')

    set_up_eb_package(easyblocks_path, 'easybuild.easyblocks',
                      subpkgs=['generic'], pkg_init_body=EASYBLOCKS_PKG_INIT_BODY)

    easyblocks_dir = os.path.join(easyblocks_path, 'easybuild', 'easyblocks')

    allpaths = expand_glob_paths(paths)
    for easyblock_module in allpaths:
        filename = os.path.basename(easyblock_module)

        # generic easyblocks are expected to be in a directory named 'generic'
        parent_dir = os.path.basename(os.path.dirname(easyblock_module))
        if parent_dir == 'generic':
            target_path = os.path.join(easyblocks_dir, 'generic', filename)
        else:
            target_path = os.path.join(easyblocks_dir, filename)

        symlink(easyblock_module, target_path)

    included_ebs = [x for x in os.listdir(easyblocks_dir) if x not in ['__init__.py', 'generic']]
    included_generic_ebs = [x for x in os.listdir(os.path.join(easyblocks_dir, 'generic')) if x != '__init__.py']
    _log.debug("Included generic easyblocks: %s", included_generic_ebs)
    _log.debug("Included software-specific easyblocks: %s", included_ebs)

    # inject path into Python search path, and reload modules to get it 'registered' in sys.modules
    sys.path.insert(0, easyblocks_path)
    reload(easybuild)
    if 'easybuild.easyblocks' in sys.modules:
        reload(easybuild.easyblocks)
        reload(easybuild.easyblocks.generic)

    # sanity check: verify that included easyblocks can be imported (from expected location)
    for subdir, ebs in [('', included_ebs), ('generic', included_generic_ebs)]:
        pkg = '.'.join(['easybuild', 'easyblocks', subdir]).strip('.')
        loc = os.path.join(easyblocks_dir, subdir)
        verify_imports([os.path.splitext(eb)[0] for eb in ebs], pkg, loc)

    return easyblocks_path
Example #30
class EB_MXNet(MakeCp):
    """Easyblock to build and install MXNet"""

    @staticmethod
    def extra_options(extra_vars=None):
        """Change default values of options"""
        extra = MakeCp.extra_options()
        # files_to_copy is not mandatory here
        extra['files_to_copy'][2] = CUSTOM
        return extra

    def __init__(self, *args, **kwargs):
        """Initialize custom class variables."""
        super(EB_MXNet, self).__init__(*args, **kwargs)

        self.mxnet_src_dir = None
        self.py_ext = PythonPackage(self, {'name': self.name, 'version': self.version})
        self.py_ext.module_generator = self.module_generator
        self.r_ext = RPackage(self, {'name': self.name, 'version': self.version})
        self.r_ext.module_generator = self.module_generator

    def extract_step(self):
        """
        Prepare a combined MXNet source tree. Move all submodules
        to their right place.
        """
        # Extract everything into separate directories.
        super(EB_MXNet, self).extract_step()

        mxnet_dirs = glob.glob(os.path.join(self.builddir, '*mxnet-*'))
        if len(mxnet_dirs) == 1:
            self.mxnet_src_dir = mxnet_dirs[0]
            self.log.debug("MXNet dir is: %s", self.mxnet_src_dir)
        else:
            raise EasyBuildError("Failed to find/isolate MXNet source directory: %s", mxnet_dirs)

        for srcdir in [d for d in os.listdir(self.builddir) if d != os.path.basename(self.mxnet_src_dir)]:
            submodule, _, _ = srcdir.rpartition('-')
            newdir = os.path.join(self.mxnet_src_dir, submodule)
            olddir = os.path.join(self.builddir, srcdir)
            # first remove empty existing directory
            rmtree2(newdir)
            try:
                shutil.move(olddir, newdir)
            except IOError as err:
                raise EasyBuildError("Failed to move %s to %s: %s", olddir, newdir, err)

        # the nnvm submodule has dmlc-core as a submodule too. Let's put a symlink in place.
        newdir = os.path.join(self.mxnet_src_dir, "nnvm", "dmlc-core")
        olddir = os.path.join(self.mxnet_src_dir, "dmlc-core")
        rmtree2(newdir)
        symlink(olddir, newdir)
Example #31
    def install_step(self):
        """Extend make install to make sure that the 'python' command is present."""

        # avoid that pip (ab)uses $HOME/.cache/pip
        # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching
        env.setvar('XDG_CACHE_HOME', tempfile.gettempdir())
        self.log.info("Using %s as pip cache directory",
                      os.environ['XDG_CACHE_HOME'])

        super(EB_Python, self).install_step()

        python_binary_path = os.path.join(self.installdir, 'bin', 'python')
        if not os.path.isfile(python_binary_path):
            symlink(python_binary_path + self.pyshortver, python_binary_path)

        if self.cfg['ebpythonprefixes']:
            write_file(
                os.path.join(self.installdir, self.pythonpath,
                             'sitecustomize.py'), SITECUSTOMIZE)

        # symlink lib/python*/lib-dynload to lib64/python*/lib-dynload if it doesn't exist;
        # see https://github.com/easybuilders/easybuild-easyblocks/issues/1957
        lib_dynload = 'lib-dynload'
        python_lib_dynload = os.path.join('python%s' % self.pyshortver,
                                          lib_dynload)
        lib_dynload_path = os.path.join(self.installdir, 'lib',
                                        python_lib_dynload)
        if not os.path.exists(lib_dynload_path):
            lib64_dynload_path = os.path.join('lib64', python_lib_dynload)
            if os.path.exists(os.path.join(self.installdir,
                                           lib64_dynload_path)):
                lib_dynload_parent = os.path.dirname(lib_dynload_path)
                mkdir(lib_dynload_parent, parents=True)
                cwd = change_dir(lib_dynload_parent)
                # use relative path as target, to avoid hardcoding path to install directory
                target_lib_dynload = os.path.join('..', '..',
                                                  lib64_dynload_path)
                symlink(target_lib_dynload, lib_dynload)
                change_dir(cwd)
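
# A minimal standalone sketch (temp prefix, Python 3.8 layout assumed, plain os calls)
# of the relative lib-dynload symlink created above: lib/pythonX.Y/lib-dynload points
# at ../../lib64/pythonX.Y/lib-dynload, so the installation prefix is not hardcoded.
import os
import tempfile

prefix = tempfile.mkdtemp(suffix='-python')
os.makedirs(os.path.join(prefix, 'lib64', 'python3.8', 'lib-dynload'))
parent = os.path.join(prefix, 'lib', 'python3.8')
os.makedirs(parent)

cwd = os.getcwd()
os.chdir(parent)
os.symlink(os.path.join('..', '..', 'lib64', 'python3.8', 'lib-dynload'), 'lib-dynload')
os.chdir(cwd)
print(os.path.realpath(os.path.join(parent, 'lib-dynload')))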
Example #32
    def install_step(self):
        """Installation of OpenSSL and SSL certificates"""
        super(EB_OpenSSL, self).install_step()

        # SSL certificates
        # OPENSSLDIR is already populated by the installation of OpenSSL
        # try to symlink system certificates in the empty 'certs' directory
        ssl_dir = os.path.join(self.installdir, 'ssl')
        openssl_certs_dir = os.path.join(ssl_dir, 'certs')

        if self.ssl_certs_dir:
            remove_dir(openssl_certs_dir)
            symlink(self.ssl_certs_dir, openssl_certs_dir)

            # also symlink cert.pem file, if it exists
            # (required on CentOS 7, see https://github.com/easybuilders/easybuild-easyconfigs/issues/14058)
            cert_pem_path = os.path.join(os.path.dirname(self.ssl_certs_dir), 'cert.pem')
            if os.path.isfile(cert_pem_path):
                symlink(cert_pem_path, os.path.join(ssl_dir, os.path.basename(cert_pem_path)))
        else:
            print_warning("OpenSSL successfully installed without system SSL certificates. "
                          "Some packages might experience limited functionality.")
Example #33
    def extract_step(self):
        """
        Prepare a combined MXNet source tree. Move all submodules
        to their right place.
        """
        # Extract everything into separate directories.
        super(EB_MXNet, self).extract_step()

        mxnet_dirs = glob.glob(os.path.join(self.builddir, '*mxnet-*'))
        if len(mxnet_dirs) == 1:
            self.mxnet_src_dir = mxnet_dirs[0]
            self.log.debug("MXNet dir is: %s", self.mxnet_src_dir)
        else:
            raise EasyBuildError(
                "Failed to find/isolate MXNet source directory: %s",
                mxnet_dirs)

        for srcdir in [
                d for d in os.listdir(self.builddir)
                if d != os.path.basename(self.mxnet_src_dir)
        ]:
            submodule, _, _ = srcdir.rpartition('-')
            newdir = os.path.join(self.mxnet_src_dir, submodule)
            olddir = os.path.join(self.builddir, srcdir)
            # first remove empty existing directory
            remove_dir(newdir)
            try:
                shutil.move(olddir, newdir)
            except IOError as err:
                raise EasyBuildError("Failed to move %s to %s: %s", olddir,
                                     newdir, err)

        # the nnvm submodule has dmlc-core as a submodule too. Let's put a symlink in place.
        newdir = os.path.join(self.mxnet_src_dir, "nnvm", "dmlc-core")
        olddir = os.path.join(self.mxnet_src_dir, "dmlc-core")
        remove_dir(newdir)
        symlink(olddir, newdir)
Example #34
def include_module_naming_schemes(tmpdir, paths):
    """Include module naming schemes at specified locations."""
    mns_path = os.path.join(tmpdir, 'included-module-naming-schemes')

    set_up_eb_package(mns_path, 'easybuild.tools.module_naming_scheme')

    mns_dir = os.path.join(mns_path, 'easybuild', 'tools',
                           'module_naming_scheme')

    allpaths = [
        p for p in expand_glob_paths(paths)
        if os.path.basename(p) != '__init__.py'
    ]
    for mns_module in allpaths:
        filename = os.path.basename(mns_module)
        target_path = os.path.join(mns_dir, filename)
        if not os.path.exists(target_path):
            symlink(mns_module, target_path)

    included_mns = [x for x in os.listdir(mns_dir) if x not in ['__init__.py']]
    _log.debug("Included module naming schemes: %s", included_mns)

    # inject path into Python search path, and reload modules to get it 'registered' in sys.modules
    sys.path.insert(0, mns_path)

    # hard inject location to included module naming schemes into Python search path
    # only prepending to sys.path is not enough due to 'pkgutil.extend_path' in module_naming_scheme/__init__.py
    new_path = os.path.join(mns_path, 'easybuild', 'tools',
                            'module_naming_scheme')
    easybuild.tools.module_naming_scheme.__path__.insert(0, new_path)

    # sanity check: verify that included module naming schemes can be imported (from expected location)
    verify_imports([os.path.splitext(mns)[0] for mns in included_mns],
                   'easybuild.tools.module_naming_scheme', mns_dir)

    return mns_path
Example #35
    def install_step(self):
        """
        Install using make install (for non-source installations),
        or by symlinking files (old versions, < 3).
        """
        if LooseVersion(self.version) >= LooseVersion("3"):
            if not self.cfg['sourceinstall']:
                super(EB_PETSc, self).install_step()
                petsc_root = self.installdir
            else:
                petsc_root = os.path.join(self.installdir, self.petsc_subdir)
            # Remove MPI-CXX flags added during configure to prevent them from being passed to consumers of PETsc
            petsc_variables_path = os.path.join(petsc_root, 'lib', 'petsc', 'conf', 'petscvariables')
            if os.path.isfile(petsc_variables_path):
                fix = (r'^(CXX_FLAGS|CXX_LINKER_FLAGS|CONFIGURE_OPTIONS)( = .*)%s(.*)$' % NO_MPI_CXX_EXT_FLAGS,
                       r'\1\2\3')
                apply_regex_substitutions(petsc_variables_path, [fix])

        else:  # old versions (< 3.x)

            for fn in ['petscconf.h', 'petscconfiginfo.h', 'petscfix.h', 'petscmachineinfo.h']:
                includedir = os.path.join(self.installdir, 'include')
                bmakedir = os.path.join(self.installdir, 'bmake', 'linux-gnu-c-opt')
                symlink(os.path.join(bmakedir, fn), os.path.join(includedir, fn))
Example #36
    def post_install_step(self, *args, **kwargs):
        """
        Post-processing after installation: add symlinks for cc, c++, f77, f95
        """
        super(EB_GCC, self).post_install_step(*args, **kwargs)

        # Add symlinks for cc/c++/f77/f95.
        bindir = os.path.join(self.installdir, 'bin')
        for key in COMP_CMD_SYMLINKS:
            src = COMP_CMD_SYMLINKS[key]
            target = os.path.join(bindir, key)
            if os.path.exists(target):
                self.log.info(
                    "'%s' already exists in %s, not replacing it with symlink to '%s'",
                    key, bindir, src)
            elif os.path.exists(os.path.join(bindir, src)):
                symlink(src, target, use_abspath_source=False)
            else:
                raise EasyBuildError(
                    "Can't link '%s' to non-existing location %s", target,
                    os.path.join(bindir, src))

        # Rename include-fixed directory which includes system header files that were processed by fixincludes,
        # since these may cause problems when upgrading to newer OS version.
        # (see https://github.com/easybuilders/easybuild-easyconfigs/issues/10666)
        glob_pattern = os.path.join(self.installdir, 'lib*', 'gcc',
                                    '*-linux-gnu', self.version,
                                    'include-fixed')
        paths = glob.glob(glob_pattern)
        if paths:
            # weed out paths that point to the same location,
            # for example when 'lib64' is a symlink to 'lib'
            include_fixed_paths = []
            for path in paths:
                if not any(
                        os.path.samefile(path, x)
                        for x in include_fixed_paths):
                    include_fixed_paths.append(path)

            if len(include_fixed_paths) == 1:
                include_fixed_path = include_fixed_paths[0]

                msg = "Found include-fixed subdirectory at %s, "
                msg += "renaming it to avoid using system header files patched by fixincludes..."
                self.log.info(msg, include_fixed_path)

                # limits.h and syslimits.h need to be copied to include/ first,
                # these are strictly required (by /usr/include/limits.h for example)
                include_path = os.path.join(
                    os.path.dirname(include_fixed_path), 'include')
                retained_header_files = ['limits.h', 'syslimits.h']
                for fn in retained_header_files:
                    from_path = os.path.join(include_fixed_path, fn)
                    to_path = os.path.join(include_path, fn)
                    if os.path.exists(from_path):
                        if os.path.exists(to_path):
                            raise EasyBuildError(
                                "%s already exists, not overwriting it with %s!",
                                to_path, from_path)
                        else:
                            copy_file(from_path, to_path)
                            self.log.info("%s copied to %s before renaming %s",
                                          from_path, to_path,
                                          include_fixed_path)
                    else:
                        self.log.warning(
                            "Can't copy non-existing file %s to %s, since it doesn't exist!",
                            from_path, to_path)

                readme = os.path.join(include_fixed_path, 'README.easybuild')
                readme_txt = '\n'.join([
                    "This directory was renamed by EasyBuild to avoid that the header files in it are picked up,",
                    "since they may cause problems when the OS is upgraded to a new (minor) version.",
                    '',
                    "These files were copied to %s first: %s" %
                    (include_path, ', '.join(retained_header_files)),
                    '',
                    "See https://github.com/easybuilders/easybuild-easyconfigs/issues/10666 for more information.",
                    '',
                ])
                write_file(readme, readme_txt)

                include_fixed_renamed = include_fixed_path + '.renamed-by-easybuild'
                move_file(include_fixed_path, include_fixed_renamed)
                self.log.info(
                    "%s renamed to %s to avoid using the header files in it",
                    include_fixed_path, include_fixed_renamed)
            else:
                raise EasyBuildError(
                    "Exactly one 'include-fixed' directory expected, found %d: %s",
                    len(include_fixed_paths), include_fixed_paths)
        else:
            self.log.info("No include-fixed subdirectory found at %s",
                          glob_pattern)
Example #37
    def configure_step(self):
        """
        Configure for GCC build:
        - prepare extra source dirs (GMP, MPFR, MPC, ...)
        - create obj dir to build in (GCC doesn't like to be built in source dir)
        - add configure and make options, according to .eb spec file
        - decide whether or not to do a staged build (which is required to enable PPL/CLooG support)
        - set platform_lib based on config.guess output
        """

        sysroot = build_option('sysroot')
        if sysroot:
            # based on changes made to GCC in Gentoo Prefix
            # https://gitweb.gentoo.org/repo/gentoo.git/tree/profiles/features/prefix/standalone/profile.bashrc

            # add --with-sysroot configure option, to instruct GCC to consider
            # value set for EasyBuild's --sysroot configuration option as the root filesystem of the operating system
            # (see https://gcc.gnu.org/install/configure.html)
            self.cfg.update('configopts', '--with-sysroot=%s' % sysroot)

            # avoid that --sysroot is passed to linker by patching value for SYSROOT_SPEC in gcc/gcc.c
            apply_regex_substitutions(os.path.join('gcc', 'gcc.c'),
                                      [('--sysroot=%R', '')])

            # prefix dynamic linkers with sysroot
            # this patches lines like:
            # #define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"
            # for PowerPC (rs6000) we have to set DYNAMIC_LINKER_PREFIX to sysroot
            gcc_config_headers = glob.glob(
                os.path.join('gcc', 'config', '*', '*linux*.h'))
            regex_subs = [
                ('(_DYNAMIC_LINKER.*[":])/lib', r'\1%s/lib' % sysroot),
                ('(DYNAMIC_LINKER_PREFIX\\s+)""', r'\1"%s"' % sysroot),
            ]
            for gcc_config_header in gcc_config_headers:
                apply_regex_substitutions(gcc_config_header, regex_subs)

        # self.configopts will be reused in a 3-staged build,
        # configopts is only used in first configure
        self.configopts = self.cfg['configopts']

        # I) prepare extra source dirs, e.g. for GMP, MPFR, MPC (if required), so GCC can build them
        stage1_info = self.prep_extra_src_dirs("stage1")
        configopts = stage1_info['configopts']

        # II) update config options

        # enable specified language support
        if self.cfg['languages']:
            self.configopts += " --enable-languages=%s" % ','.join(
                self.cfg['languages'])

        if self.cfg['withnvptx']:
            if self.iter_idx == 0:
                self.configopts += " --without-cuda-driver"
                self.configopts += " --enable-offload-targets=nvptx-none"
            else:
                # register installed GCC as compiler to use nvptx
                path = "%s/bin:%s" % (self.installdir, os.getenv('PATH'))
                env.setvar('PATH', path)

                ld_lib_path = "%(dir)s/lib64:%(dir)s/lib:%(val)s" % {
                    'dir': self.installdir,
                    'val': os.getenv('LD_LIBRARY_PATH')
                }
                env.setvar('LD_LIBRARY_PATH', ld_lib_path)
                extra_source = {1: "nvptx-tools", 2: "newlib"}[self.iter_idx]
                extra_source_dirs = glob.glob(
                    os.path.join(self.builddir, '%s-*' % extra_source))
                if len(extra_source_dirs) != 1:
                    raise EasyBuildError("Failed to isolate %s source dir" %
                                         extra_source)
                if self.iter_idx == 1:
                    # compile nvptx-tools
                    change_dir(extra_source_dirs[0])
                else:  # self.iter_idx == 2
                    # compile nvptx target compiler
                    symlink(os.path.join(extra_source_dirs[0], 'newlib'),
                            'newlib')
                    self.create_dir("build-nvptx-gcc")
                    self.cfg.update('configopts', self.configopts)
                    self.cfg.update(
                        'configopts',
                        "--with-build-time-tools=%s/nvptx-none/bin" %
                        self.installdir)
                    self.cfg.update('configopts', "--target=nvptx-none")
                    host_type = self.determine_build_and_host_type()[1]
                    self.cfg.update(
                        'configopts',
                        "--enable-as-accelerator-for=%s" % host_type)
                    self.cfg.update('configopts', "--disable-sjlj-exceptions")
                    self.cfg.update('configopts',
                                    "--enable-newlib-io-long-long")
                    self.cfg['configure_cmd_prefix'] = '../'
                return super(EB_GCC, self).configure_step()

        # enable building of libiberty, if desired
        if self.cfg['withlibiberty']:
            self.configopts += " --enable-install-libiberty"

        # enable link-time-optimization (LTO) support, if desired
        if self.cfg['withlto']:
            self.configopts += " --enable-lto"
        else:
            self.configopts += " --disable-lto"

        # configure for a release build
        self.configopts += " --enable-checking=release "
        # enable multilib: allow both 32 and 64 bit
        if self.cfg['multilib']:
            glibc_32bit = [
                "glibc.i686",  # Fedora, RedHat-based
                "glibc.ppc",  # "" on Power
                "libc6-dev-i386",  # Debian-based
                "gcc-c++-32bit",  # OpenSuSE, SLES
            ]
            if not any([check_os_dependency(dep) for dep in glibc_32bit]):
                raise EasyBuildError(
                    "Using multilib requires 32-bit glibc (install one of %s, depending on your OS)",
                    ', '.join(glibc_32bit))
            self.configopts += " --enable-multilib --with-multilib-list=m32,m64"
        else:
            self.configopts += " --disable-multilib"
        # build both static and dynamic libraries (???)
        self.configopts += " --enable-shared=yes --enable-static=yes "

        # use POSIX threads
        self.configopts += " --enable-threads=posix "

        # enable plugin support
        self.configopts += " --enable-plugins "

        # use GOLD as default linker
        if self.cfg['use_gold_linker']:
            self.configopts += " --enable-gold=default --enable-ld --with-plugin-ld=ld.gold"
        else:
            self.configopts += " --enable-gold --enable-ld=default"

        # enable bootstrap build for self-containment (unless for staged build)
        if not self.stagedbuild:
            configopts += " --enable-bootstrap"
        else:
            configopts += " --disable-bootstrap"

        if self.stagedbuild:
            #
            # STAGE 1: configure GCC build that will be used to build PPL/CLooG
            #
            self.log.info(
                "Starting with stage 1 of 3-staged build to enable CLooG and/or PPL, ISL support..."
            )
            self.stage1installdir = os.path.join(self.builddir,
                                                 'GCC_stage1_eb')
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.stage1installdir
            }

        else:
            # unstaged build, so just run standard configure/make/make install
            # set prefixes
            self.log.info("Performing regular GCC build...")
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.installdir
            }

        # prioritize lib over lib{64,32,x32} for all architectures by overriding default MULTILIB_OSDIRNAMES config
        # only do this when multilib is not enabled
        if self.cfg['prefer_lib_subdir'] and not self.cfg['multilib']:
            cfgfile = 'gcc/config/i386/t-linux64'
            multilib_osdirnames = "MULTILIB_OSDIRNAMES = m64=../lib:../lib64 m32=../lib:../lib32 mx32=../lib:../libx32"
            self.log.info("Patching MULTILIB_OSDIRNAMES in %s with '%s'",
                          cfgfile, multilib_osdirnames)
            write_file(cfgfile, multilib_osdirnames, append=True)
        elif self.cfg['multilib']:
            self.log.info(
                "Not patching MULTILIB_OSDIRNAMES since --enable-multilib is enabled"
            )

        # III) create obj dir to build in, and change to it
        #     GCC doesn't like to be built in the source dir
        if self.stagedbuild:
            objdir = self.create_dir("stage1_obj")
            self.stage1prefix = objdir
        else:
            objdir = self.create_dir("obj")

        # IV) actual configure, run from the obj dir (not from the source dir)
        cmd = "../configure %s %s" % (self.configopts, configopts)

        self.run_configure_cmd(cmd)

        self.disable_lto_mpfr_old_gcc(objdir)
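
# Hedged aside (not part of the easyblock above): a minimal standalone sketch of the sysroot
# substitution applied to GCC's linux config headers earlier in configure_step, using a made-up
# header line and sysroot path purely for illustration.
import re

def prefix_dynamic_linker(header_text, sysroot):
    """Prefix dynamic linker paths in a GCC config header snippet with the given sysroot."""
    regex_subs = [
        ('(_DYNAMIC_LINKER.*[":])/lib', r'\1%s/lib' % sysroot),
        ('(DYNAMIC_LINKER_PREFIX\\s+)""', r'\1"%s"' % sysroot),
    ]
    for pattern, repl in regex_subs:
        header_text = re.sub(pattern, repl, header_text)
    return header_text

# '#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"'
# becomes '#define GLIBC_DYNAMIC_LINKER64 "<sysroot>/lib64/ld-linux-x86-64.so.2"'
print(prefix_dynamic_linker('#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"', '/path/to/sysroot'))
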
    def test_cases_step(self):
        """Run provided list of test cases, or provided examples is no test cases were specified."""

        # run all examples if no test cases were specified
        # order and grouping are important for some of these tests (e.g., [o]h3tr*)
        # some of the examples are excluded:
        # missing md parameter files: dna.nw, mache.nw, 18c6NaK.nw, membrane.nw, sdm.nw
        # method not implemented (unknown theory) or keyword not found: triplet.nw, C2H6.nw, pspw_MgO.nw, ccsdt_polar_small.nw, CG.nw
        # no convergence: diamond.nw
        # Too much memory required: ccsd_polar_big.nw
        if isinstance(self.cfg['tests'], bool):
            examples = [('qmd', ['3carbo_dft.nw', '3carbo.nw', 'h2o_scf.nw']),
                        ('pspw', ['C2.nw', 'C6.nw', 'Carbene.nw', 'Na16.nw', 'NaCl.nw']),
                        ('tcepolar', ['ccsd_polar_small.nw']),
                        ('dirdyvtst/h3', ['h3tr1.nw', 'h3tr2.nw']),
                        ('dirdyvtst/h3', ['h3tr3.nw']), ('dirdyvtst/h3', ['h3tr4.nw']), ('dirdyvtst/h3', ['h3tr5.nw']),
                        ('dirdyvtst/oh3', ['oh3tr1.nw', 'oh3tr2.nw']),
                        ('dirdyvtst/oh3', ['oh3tr3.nw']), ('dirdyvtst/oh3', ['oh3tr4.nw']), ('dirdyvtst/oh3', ['oh3tr5.nw']),
                        ('pspw/session1', ['band.nw', 'si4.linear.nw', 'si4.rhombus.nw', 'S2-drift.nw', 
                                           'silicon.nw', 'S2.nw', 'si4.rectangle.nw']),
                        ('md/myo', ['myo.nw']), ('md/nak', ['NaK.nw']), ('md/crown', ['crown.nw']), ('md/hrc', ['hrc.nw']),
                        ('md/benzene', ['benzene.nw'])]

            self.cfg['tests'] = [(os.path.join(self.examples_dir, d), l) for (d, l) in examples]
            self.log.info("List of examples to be run as test cases: %s" % self.cfg['tests'])

        try:
            # symlink $HOME/.nwchemrc to local copy of default nwchemrc
            default_nwchemrc = os.path.join(self.installdir, 'data', 'default.nwchemrc')

            # make a local copy of the default .nwchemrc file at a fixed path, so we can symlink to it
            # this makes sure that multiple parallel builds can reuse the same symlink, even for different builds
            # there is apparently no way to point NWChem to a particular config file other than $HOME/.nwchemrc
            try:
                local_nwchemrc_dir = os.path.dirname(self.local_nwchemrc)
                if not os.path.exists(local_nwchemrc_dir):
                    os.makedirs(local_nwchemrc_dir)
                shutil.copy2(default_nwchemrc, self.local_nwchemrc)

                # only try to create symlink if it's not there yet
                # we've verified earlier that the symlink is what we expect it to be if it's there
                if not os.path.islink(self.home_nwchemrc):
                    symlink(self.local_nwchemrc, self.home_nwchemrc)
            except OSError as err:
                raise EasyBuildError("Failed to symlink %s to %s: %s", self.home_nwchemrc, self.local_nwchemrc, err)

            # run tests, keep track of fail ratio
            cwd = os.getcwd()

            fail = 0.0
            tot = 0.0

            success_regexp = re.compile(r"Total times\s*cpu:.*wall:.*")

            test_cases_logfn = os.path.join(self.installdir, config.log_path(), 'test_cases.log')
            test_cases_log = open(test_cases_logfn, "w")

            for (testdir, tests) in self.cfg['tests']:

                # run test in a temporary dir
                tmpdir = tempfile.mkdtemp(prefix='nwchem_test_')
                change_dir(tmpdir)

                # copy all files in test case dir
                for item in os.listdir(testdir):
                    test_file = os.path.join(testdir, item)
                    if os.path.isfile(test_file):
                        self.log.debug("Copying %s to %s" % (test_file, tmpdir))
                        shutil.copy2(test_file, tmpdir)

                # run tests
                for testx in tests:
                    cmd = "nwchem %s" % testx
                    msg = "Running test '%s' (from %s) in %s..." % (cmd, testdir, tmpdir)
                    self.log.info(msg)
                    test_cases_log.write("\n%s\n" % msg)
                    (out, ec) = run_cmd(cmd, simple=False, log_all=False, log_ok=False, log_output=True)

                    # check exit code and output
                    if ec:
                        msg = "Test %s failed (exit code: %s)!" % (testx, ec)
                        self.log.warning(msg)
                        test_cases_log.write('FAIL: %s' % msg)
                        fail += 1
                    else:
                        if success_regexp.search(out):
                            msg = "Test %s successful!" % testx
                            self.log.info(msg)
                            test_cases_log.write('SUCCESS: %s' % msg)
                        else:
                            msg = "No 'Total times' found for test %s (but exit code is %s)!" % (testx, ec)
                            self.log.warning(msg)
                            test_cases_log.write('FAIL: %s' % msg)
                            fail += 1

                    test_cases_log.write("\nOUTPUT:\n\n%s\n\n" % out)

                    tot += 1

                # go back
                change_dir(cwd)
                shutil.rmtree(tmpdir)

            fail_ratio = fail / tot
            fail_pcnt = fail_ratio * 100

            msg = "%d of %d tests failed (%s%%)!" % (fail, tot, fail_pcnt)
            self.log.info(msg)
            test_cases_log.write('\n\nSUMMARY: %s' % msg)

            test_cases_log.close()
            self.log.info("Log for test cases saved at %s" % test_cases_logfn)

            if fail_ratio > self.cfg['max_fail_ratio']:
                max_fail_pcnt = self.cfg['max_fail_ratio'] * 100
                raise EasyBuildError("Over %s%% of test cases failed, assuming broken build.", max_fail_pcnt)

            # cleanup
            try:
                shutil.rmtree(self.examples_dir)
                shutil.rmtree(local_nwchemrc_dir)
            except OSError as err:
                raise EasyBuildError("Cleanup failed: %s", err)
示例#39
0
def include_toolchains(tmpdir, paths):
    """Include toolchains and toolchain components at specified locations."""
    toolchains_path = os.path.join(tmpdir, 'included-toolchains')
    toolchain_subpkgs = ['compiler', 'fft', 'linalg', 'mpi']

    set_up_eb_package(toolchains_path,
                      'easybuild.toolchains',
                      subpkgs=toolchain_subpkgs)

    tcs_dir = os.path.join(toolchains_path, 'easybuild', 'toolchains')

    allpaths = [
        p for p in expand_glob_paths(paths)
        if os.path.basename(p) != '__init__.py'
    ]
    for toolchain_module in allpaths:
        filename = os.path.basename(toolchain_module)

        parent_dir = os.path.basename(os.path.dirname(toolchain_module))

        # toolchain components are expected to be in a directory named according to the type of component
        if parent_dir in toolchain_subpkgs:
            target_path = os.path.join(tcs_dir, parent_dir, filename)
        else:
            target_path = os.path.join(tcs_dir, filename)

        if not os.path.exists(target_path):
            symlink(toolchain_module, target_path)

    included_toolchains = [
        x for x in os.listdir(tcs_dir)
        if x not in ['__init__.py'] + toolchain_subpkgs
    ]
    _log.debug("Included toolchains: %s", included_toolchains)

    included_subpkg_modules = {}
    for subpkg in toolchain_subpkgs:
        included_subpkg_modules[subpkg] = [
            x for x in os.listdir(os.path.join(tcs_dir, subpkg))
            if x != '__init__.py'
        ]
        _log.debug("Included toolchain %s components: %s", subpkg,
                   included_subpkg_modules[subpkg])

    # inject path into Python search path, and reload modules to get it 'registered' in sys.modules
    sys.path.insert(0, toolchains_path)

    # reload toolchain modules and hard inject location to included toolchains into Python search path
    # only prepending to sys.path is not enough due to 'pkgutil.extend_path' in toolchains/*/__init__.py
    easybuild.toolchains.__path__.insert(
        0, os.path.join(toolchains_path, 'easybuild', 'toolchains'))
    for subpkg in toolchain_subpkgs:
        tcpkg = 'easybuild.toolchains.%s' % subpkg
        sys.modules[tcpkg].__path__.insert(
            0, os.path.join(toolchains_path, 'easybuild', 'toolchains',
                            subpkg))

    # sanity check: verify that included toolchain modules can be imported (from expected location)
    verify_imports([os.path.splitext(mns)[0] for mns in included_toolchains],
                   'easybuild.toolchains', tcs_dir)
    for subpkg in toolchain_subpkgs:
        pkg = '.'.join(['easybuild', 'toolchains', subpkg])
        loc = os.path.join(tcs_dir, subpkg)
        verify_imports([
            os.path.splitext(tcmod)[0]
            for tcmod in included_subpkg_modules[subpkg]
        ], pkg, loc)

    return toolchains_path
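
# Hedged aside: a minimal, assumed stand-in for the verify_imports() sanity check used above,
# confirming that a module can be imported and that it was picked up from the expected directory.
# The helper name and its exact behaviour are illustrative, not EasyBuild's actual implementation.
import importlib
import os

def verify_import_from(module_name, pkg, expected_dir):
    """Import pkg.module_name and check that its source file lives under expected_dir."""
    module = importlib.import_module('%s.%s' % (pkg, module_name))
    loc = os.path.dirname(getattr(module, '__file__', '') or '')
    if not loc.startswith(os.path.abspath(expected_dir)):
        raise RuntimeError("%s.%s imported from %s, expected %s" % (pkg, module_name, loc, expected_dir))
    return module

# e.g. verify_import_from('foss', 'easybuild.toolchains', tcs_dir), assuming 'foss' is included
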
示例#40
0
    def configure_step(self):
        """Custom configuration procedure for Molpro: use 'configure -batch'."""

        if not os.path.isfile(self.license_token):
            if self.cfg['license_file'] is not None and os.path.isfile(self.cfg['license_file']):
                # put symlink in place to specified license file in $HOME/.molpro/token
                # other approaches (like defining $MOLPRO_KEY) don't seem to work
                self.cleanup_token_symlink = True
                mkdir(os.path.dirname(self.license_token))
                symlink(self.cfg['license_file'], self.license_token)
                self.log.debug("Symlinked %s to %s", self.cfg['license_file'], self.license_token)
            else:
                self.log.warning("No licence token found at either {0} or via 'license_file'".format(self.license_token))
        
        # Only do the rest of the configuration if we're building from source 
        if not self.cfg['precompiled_binaries']:
            # installation prefix
            self.cfg.update('configopts', "-prefix %s" % self.installdir)

            # compilers

            # compilers & MPI
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', "-%s -%s" % (os.environ['CC_SEQ'], os.environ['F90_SEQ']))
                if 'MPI_INC_DIR' in os.environ:
                    self.cfg.update('configopts', "-mpp -mppbase %s" % os.environ['MPI_INC_DIR'])
                else:
                    raise EasyBuildError("$MPI_INC_DIR not defined")
            else:
                self.cfg.update('configopts', "-%s -%s" % (os.environ['CC'], os.environ['F90']))

            # BLAS/LAPACK
            if 'BLAS_LIB_DIR' in os.environ:
                self.cfg.update('configopts', "-blas -blaspath %s" % os.environ['BLAS_LIB_DIR'])
            else:
                raise EasyBuildError("$BLAS_LIB_DIR not defined")

            if 'LAPACK_LIB_DIR' in os.environ:
                self.cfg.update('configopts', "-lapack -lapackpath %s" % os.environ['LAPACK_LIB_DIR'])
            else:
                raise EasyBuildError("$LAPACK_LIB_DIR not defined")

            # 32 vs 64 bit
            if self.toolchain.options.get('32bit', None):
                self.cfg.update('configopts', '-i4')
            else:
                self.cfg.update('configopts', '-i8')

            run_cmd("./configure -batch %s" % self.cfg['configopts'])

            cfgfile = os.path.join(self.cfg['start_dir'], 'CONFIG')
            cfgtxt = read_file(cfgfile)

            # determine original LAUNCHER value
            launcher_regex = re.compile('^LAUNCHER=(.*)$', re.M)
            res = launcher_regex.search(cfgtxt)
            if res:
                self.orig_launcher = res.group(1)
                self.log.debug("Found original value for LAUNCHER: %s", self.orig_launcher)
            else:
                raise EasyBuildError("Failed to determine LAUNCHER value")

            # determine full installation prefix
            prefix_regex = re.compile('^PREFIX=(.*)$', re.M)
            res = prefix_regex.search(cfgtxt)
            if res:
                self.full_prefix = res.group(1)
                self.log.debug("Found full installation prefix: %s", self.full_prefix)
            else:
                raise EasyBuildError("Failed to determine full installation prefix")

            # determine MPI launcher command that can be used during build/test
            # obtain command with specific number of cores (required by mpi_cmd_for), then replace that number with '%n'
            launcher = self.toolchain.mpi_cmd_for('%x', self.cfg['parallel'])
            launcher = launcher.replace(' %s' % self.cfg['parallel'], ' %n')

            # patch CONFIG file to change LAUNCHER definition, in order to avoid having to start mpd
            apply_regex_substitutions(cfgfile, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % launcher)])

            # reread CONFIG and log contents
            cfgtxt = read_file(cfgfile)
            self.log.info("Contents of CONFIG file:\n%s", cfgtxt)
示例#41
0
    def test_step(self):
        """Build and run tests included in the WRF distribution."""
        if self.cfg['runtest']:

            if self.cfg[
                    'buildtype'] in self.parallel_build_types and not build_option(
                        'mpi_tests'):
                self.log.info(
                    "Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                    self.cfg['buildtype'])
                return

            # get list of WRF test cases
            self.testcases = []
            if os.path.exists('test'):
                self.testcases = os.listdir('test')

            elif not self.dry_run:
                raise EasyBuildError(
                    "Test directory not found, failed to determine list of test cases"
                )

            # exclude 2d testcases in parallel WRF builds
            if self.cfg['buildtype'] in self.parallel_build_types:
                self.testcases = [
                    test for test in self.testcases if '2d_' not in test
                ]

            # exclude real testcases
            self.testcases = [
                test for test in self.testcases if not test.endswith("_real")
            ]

            self.log.debug("intermediate list of testcases: %s" %
                           self.testcases)

            # exclude tests that should not be run
            for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
                if test in self.testcases:
                    self.testcases.remove(test)

            # some tests hang when WRF is built with Intel compilers
            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                for test in ["em_heldsuarez"]:
                    if test in self.testcases:
                        self.testcases.remove(test)

            # determine parallel setting (1/2 of available processors + 1)
            n = self.cfg['parallel'] // 2 + 1

            # prepare run command

            # stack limit needs to be set to unlimited for WRF to work well
            if self.cfg['buildtype'] in self.parallel_build_types:
                test_cmd = "ulimit -s unlimited && %s && %s" % (
                    self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                    self.toolchain.mpi_cmd_for("./wrf.exe", n))
            else:
                test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe >rsl.error.0000 2>&1"

            def run_test():
                """Run a single test and check for success."""

                # regex to check for successful test run
                re_success = re.compile("SUCCESS COMPLETE WRF")

                # run test
                run_cmd(test_cmd, log_all=True, simple=True)

                # check for success
                txt = read_file('rsl.error.0000')
                if re_success.search(txt):
                    self.log.info("Test %s ran successfully." % test)

                else:
                    raise EasyBuildError(
                        "Test %s failed, pattern '%s' not found.", test,
                        re_success.pattern)

                # clean up stuff that gets in the way
                fn_prefs = [
                    "wrfinput_", "namelist.output", "wrfout_", "rsl.out.",
                    "rsl.error."
                ]
                for filename in os.listdir('.'):
                    for pref in fn_prefs:
                        if filename.startswith(pref):
                            remove_file(filename)
                            self.log.debug("Cleaned up file %s", filename)

            # build and run each test case individually
            for test in self.testcases:

                self.log.debug("Building and running test %s" % test)

                # build and install
                cmd = "tcsh ./compile %s %s" % (self.par, test)
                run_cmd(cmd, log_all=True, simple=True)

                # run test
                try:
                    prev_dir = change_dir('run')

                    if test in ["em_fire"]:

                        # handle tests with subtests separately
                        testdir = os.path.join("..", "test", test)

                        for subtest in [
                                x for x in os.listdir(testdir)
                                if os.path.isdir(os.path.join(testdir, x))
                        ]:

                            subtestdir = os.path.join(testdir, subtest)

                            # link required files
                            for filename in os.listdir(subtestdir):
                                if os.path.exists(filename):
                                    remove_file(filename)
                                symlink(os.path.join(subtestdir, filename),
                                        filename)

                            # run test
                            run_test()

                    else:

                        # run test
                        run_test()

                    change_dir(prev_dir)

                except OSError as err:
                    raise EasyBuildError(
                        "An error occured when running test %s: %s", test, err)
示例#42
0
    def test_step(self):
        """Run WPS test (requires large dataset to be downloaded). """

        wpsdir = None

        def run_wps_cmd(cmdname, mpi_cmd=True):
            """Run a WPS command, and check for success."""

            cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

            if mpi_cmd:
                if build_option('mpi_tests'):
                    cmd = self.toolchain.mpi_cmd_for(cmd, 1)
                else:
                    self.log.info(
                        "Skipping MPI test for %s, since MPI tests are disabled",
                        cmd)
                    return

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            re_success = re.compile("Successful completion of %s" % cmdname)
            if not re_success.search(out):
                raise EasyBuildError("%s.exe failed (pattern '%s' not found)?",
                                     cmdname, re_success.pattern)

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            wpsdir = os.path.join(self.builddir, self.wps_subdir)

            try:
                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                change_dir(tmpdir)

                # download data
                testdata_paths = []
                for testdata in self.cfg['testdata']:
                    path = self.obtain_file(testdata)
                    if not path:
                        raise EasyBuildError(
                            "Downloading file from %s failed?", testdata)
                    testdata_paths.append(path)

                # unpack data
                for path in testdata_paths:
                    srcdir = extract_file(path, tmpdir, change_into_dir=False)
                    change_dir(srcdir)

                namelist_file = os.path.join(tmpdir, 'namelist.wps')

                # GEOGRID

                # setup directories and files
                if LooseVersion(self.version) < LooseVersion("4.0"):
                    geog_data_dir = "geog"
                else:
                    geog_data_dir = "WPS_GEOG"
                for dir_name in os.listdir(os.path.join(tmpdir,
                                                        geog_data_dir)):
                    symlink(os.path.join(tmpdir, geog_data_dir, dir_name),
                            os.path.join(tmpdir, dir_name))

                # copy namelist.wps file and patch it for geogrid
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$",
                               r"\1 '%s'" % tmpdir)]
                apply_regex_substitutions(namelist_file, regex_subs)

                # GEOGRID.TBL
                geogrid_dir = os.path.join(tmpdir, 'geogrid')
                mkdir(geogrid_dir)
                symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                        os.path.join(geogrid_dir, 'GEOGRID.TBL'))

                # run geogrid.exe
                run_wps_cmd("geogrid")

                # UNGRIB

                # determine start and end time stamps of grib files
                grib_file_prefix = "fnl_"
                k = len(grib_file_prefix)
                fs = [
                    f for f in sorted(os.listdir('.'))
                    if f.startswith(grib_file_prefix)
                ]
                start = "%s:00:00" % fs[0][k:]
                end = "%s:00:00" % fs[-1][k:]

                # copy namelist.wps file and patch it for ungrib
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [
                    (r"^(\s*start_date\s*=\s*).*$",
                     r"\1 '%s','%s'," % (start, start)),
                    (r"^(\s*end_date\s*=\s*).*$",
                     r"\1 '%s','%s'," % (end, end)),
                ]
                apply_regex_substitutions(namelist_file, regex_subs)

                # copy correct Vtable
                vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
                if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW'),
                              os.path.join(tmpdir, 'Vtable'))
                elif os.path.exists(os.path.join(vtable_dir,
                                                 'Vtable.ARW.UPP')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'),
                              os.path.join(tmpdir, 'Vtable'))
                else:
                    raise EasyBuildError(
                        "Could not find Vtable file to use for testing ungrib")

                # run link_grib.csh script
                cmd = "%s %s*" % (os.path.join(
                    wpsdir, "link_grib.csh"), grib_file_prefix)
                run_cmd(cmd, log_all=True, simple=True)

                # run ungrib.exe
                run_wps_cmd("ungrib", mpi_cmd=False)

                # METGRID.TBL

                metgrid_dir = os.path.join(tmpdir, 'metgrid')
                mkdir(metgrid_dir)
                symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                        os.path.join(metgrid_dir, 'METGRID.TBL'))

                # run metgrid.exe
                run_wps_cmd('metgrid')

                # clean up
                change_dir(self.builddir)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError("Failed to run WPS test: %s", err)
    def configure_step(self):
        """Set some extra environment variables before configuring."""

        # enable verbose output if desired
        if self.cfg['verbose']:
            for x in ["CONFIGURE", "MAKEFILE"]:
                self.cfg.update('configopts', "-DTrilinos_VERBOSE_%s:BOOL=ON" % x)

        # compiler flags
        cflags = [os.getenv('CFLAGS')]
        cxxflags = [os.getenv('CXXFLAGS')]
        fflags = [os.getenv('FFLAGS')]

        ignore_cxx_seek_mpis = [toolchain.INTELMPI, toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]  #@UndefinedVariable
        ignore_cxx_seek_flag = "-DMPICH_IGNORE_CXX_SEEK"
        if self.toolchain.mpi_family() in ignore_cxx_seek_mpis:
            cflags.append(ignore_cxx_seek_flag)
            cxxflags.append(ignore_cxx_seek_flag)
            fflags.append(ignore_cxx_seek_flag)

        self.cfg.update('configopts', '-DCMAKE_C_FLAGS="%s"' % ' '.join(cflags))
        self.cfg.update('configopts', '-DCMAKE_CXX_FLAGS="%s"' % ' '.join(cxxflags))
        self.cfg.update('configopts', '-DCMAKE_Fortran_FLAGS="%s"' % ' '.join(fflags))

        # OpenMP
        if self.cfg['openmp']:
            self.cfg.update('configopts', "-DTrilinos_ENABLE_OpenMP:BOOL=ON")

        # MPI
        if self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', "-DTPL_ENABLE_MPI:BOOL=ON")

        # shared libraries
        if self.cfg['shared_libs']:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=ON")
        else:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=OFF")

        # release or debug build
        if self.toolchain.options['debug']:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=DEBUG")
        else:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=RELEASE")

        # enable full testing
        self.cfg.update('configopts', "-DTrilinos_ENABLE_TESTS:BOOL=ON")
        self.cfg.update('configopts', "-DTrilinos_ENABLE_ALL_FORWARD_DEP_PACKAGES:BOOL=ON")

        lib_re = re.compile("^lib(.*).a$")

        # BLAS, LAPACK
        for dep in ["BLAS", "LAPACK"]:
            self.cfg.update('configopts', '-DTPL_ENABLE_%s:BOOL=ON' % dep)
            libdirs = os.getenv('%s_LIB_DIR' % dep)
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                libdirs += ";%s/lib64" % get_software_root('GCC')
            self.cfg.update('configopts', '-D%s_LIBRARY_DIRS="%s"' % (dep, libdirs))
            libs = os.getenv('%s_MT_STATIC_LIBS' % dep).split(',')
            lib_names = ';'.join([lib_re.search(l).group(1) for l in libs])
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                # explicitly specify static lib!
                lib_names += ";libgfortran.a"
            self.cfg.update('configopts', '-D%s_LIBRARY_NAMES="%s"' % (dep, lib_names))

        # MKL
        if get_software_root('imkl') and LooseVersion(self.version) >= LooseVersion('12.12'):
            self.cfg.update('configopts', "-DTPL_ENABLE_MKL:BOOL=ON")
            self.cfg.update('configopts', '-DMKL_LIBRARY_DIRS:PATH="%s/lib/intel64"' % os.getenv('MKLROOT'))
            self.cfg.update('configopts', '-DMKL_INCLUDE_DIRS:PATH="%s/include"' % os.getenv('MKLROOT'))

        # UMFPACK is part of SuiteSparse
        suitesparse = get_software_root('SuiteSparse')
        if suitesparse:
            self.cfg.update('configopts', "-DTPL_ENABLE_UMFPACK:BOOL=ON")
            incdirs, libdirs, libnames = [], [], []
            for lib in ["UMFPACK", "CHOLMOD", "COLAMD", "AMD", "CCOLAMD", "CAMD"]:
                incdirs.append(os.path.join(suitesparse, lib, "Include"))
                libdirs.append(os.path.join(suitesparse, lib, "Lib"))
                libnames.append(lib.lower())

            # add SuiteSparse config lib, it is in recent versions of suitesparse
            libdirs.append(os.path.join(suitesparse, 'SuiteSparse_config'))
            libnames.append('suitesparseconfig')
            # because of "SuiteSparse_config.c:function SuiteSparse_tic: error: undefined reference to 'clock_gettime'"
            libnames.append('rt')

            # required to resolve METIS symbols in SuiteSparse's libcholmod.a
            # doesn't need to be full location, probably because it can be found via $LIBRARY_PATH
            # not easy to know whether it should come from METIS or ParMETIS...
            # see https://answers.launchpad.net/dorsal/+question/223167
            libnames.append('libmetis.a')

            self.cfg.update('configopts', '-DUMFPACK_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            self.cfg.update('configopts', '-DUMFPACK_LIBRARY_DIRS:PATH="%s"' % ';'.join(libdirs))
            self.cfg.update('configopts', '-DUMFPACK_LIBRARY_NAMES:STRING="%s"' % ';'.join(libnames))

        # BLACS
        if get_software_root('BLACS'):
            self.cfg.update('configopts', "-DTPL_ENABLE_BLACS:BOOL=ON")
            self.cfg.update('configopts', '-DBLACS_INCLUDE_DIRS:PATH="%s"' % os.getenv('BLACS_INC_DIR'))
            self.cfg.update('configopts', '-DBLACS_LIBRARY_DIRS:PATH="%s"' % os.getenv('BLACS_LIB_DIR'))
            blacs_lib_names = os.getenv('BLACS_STATIC_LIBS').split(',')
            blacs_lib_names = [lib_re.search(x).group(1) for x in blacs_lib_names]
            self.cfg.update('configopts', '-DBLACS_LIBRARY_NAMES:STRING="%s"' % (';'.join(blacs_lib_names)))

        # ScaLAPACK
        if get_software_root('ScaLAPACK'):
            self.cfg.update('configopts', "-DTPL_ENABLE_SCALAPACK:BOOL=ON")
            self.cfg.update('configopts', '-DSCALAPACK_INCLUDE_DIRS:PATH="%s"' % os.getenv('SCALAPACK_INC_DIR'))
            self.cfg.update('configopts', '-DSCALAPACK_LIBRARY_DIRS:PATH="%s;%s"' % (os.getenv('SCALAPACK_LIB_DIR'),
                                                                                    os.getenv('BLACS_LIB_DIR')))
        # PETSc
        petsc = get_software_root('PETSc')
        if petsc:
            self.cfg.update('configopts', "-DTPL_ENABLE_PETSC:BOOL=ON")
            incdirs = [os.path.join(petsc, "include")]
            self.cfg.update('configopts', '-DPETSC_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            petsc_libdirs = [
                             os.path.join(petsc, "lib"),
                             os.path.join(suitesparse, "UMFPACK", "Lib"),
                             os.path.join(suitesparse, "CHOLMOD", "Lib"),
                             os.path.join(suitesparse, "COLAMD", "Lib"),
                             os.path.join(suitesparse, "AMD", "Lib"),
                             os.getenv('FFTW_LIB_DIR'),
                             os.path.join(get_software_root('ParMETIS'), "Lib")
                             ]
            self.cfg.update('configopts', '-DPETSC_LIBRARY_DIRS:PATH="%s"' % ';'.join(petsc_libdirs))
            petsc_libnames = ["petsc", "umfpack", "cholmod", "colamd", "amd", "parmetis", "metis"]
            petsc_libnames += [lib_re.search(x).group(1) for x in os.getenv('FFTW_STATIC_LIBS').split(',')]
            self.cfg.update('configopts', '-DPETSC_LIBRARY_NAMES:STRING="%s"' % ';'.join(petsc_libnames))

        # other Third-Party Libraries (TPLs)
        deps = self.cfg.dependencies()
        builddeps = self.cfg.builddependencies() + ["SuiteSparse"]
        deps = [dep['name'] for dep in deps if not dep['name'] in builddeps]
        for dep in deps:
            deproot = get_software_root(dep)
            if deproot:
                depmap = {
                          'SCOTCH': 'Scotch',
                          }
                dep = depmap.get(dep, dep)
                self.cfg.update('configopts', "-DTPL_ENABLE_%s:BOOL=ON" % dep)
                incdir = os.path.join(deproot, "include")
                self.cfg.update('configopts', '-D%s_INCLUDE_DIRS:PATH="%s"' % (dep, incdir))
                libdir = os.path.join(deproot, "lib")
                self.cfg.update('configopts', '-D%s_LIBRARY_DIRS:PATH="%s"' % (dep, libdir))

        # extensions_step
        if self.cfg['all_exts']:
            self.cfg.update('configopts', "-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON")

        else:
            for ext in self.cfg['exts_list']:
                self.cfg.update('configopts', "-DTrilinos_ENABLE_%s=ON" % ext)

        # packages to skip
        skip_exts = self.cfg['skip_exts']
        if skip_exts:
            for ext in skip_exts:
                self.cfg.update('configopts', "-DTrilinos_ENABLE_%s:BOOL=OFF" % ext)

        # building in source dir not supported
        # + if the build directory is a long path, problems like "Argument list too long" may occur
        # cfr. https://github.com/trilinos/Trilinos/issues/2434
        # so, try to create build directory with shorter path length to build in
        salt = ''.join(random.choice(string.ascii_letters) for _ in range(5))
        self.short_start_dir = os.path.join(build_path(), self.name + '-' + salt)
        if os.path.exists(self.short_start_dir):
            raise EasyBuildError("Short start directory %s for Trilinos already exists?!", self.short_start_dir)

        self.log.info("Length of path to original start directory: %s", len(self.start_dir))
        self.log.info("Short start directory: %s (length: %d)", self.short_start_dir, len(self.short_start_dir))

        mkdir(self.short_start_dir)
        short_src_dir = os.path.join(self.short_start_dir, 'src')
        symlink(self.start_dir, short_src_dir)
        short_build_dir = os.path.join(self.short_start_dir, 'obj')
        obj_dir = os.path.join(self.builddir, 'obj')
        mkdir(obj_dir)
        symlink(obj_dir, short_build_dir)

        # configure using cmake
        super(EB_Trilinos, self).configure_step(srcdir=short_src_dir, builddir=short_build_dir)
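
# Hedged aside (standalone sketch): the lib_re handling above turns a comma-separated list of
# static libraries (as exposed via $BLAS_MT_STATIC_LIBS and friends) into the semicolon-separated
# bare names that the *_LIBRARY_NAMES CMake options expect; the input string is illustrative only.
import re

lib_re = re.compile("^lib(.*).a$")
static_libs = "libopenblas.a,liblapack.a"
lib_names = ';'.join(lib_re.search(lib).group(1) for lib in static_libs.split(','))
print(lib_names)  # openblas;lapack
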
示例#44
0
    def test_check_linked_shared_libs(self):
        """Test for check_linked_shared_libs function."""

        txt_path = os.path.join(self.test_prefix, 'test.txt')
        write_file(txt_path, "some text")

        broken_symlink_path = os.path.join(self.test_prefix, 'broken_symlink')
        symlink('/doesnotexist', broken_symlink_path, use_abspath_source=False)

        # result is always None for anything other than dynamically linked binaries or shared libraries
        self.assertEqual(check_linked_shared_libs(self.test_prefix), None)
        self.assertEqual(check_linked_shared_libs(txt_path), None)
        self.assertEqual(check_linked_shared_libs(broken_symlink_path), None)

        bin_ls_path = which('ls')

        os_type = get_os_type()
        if os_type == LINUX:
            out, _ = run_cmd("ldd %s" % bin_ls_path)
        elif os_type == DARWIN:
            out, _ = run_cmd("otool -L %s" % bin_ls_path)
        else:
            raise EasyBuildError("Unknown OS type: %s" % os_type)

        shlib_ext = get_shared_lib_ext()
        lib_path_regex = re.compile(
            r'(?P<lib_path>[^\s]*/lib[^ ]+\.%s[^ ]*)' % shlib_ext, re.M)
        lib_path = lib_path_regex.search(out).group(1)

        test_pattern_named_args = [
            # if no patterns are specified, result is always True
            {},
            {
                'required_patterns': ['/lib', shlib_ext]
            },
            {
                'banned_patterns': ['this_pattern_should_not_match']
            },
            {
                'required_patterns': ['/lib', shlib_ext],
                'banned_patterns': ['weirdstuff']
            },
        ]
        for pattern_named_args in test_pattern_named_args:
            # result is always None for anything other than dynamically linked binaries or shared libraries
            self.assertEqual(
                check_linked_shared_libs(self.test_prefix,
                                         **pattern_named_args), None)
            self.assertEqual(
                check_linked_shared_libs(txt_path, **pattern_named_args), None)
            self.assertEqual(
                check_linked_shared_libs(broken_symlink_path,
                                         **pattern_named_args), None)
            for path in (bin_ls_path, lib_path):
                error_msg = "Check on linked libs should pass for %s with %s" % (
                    path, pattern_named_args)
                self.assertTrue(
                    check_linked_shared_libs(path, **pattern_named_args),
                    error_msg)

        # also test with input that should result in failing check
        test_pattern_named_args = [
            {
                'required_patterns': ['this_pattern_will_not_match']
            },
            {
                'banned_patterns': ['/lib']
            },
            {
                'required_patterns': ['weirdstuff'],
                'banned_patterns': ['/lib', shlib_ext]
            },
        ]
        for pattern_named_args in test_pattern_named_args:
            # result is always None for anything other than dynamically linked binaries or shared libraries
            self.assertEqual(
                check_linked_shared_libs(self.test_prefix,
                                         **pattern_named_args), None)
            self.assertEqual(
                check_linked_shared_libs(txt_path, **pattern_named_args), None)
            self.assertEqual(
                check_linked_shared_libs(broken_symlink_path,
                                         **pattern_named_args), None)
            for path in (bin_ls_path, lib_path):
                error_msg = "Check on linked libs should fail for %s with %s" % (
                    path, pattern_named_args)
                self.assertFalse(
                    check_linked_shared_libs(path, **pattern_named_args),
                    error_msg)
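
# Hedged aside (Linux-only standalone sketch, using subprocess instead of EasyBuild's run_cmd):
# how the test above pulls a shared-library path out of 'ldd' output before passing it to
# check_linked_shared_libs; the helper name is made up for illustration.
import re
import subprocess

def first_linked_lib(binary_path, shlib_ext='so'):
    """Return the first shared-library path that 'ldd' reports for the given binary."""
    out = subprocess.run(['ldd', binary_path], capture_output=True, text=True).stdout
    lib_path_regex = re.compile(r'(?P<lib_path>[^\s]*/lib[^ ]+\.%s[^ ]*)' % shlib_ext, re.M)
    match = lib_path_regex.search(out)
    return match.group('lib_path') if match else None

# e.g. first_linked_lib('/bin/ls') might return something like '/lib/x86_64-linux-gnu/libselinux.so.1'
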
示例#46
0
    def configure_step(self):
        """Custom configuration procedure for NWChem."""

        # check whether a (valid) symlink to a .nwchemrc config file exists (via a dummy file if necessary)
        # fail early if the link is not what we expect, since running the test cases will likely fail in this case
        try:
            if os.path.exists(self.home_nwchemrc) or os.path.islink(
                    self.home_nwchemrc):
                # create a dummy file to check symlink
                if not os.path.exists(self.local_nwchemrc):
                    write_file(self.local_nwchemrc, 'dummy')

                self.log.debug(
                    "Contents of %s: %s", os.path.dirname(self.local_nwchemrc),
                    os.listdir(os.path.dirname(self.local_nwchemrc)))

                if os.path.islink(self.home_nwchemrc):
                    home_nwchemrc_target = os.readlink(self.home_nwchemrc)
                    if home_nwchemrc_target != self.local_nwchemrc:
                        raise EasyBuildError(
                            "Found %s, but it's not a symlink to %s. "
                            "Please (re)move %s while installing NWChem; it can be restored later",
                            self.home_nwchemrc, self.local_nwchemrc,
                            self.home_nwchemrc)
                # ok to remove, we'll recreate it anyway
                remove_file(self.local_nwchemrc)
        except (IOError, OSError) as err:
            raise EasyBuildError("Failed to validate %s symlink: %s",
                                 self.home_nwchemrc, err)

        # building NWChem in a long path name is an issue, so let's try to make sure we have a short one
        try:
            # NWChem insists that version is in name of build dir
            tmpdir = tempfile.mkdtemp(suffix='-%s-%s' %
                                      (self.name, self.version))
            # remove created directory, since we're not going to use it as is
            os.rmdir(tmpdir)
            # avoid having '[' or ']' characters in the build dir name, NWChem doesn't like that
            start_dir = tmpdir.replace('[', '_').replace(']', '_')
            mkdir(os.path.dirname(start_dir), parents=True)
            symlink(self.cfg['start_dir'], start_dir)
            change_dir(start_dir)
            self.cfg['start_dir'] = start_dir
        except OSError as err:
            raise EasyBuildError(
                "Failed to symlink build dir to a shorter path name: %s", err)

        # change to actual build dir
        change_dir('src')

        nwchem_modules = self.cfg['modules']

        # set required NWChem environment variables
        env.setvar('NWCHEM_TOP', self.cfg['start_dir'])
        if len(self.cfg['start_dir']) > 64:
            # workaround for:
            # "The directory name chosen for NWCHEM_TOP is longer than the maximum allowed value of 64 characters"
            # see also https://svn.pnl.gov/svn/nwchem/trunk/src/util/util_nwchem_srcdir.F
            self.setvar_env_makeopt('NWCHEM_LONG_PATHS', 'Y')

        env.setvar('NWCHEM_TARGET', self.cfg['target'])

        garoot = get_software_root('GlobalArrays')
        if garoot:
            self.setvar_env_makeopt('EXTERNAL_GA_PATH', garoot)
        else:
            env.setvar('MSG_COMMS', self.cfg['msg_comms'])
            env.setvar('ARMCI_NETWORK', self.cfg['armci_network'])
            if self.cfg['armci_network'] in ["OPENIB"]:
                env.setvar('IB_INCLUDE', "/usr/include")
                env.setvar('IB_LIB', "/usr/lib64")
                env.setvar('IB_LIB_NAME', "-libumad -libverbs -lpthread")

        if 'python' in self.cfg['modules']:
            python_root = get_software_root('Python')
            if not python_root:
                raise EasyBuildError(
                    "Python module not loaded, you should add Python as a dependency."
                )
            env.setvar('PYTHONHOME', python_root)
            pyver = '.'.join(get_software_version('Python').split('.')[0:2])
            env.setvar('PYTHONVERSION', pyver)
            # if libreadline is loaded, assume it was a dependency for Python
            # pass -lreadline to avoid linking issues (libpython2.7.a doesn't include readline symbols)
            libreadline = get_software_root('libreadline')
            if libreadline:
                libreadline_libdir = os.path.join(
                    libreadline, get_software_libdir('libreadline'))
                ncurses = get_software_root('ncurses')
                if not ncurses:
                    raise EasyBuildError(
                        "ncurses is not loaded, but required to link with libreadline"
                    )
                ncurses_libdir = os.path.join(ncurses,
                                              get_software_libdir('ncurses'))
                readline_libs = ' '.join([
                    os.path.join(libreadline_libdir, 'libreadline.a'),
                    os.path.join(ncurses_libdir, 'libcurses.a'),
                ])
                extra_libs = os.environ.get('EXTRA_LIBS', '')
                env.setvar('EXTRA_LIBS', ' '.join([extra_libs, readline_libs]))

        env.setvar('LARGE_FILES', 'TRUE')
        env.setvar('USE_NOFSCHECK', 'TRUE')
        env.setvar('CCSDTLR', 'y')  # enable CCSDTLR
        env.setvar(
            'CCSDTQ',
            'y')  # enable CCSDTQ (compilation is long, executable is big)

        if LooseVersion(self.version) >= LooseVersion("6.2"):
            env.setvar('MRCC_METHODS',
                       'y')  # enable multireference coupled cluster capability

        if LooseVersion(self.version) >= LooseVersion("6.5"):
            env.setvar(
                'EACCSD', 'y'
            )  # enable EOM electron-attachment coupled cluster capability
            env.setvar(
                'IPCCSD', 'y'
            )  # enable EOM ionization-potential coupled cluster capability
            env.setvar(
                'USE_NOIO',
                'TRUE')  # avoid doing I/O for the ddscf, mp2 and ccsd modules

        for var in ['USE_MPI', 'USE_MPIF', 'USE_MPIF4']:
            env.setvar(var, 'y')
        for var in ['CC', 'CXX', 'F90']:
            env.setvar('MPI_%s' % var, os.getenv('MPI%s' % var))

        libmpi = ""

        # for NWChem 6.6 and newer, $LIBMPI & co should no longer be
        # set, the correct values are determined by the NWChem build
        # procedure automatically, see
        # http://www.nwchem-sw.org/index.php/Compiling_NWChem#MPI_variables
        if LooseVersion(self.version) < LooseVersion("6.6"):
            env.setvar('MPI_LOC', os.path.dirname(os.getenv('MPI_INC_DIR')))
            env.setvar('MPI_LIB', os.getenv('MPI_LIB_DIR'))
            env.setvar('MPI_INCLUDE', os.getenv('MPI_INC_DIR'))

            mpi_family = self.toolchain.mpi_family()
            if mpi_family in [toolchain.OPENMPI]:
                ompi_ver = get_software_version('OpenMPI')
                if LooseVersion(ompi_ver) < LooseVersion("1.10"):
                    if LooseVersion(ompi_ver) < LooseVersion("1.8"):
                        libmpi = "-lmpi_f90 -lmpi_f77 -lmpi -ldl -Wl,--export-dynamic -lnsl -lutil"
                    else:
                        libmpi = "-lmpi_usempi -lmpi_mpifh -lmpi"
                else:
                    libmpi = "-lmpi_usempif08 -lmpi_usempi_ignore_tkr -lmpi_mpifh -lmpi"
            elif mpi_family in [toolchain.INTELMPI]:
                if self.cfg['armci_network'] in ["MPI-MT"]:
                    libmpi = "-lmpigf -lmpigi -lmpi_ilp64 -lmpi_mt"
                else:
                    libmpi = "-lmpigf -lmpigi -lmpi_ilp64 -lmpi"
            elif mpi_family in [toolchain.MPICH, toolchain.MPICH2]:
                libmpi = "-lmpichf90 -lmpich -lopa -lmpl -lrt -lpthread"
            else:
                raise EasyBuildError("Don't know how to set LIBMPI for %s",
                                     mpi_family)
            env.setvar('LIBMPI', libmpi)

        if not garoot:
            if self.cfg['armci_network'] in ["OPENIB"]:
                libmpi += " -libumad -libverbs -lpthread"

        # compiler optimization flags: set environment variables _and_ add them to list of make options
        self.setvar_env_makeopt('COPTIMIZE', os.getenv('CFLAGS'))
        self.setvar_env_makeopt('FOPTIMIZE', os.getenv('FFLAGS'))

        # BLAS and ScaLAPACK
        mpi_lib_dirs = ' '.join('-L' + d
                                for d in os.getenv('MPI_LIB_DIR').split())
        self.setvar_env_makeopt(
            'BLASOPT', ' '.join([
                os.getenv('LDFLAGS'), mpi_lib_dirs,
                os.getenv('LIBSCALAPACK_MT'), libmpi
            ]))

        # Setting LAPACK_LIB is required from 7.0.0 onwards.
        self.setvar_env_makeopt('LAPACK_LIB', os.getenv('LIBLAPACK'))

        self.setvar_env_makeopt(
            'SCALAPACK',
            '%s %s' % (os.getenv('LDFLAGS'), os.getenv('LIBSCALAPACK_MT')))
        if self.toolchain.options['i8']:
            size = 8
            self.setvar_env_makeopt('USE_SCALAPACK_I8', 'y')
            self.cfg.update('lib_defines', '-DSCALAPACK_I8')
        else:
            self.setvar_env_makeopt('HAS_BLAS', 'yes')
            self.setvar_env_makeopt('USE_SCALAPACK', 'y')
            size = 4

        # set sizes
        for lib in ['BLAS', 'LAPACK', 'SCALAPACK']:
            self.setvar_env_makeopt('%s_SIZE' % lib, str(size))

        env.setvar('NWCHEM_MODULES', nwchem_modules)

        env.setvar('LIB_DEFINES', self.cfg['lib_defines'])

        # clean first (why not)
        run_cmd("make clean", simple=True, log_all=True, log_ok=True)

        # configure build
        cmd = "make %s nwchem_config" % self.cfg['buildopts']
        run_cmd(cmd, simple=True, log_all=True, log_ok=True, log_output=True)
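
    # NOTE: the setvar_env_makeopt() helper used throughout this configure step is defined elsewhere
    # in this easyblock; a minimal sketch of such a helper (an assumption, not necessarily the
    # verbatim implementation) could look like this:
    def setvar_env_makeopt(self, name, value):
        """Set a variable in the environment, and also pass it along as a make option."""
        env.setvar(name, value)
        self.cfg.update('buildopts', "%s='%s'" % (name, value))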
Example #47
    def test_cases_step(self):
        """Run provided list of test cases, or provided examples is no test cases were specified."""

        # run all examples if no test cases were specified
        # order and grouping is important for some of these tests (e.g., [o]h3tr*)
        # Some of the examples are excluded, for the reasons listed below:
        # missing md parameter files: dna.nw, mache.nw, 18c6NaK.nw, membrane.nw, sdm.nw
        # method not implemented (unknown theory) or keyword not found: triplet.nw, C2H6.nw, pspw_MgO.nw
        #                                                              ccsdt_polar_small.nw, CG.nw
        # no convergence: diamond.nw
        # Too much memory required: ccsd_polar_big.nw
        if isinstance(self.cfg['tests'], bool):
            examples = [
                ('qmd', ['3carbo_dft.nw', '3carbo.nw', 'h2o_scf.nw']),
                ('pspw',
                 ['C2.nw', 'C6.nw', 'Carbene.nw', 'Na16.nw', 'NaCl.nw']),
                ('tcepolar', ['ccsd_polar_small.nw']),
                ('dirdyvtst/h3', ['h3tr1.nw', 'h3tr2.nw']),
                ('dirdyvtst/h3', ['h3tr3.nw']), ('dirdyvtst/h3', ['h3tr4.nw']),
                ('dirdyvtst/h3', ['h3tr5.nw']),
                ('dirdyvtst/oh3', ['oh3tr1.nw', 'oh3tr2.nw']),
                ('dirdyvtst/oh3', ['oh3tr3.nw']),
                ('dirdyvtst/oh3', ['oh3tr4.nw']),
                ('dirdyvtst/oh3', ['oh3tr5.nw']),
                ('pspw/session1', [
                    'band.nw', 'si4.linear.nw', 'si4.rhombus.nw',
                    'S2-drift.nw', 'silicon.nw', 'S2.nw', 'si4.rectangle.nw'
                ]), ('md/myo', ['myo.nw']), ('md/nak', ['NaK.nw']),
                ('md/crown', ['crown.nw']), ('md/hrc', ['hrc.nw']),
                ('md/benzene', ['benzene.nw'])
            ]

            self.cfg['tests'] = [(os.path.join(self.examples_dir, d), l)
                                 for (d, l) in examples]
            self.log.info("List of examples to be run as test cases: %s" %
                          self.cfg['tests'])

        try:
            # symlink $HOME/.nwchemrc to local copy of default nwchemrc
            default_nwchemrc = os.path.join(self.installdir, 'data',
                                            'default.nwchemrc')

            # make a local copy of the default .nwchemrc file at a fixed path, so we can symlink to it
            # this makes sure that multiple parallel builds can reuse the same symlink, even for different builds
            # there is apparently no way to point NWChem to a particular config file other than $HOME/.nwchemrc
            try:
                local_nwchemrc_dir = os.path.dirname(self.local_nwchemrc)
                if not os.path.exists(local_nwchemrc_dir):
                    os.makedirs(local_nwchemrc_dir)
                shutil.copy2(default_nwchemrc, self.local_nwchemrc)

                # only try to create symlink if it's not there yet
                # we've verified earlier that the symlink is what we expect it to be if it's there
                if not os.path.islink(self.home_nwchemrc):
                    symlink(self.local_nwchemrc, self.home_nwchemrc)
            except OSError as err:
                raise EasyBuildError("Failed to symlink %s to %s: %s",
                                     self.home_nwchemrc, self.local_nwchemrc,
                                     err)

            # run tests, keep track of fail ratio
            cwd = os.getcwd()

            fail = 0.0
            tot = 0.0

            success_regexp = re.compile(r"Total times\s*cpu:.*wall:.*")

            test_cases_logfn = os.path.join(self.installdir, config.log_path(),
                                            'test_cases.log')
            test_cases_log = open(test_cases_logfn, "w")

            for (testdir, tests) in self.cfg['tests']:

                # run test in a temporary dir
                tmpdir = tempfile.mkdtemp(prefix='nwchem_test_')
                change_dir(tmpdir)

                # copy all files in test case dir
                for item in os.listdir(testdir):
                    test_file = os.path.join(testdir, item)
                    if os.path.isfile(test_file):
                        self.log.debug("Copying %s to %s" %
                                       (test_file, tmpdir))
                        shutil.copy2(test_file, tmpdir)

                # run tests
                for testx in tests:
                    cmd = "nwchem %s" % testx
                    msg = "Running test '%s' (from %s) in %s..." % (
                        cmd, testdir, tmpdir)
                    self.log.info(msg)
                    test_cases_log.write("\n%s\n" % msg)
                    (out, ec) = run_cmd(cmd,
                                        simple=False,
                                        log_all=False,
                                        log_ok=False,
                                        log_output=True)

                    # check exit code and output
                    if ec:
                        msg = "Test %s failed (exit code: %s)!" % (testx, ec)
                        self.log.warning(msg)
                        test_cases_log.write('FAIL: %s' % msg)
                        fail += 1
                    else:
                        if success_regexp.search(out):
                            msg = "Test %s successful!" % testx
                            self.log.info(msg)
                            test_cases_log.write('SUCCESS: %s' % msg)
                        else:
                            msg = "No 'Total times' found for test %s (but exit code is %s)!" % (
                                testx, ec)
                            self.log.warning(msg)
                            test_cases_log.write('FAIL: %s' % msg)
                            fail += 1

                    test_cases_log.write("\nOUTPUT:\n\n%s\n\n" % out)

                    tot += 1

                # go back
                change_dir(cwd)
                shutil.rmtree(tmpdir)

            fail_ratio = fail / tot
            fail_pcnt = fail_ratio * 100

            msg = "%d of %d tests failed (%s%%)!" % (fail, tot, fail_pcnt)
            self.log.info(msg)
            test_cases_log.write('\n\nSUMMARY: %s' % msg)

            test_cases_log.close()
            self.log.info("Log for test cases saved at %s" % test_cases_logfn)

            if fail_ratio > self.cfg['max_fail_ratio']:
                max_fail_pcnt = self.cfg['max_fail_ratio'] * 100
                raise EasyBuildError(
                    "Over %s%% of test cases failed, assuming broken build.",
                    max_fail_pcnt)

            # cleanup
            try:
                shutil.rmtree(self.examples_dir)
                shutil.rmtree(local_nwchemrc_dir)
            except OSError as err:
                raise EasyBuildError("Cleanup failed: %s", err)

            # set post msg w.r.t. cleaning up $HOME/.nwchemrc symlink
            self.postmsg += "\nRemember to clean up %s after all NWChem builds are finished." % self.home_nwchemrc

        except OSError as err:
            raise EasyBuildError("Failed to run test cases: %s", err)
Example #48
    def configure_step(self):
        """Set some extra environment variables before configuring."""

        # enable verbose output if desired
        if self.cfg['verbose']:
            for x in ["CONFIGURE", "MAKEFILE"]:
                self.cfg.update('configopts', "-DTrilinos_VERBOSE_%s:BOOL=ON" % x)

        # compiler flags
        cflags = [os.getenv('CFLAGS')]
        cxxflags = [os.getenv('CXXFLAGS')]
        fflags = [os.getenv('FFLAGS')]

        ignore_cxx_seek_mpis = [toolchain.INTELMPI, toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]  #@UndefinedVariable
        ignore_cxx_seek_flag = "-DMPICH_IGNORE_CXX_SEEK"
        if self.toolchain.mpi_family() in ignore_cxx_seek_mpis:
            cflags.append(ignore_cxx_seek_flag)
            cxxflags.append(ignore_cxx_seek_flag)
            fflags.append(ignore_cxx_seek_flag)

        self.cfg.update('configopts', '-DCMAKE_C_FLAGS="%s"' % ' '.join(cflags))
        self.cfg.update('configopts', '-DCMAKE_CXX_FLAGS="%s"' % ' '.join(cxxflags))
        self.cfg.update('configopts', '-DCMAKE_Fortran_FLAGS="%s"' % ' '.join(fflags))

        # OpenMP
        if self.cfg['openmp']:
            self.cfg.update('configopts', "-DTrilinos_ENABLE_OpenMP:BOOL=ON")

        # MPI
        if self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', "-DTPL_ENABLE_MPI:BOOL=ON")

        # shared libraries
        if self.cfg['shared_libs']:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=ON")
        else:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=OFF")

        # release or debug build type
        if self.toolchain.options['debug']:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=DEBUG")
        else:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=RELEASE")

        # enable full testing
        self.cfg.update('configopts', "-DTrilinos_ENABLE_TESTS:BOOL=ON")
        self.cfg.update('configopts', "-DTrilinos_ENABLE_ALL_FORWARD_DEP_PACKAGES:BOOL=ON")

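        # regex to strip the 'lib' prefix and '.a' suffix from static library file names,
        # since the CMake *_LIBRARY_NAMES options expect bare library names (e.g. 'liblapack.a' -> 'lapack')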
        lib_re = re.compile("^lib(.*).a$")

        # BLAS, LAPACK
        for dep in ["BLAS", "LAPACK"]:
            self.cfg.update('configopts', '-DTPL_ENABLE_%s:BOOL=ON' % dep)
            libdirs = os.getenv('%s_LIB_DIR' % dep)
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                libdirs += ";%s/lib64" % get_software_root('GCC')
            self.cfg.update('configopts', '-D%s_LIBRARY_DIRS="%s"' % (dep, libdirs))
            libs = os.getenv('%s_MT_STATIC_LIBS' % dep).split(',')
            lib_names = ';'.join([lib_re.search(l).group(1) for l in libs])
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                # explicitly specify static lib!
                lib_names += ";libgfortran.a"
            self.cfg.update('configopts', '-D%s_LIBRARY_NAMES="%s"' % (dep, lib_names))

        # MKL
        if get_software_root('imkl') and LooseVersion(self.version) >= LooseVersion('12.12'):
            self.cfg.update('configopts', "-DTPL_ENABLE_MKL:BOOL=ON")
            self.cfg.update('configopts', '-DMKL_LIBRARY_DIRS:PATH="%s/lib/intel64"' % os.getenv('MKLROOT'))
            self.cfg.update('configopts', '-DMKL_INCLUDE_DIRS:PATH="%s/include"' % os.getenv('MKLROOT'))

        # UMFPACK is part of SuiteSparse
        suitesparse = get_software_root('SuiteSparse')
        if suitesparse:
            self.cfg.update('configopts', "-DTPL_ENABLE_UMFPACK:BOOL=ON")
            incdirs, libdirs, libnames = [], [], []
            for lib in ["UMFPACK", "CHOLMOD", "COLAMD", "AMD", "CCOLAMD", "CAMD"]:
                incdirs.append(os.path.join(suitesparse, lib, "Include"))
                libdirs.append(os.path.join(suitesparse, lib, "Lib"))
                libnames.append(lib.lower())

            # add SuiteSparse config lib, it is in recent versions of suitesparse
            libdirs.append(os.path.join(suitesparse, 'SuiteSparse_config'))
            libnames.append('suitesparseconfig')
            # because of "SuiteSparse_config.c:function SuiteSparse_tic: error: undefined reference to 'clock_gettime'"
            libnames.append('rt')

            # required to resolve METIS symbols in SuiteSparse's libcholmod.a
            # doesn't need to be full location, probably because it can be found via $LIBRARY_PATH
            # not easy to know whether it should come from METIS or ParMETIS...
            # see https://answers.launchpad.net/dorsal/+question/223167
            libnames.append('libmetis.a')

            self.cfg.update('configopts', '-DUMFPACK_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            self.cfg.update('configopts', '-DUMFPACK_LIBRARY_DIRS:PATH="%s"' % ';'.join(libdirs))
            self.cfg.update('configopts', '-DUMFPACK_LIBRARY_NAMES:STRING="%s"' % ';'.join(libnames))

        # BLACS
        if get_software_root('BLACS'):
            self.cfg.update('configopts', "-DTPL_ENABLE_BLACS:BOOL=ON")
            self.cfg.update('configopts', '-DBLACS_INCLUDE_DIRS:PATH="%s"' % os.getenv('BLACS_INC_DIR'))
            self.cfg.update('configopts', '-DBLACS_LIBRARY_DIRS:PATH="%s"' % os.getenv('BLACS_LIB_DIR'))
            blacs_lib_names = os.getenv('BLACS_STATIC_LIBS').split(',')
            blacs_lib_names = [lib_re.search(x).group(1) for x in blacs_lib_names]
            self.cfg.update('configopts', '-DBLACS_LIBRARY_NAMES:STRING="%s"' % (';'.join(blacs_lib_names)))

        # ScaLAPACK
        if get_software_root('ScaLAPACK'):
            self.cfg.update('configopts', "-DTPL_ENABLE_SCALAPACK:BOOL=ON")
            self.cfg.update('configopts', '-DSCALAPACK_INCLUDE_DIRS:PATH="%s"' % os.getenv('SCALAPACK_INC_DIR'))
            self.cfg.update('configopts', '-DSCALAPACK_LIBRARY_DIRS:PATH="%s;%s"' % (os.getenv('SCALAPACK_LIB_DIR'),
                                                                                    os.getenv('BLACS_LIB_DIR')))
        # PETSc
        petsc = get_software_root('PETSc')
        if petsc:
            self.cfg.update('configopts', "-DTPL_ENABLE_PETSC:BOOL=ON")
            incdirs = [os.path.join(petsc, "include")]
            self.cfg.update('configopts', '-DPETSC_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            petsc_libdirs = [
                             os.path.join(petsc, "lib"),
                             os.path.join(suitesparse, "UMFPACK", "Lib"),
                             os.path.join(suitesparse, "CHOLMOD", "Lib"),
                             os.path.join(suitesparse, "COLAMD", "Lib"),
                             os.path.join(suitesparse, "AMD", "Lib"),
                             os.getenv('FFTW_LIB_DIR'),
                             os.path.join(get_software_root('ParMETIS'), "Lib")
                             ]
            self.cfg.update('configopts', '-DPETSC_LIBRARY_DIRS:PATH="%s"' % ';'.join(petsc_libdirs))
            petsc_libnames = ["petsc", "umfpack", "cholmod", "colamd", "amd", "parmetis", "metis"]
            petsc_libnames += [lib_re.search(x).group(1) for x in os.getenv('FFTW_STATIC_LIBS').split(',')]
            self.cfg.update('configopts', '-DPETSC_LIBRARY_NAMES:STRING="%s"' % ';'.join(petsc_libnames))

        # other Third-Party Libraries (TPLs)
        deps = self.cfg.dependencies()
        builddeps = self.cfg.builddependencies() + ["SuiteSparse"]
        deps = [dep['name'] for dep in deps if dep['name'] not in builddeps]
        for dep in deps:
            deproot = get_software_root(dep)
            if deproot:
                depmap = {
                          'SCOTCH': 'Scotch',
                          }
                dep = depmap.get(dep, dep)
                self.cfg.update('configopts', "-DTPL_ENABLE_%s:BOOL=ON" % dep)
                incdir = os.path.join(deproot, "include")
                self.cfg.update('configopts', '-D%s_INCLUDE_DIRS:PATH="%s"' % (dep, incdir))
                libdir = os.path.join(deproot, "lib")
                self.cfg.update('configopts', '-D%s_LIBRARY_DIRS:PATH="%s"' % (dep, libdir))

        # extensions (Trilinos packages to enable)
        if self.cfg['all_exts']:
            self.cfg.update('configopts', "-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON")

        else:
            for ext in self.cfg['exts_list']:
                self.cfg.update('configopts', "-DTrilinos_ENABLE_%s=ON" % ext)

        # packages to skip
        skip_exts = self.cfg['skip_exts']
        if skip_exts:
            for ext in skip_exts:
                self.cfg.update('configopts', "-DTrilinos_ENABLE_%s:BOOL=OFF" % ext)

        # building in source dir not supported
        # + if the build directory is a long path, problems like "Argument list too long" may occur
        # cfr. https://github.com/trilinos/Trilinos/issues/2434
        # so, try to create build directory with shorter path length to build in
        salt = ''.join(random.choice(string.ascii_letters) for _ in range(5))
        self.short_start_dir = os.path.join(build_path(), self.name + '-' + salt)
        if os.path.exists(self.short_start_dir):
            raise EasyBuildError("Short start directory %s for Trilinos already exists?!", self.short_start_dir)

        self.log.info("Length of path to original start directory: %s", len(self.start_dir))
        self.log.info("Short start directory: %s (length: %d)", self.short_start_dir, len(self.short_start_dir))

        mkdir(self.short_start_dir)
        short_src_dir = os.path.join(self.short_start_dir, 'src')
        symlink(self.start_dir, short_src_dir)
        short_build_dir = os.path.join(self.short_start_dir, 'obj')
        obj_dir = os.path.join(self.builddir, 'obj')
        mkdir(obj_dir)
        symlink(obj_dir, short_build_dir)

        # configure using cmake
        super(EB_Trilinos, self).configure_step(srcdir=short_src_dir, builddir=short_build_dir)
Example #49
    def test_step(self):
        """Run WPS test (requires large dataset to be downloaded). """

        wpsdir = None

        def run_wps_cmd(cmdname, mpi_cmd=True):
            """Run a WPS command, and check for success."""

            cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

            if mpi_cmd:
                if build_option('mpi_tests'):
                    cmd = self.toolchain.mpi_cmd_for(cmd, 1)
                else:
                    self.log.info("Skipping MPI test for %s, since MPI tests are disabled", cmd)
                    return

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            re_success = re.compile("Successful completion of %s" % cmdname)
            if not re_success.search(out):
                raise EasyBuildError("%s.exe failed (pattern '%s' not found)?", cmdname, re_success.pattern)

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            wpsdir = os.path.join(self.builddir, self.wps_subdir)

            try:
                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                change_dir(tmpdir)

                # download data
                testdata_paths = []
                for testdata in self.cfg['testdata']:
                    path = self.obtain_file(testdata)
                    if not path:
                        raise EasyBuildError("Downloading file from %s failed?", testdata)
                    testdata_paths.append(path)

                # unpack data
                for path in testdata_paths:
                    extract_file(path, tmpdir)

                namelist_file = os.path.join(tmpdir, 'namelist.wps')

                # GEOGRID

                # setup directories and files
                if LooseVersion(self.version) < LooseVersion("4.0"):
                    geog_data_dir = "geog"
                else:
                    geog_data_dir = "WPS_GEOG"
                for dir_name in os.listdir(os.path.join(tmpdir, geog_data_dir)):
                    symlink(os.path.join(tmpdir, geog_data_dir, dir_name), os.path.join(tmpdir, dir_name))

                # copy namelist.wps file and patch it for geogrid
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir)]
                apply_regex_substitutions(namelist_file, regex_subs)

                # GEOGRID.TBL
                geogrid_dir = os.path.join(tmpdir, 'geogrid')
                mkdir(geogrid_dir)
                symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                        os.path.join(geogrid_dir, 'GEOGRID.TBL'))

                # run geogrid.exe
                run_wps_cmd("geogrid")

                # UNGRIB

                # determine start and end time stamps of grib files
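                # time stamps are derived from the grib file names: everything after the 'fnl_' prefix,
                # with ':00:00' appended, is used as start/end date in namelist.wps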
                grib_file_prefix = "fnl_"
                k = len(grib_file_prefix)
                fs = [f for f in sorted(os.listdir('.')) if f.startswith(grib_file_prefix)]
                start = "%s:00:00" % fs[0][k:]
                end = "%s:00:00" % fs[-1][k:]

                # copy namelist.wps file and patch it for ungrib
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [
                    (r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start)),
                    (r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end)),
                ]
                apply_regex_substitutions(namelist_file, regex_subs)

                # copy correct Vtable
                vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
                if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW'), os.path.join(tmpdir, 'Vtable'))
                elif os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW.UPP')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'), os.path.join(tmpdir, 'Vtable'))
                else:
                    raise EasyBuildError("Could not find Vtable file to use for testing ungrib")

                # run link_grib.csh script
                cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
                run_cmd(cmd, log_all=True, simple=True)

                # run ungrib.exe
                run_wps_cmd("ungrib", mpi_cmd=False)

                # METGRID.TBL

                metgrid_dir = os.path.join(tmpdir, 'metgrid')
                mkdir(metgrid_dir)
                symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                        os.path.join(metgrid_dir, 'METGRID.TBL'))

                # run metgrid.exe
                run_wps_cmd('metgrid')

                # clean up
                change_dir(self.builddir)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError("Failed to run WPS test: %s", err)
Example #50
    def test_step(self):
        """Build and run tests included in the WRF distribution."""
        if self.cfg['runtest']:

            if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
                self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                              self.cfg['buildtype'])
                return

            # get list of WRF test cases
            self.testcases = []
            if os.path.exists('test'):
                self.testcases = os.listdir('test')

            elif not self.dry_run:
                raise EasyBuildError("Test directory not found, failed to determine list of test cases")

            # exclude 2d testcases in parallel WRF builds
            if self.cfg['buildtype'] in self.parallel_build_types:
                self.testcases = [test for test in self.testcases if '2d_' not in test]

            # exclude real testcases
            self.testcases = [test for test in self.testcases if not test.endswith("_real")]

            self.log.debug("intermediate list of testcases: %s" % self.testcases)

            # exclude tests that should not be run
            for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
                if test in self.testcases:
                    self.testcases.remove(test)

            # some tests hang when WRF is built with Intel compilers
            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                for test in ["em_heldsuarez"]:
                    if test in self.testcases:
                        self.testcases.remove(test)

            # determine parallel setting (1/2 of available processors + 1)
            n = self.cfg['parallel'] // 2 + 1

            # prepare run command

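            # mpi_cmd_for() wraps a command in the toolchain's MPI launcher,
            # e.g. something along the lines of "mpirun -n <ranks> ./wrf.exe" (exact form depends on the MPI library)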
            # stack limit needs to be set to unlimited for WRF to work well
            if self.cfg['buildtype'] in self.parallel_build_types:
                test_cmd = "ulimit -s unlimited && %s && %s" % (self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                                                                self.toolchain.mpi_cmd_for("./wrf.exe", n))
            else:
                test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe >rsl.error.0000 2>&1"

            def run_test():
                """Run a single test and check for success."""

                # regex to check for successful test run
                re_success = re.compile("SUCCESS COMPLETE WRF")

                # run test
                run_cmd(test_cmd, log_all=True, simple=True)

                # check for success
                txt = read_file('rsl.error.0000')
                if re_success.search(txt):
                    self.log.info("Test %s ran successfully." % test)

                else:
                    raise EasyBuildError("Test %s failed, pattern '%s' not found.", test, re_success.pattern)

                # clean up stuff that gets in the way
                fn_prefs = ["wrfinput_", "namelist.output", "wrfout_", "rsl.out.", "rsl.error."]
                for filename in os.listdir('.'):
                    for pref in fn_prefs:
                        if filename.startswith(pref):
                            remove_file(filename)
                            self.log.debug("Cleaned up file %s", filename)

            # build and run each test case individually
            for test in self.testcases:

                self.log.debug("Building and running test %s" % test)

                # build and install
                cmd = "tcsh ./compile %s %s" % (self.par, test)
                run_cmd(cmd, log_all=True, simple=True)

                # run test
                try:
                    prev_dir = change_dir('run')

                    if test in ["em_fire"]:

                        # handle tests with subtests separately
                        testdir = os.path.join("..", "test", test)

                        for subtest in [x for x in os.listdir(testdir) if os.path.isdir(os.path.join(testdir, x))]:

                            subtestdir = os.path.join(testdir, subtest)

                            # link required files
                            for filename in os.listdir(subtestdir):
                                if os.path.exists(filename):
                                    remove_file(filename)
                                symlink(os.path.join(subtestdir, filename), filename)

                            # run test
                            run_test()

                    else:

                        # run test
                        run_test()

                    change_dir(prev_dir)

                except OSError as err:
                    raise EasyBuildError("An error occured when running test %s: %s", test, err)
Example #51
    def install_step(self):
        """Custom install procedure for QScintilla."""

        super(EB_QScintilla, self).install_step()

        # also install Python bindings if Python is included as a dependency
        python = get_software_root('Python')
        if python:
            pydir = os.path.join(self.cfg['start_dir'], 'Python')
            try:
                os.chdir(pydir)
            except OSError as err:
                raise EasyBuildError("Failed to change to %s: %s", pydir, err)

            # apparently this directory has to be there
            qsci_sipdir = os.path.join(self.installdir, 'share', 'sip',
                                       self.pyqt_pkg_name)
            mkdir(qsci_sipdir, parents=True)

            pylibdir = os.path.join(det_pylibdir(), self.pyqt_pkg_name)
            pyshortver = '.'.join(
                get_software_version('Python').split('.')[:2])
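            # e.g. pyshortver is '3.8' when the Python dependency is version 3.8.6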

            sip_incdir = find_glob_pattern(
                os.path.join(self.pyqt_root, 'include',
                             'python%s*' % pyshortver), False)
            # in case PyQt5's sip was installed in directories that are specific to each version of python
            # as could happen with multi_deps
            pyqt_sipdir = find_glob_pattern(
                os.path.join(self.pyqt_root, 'share', 'python%s*' % pyshortver,
                             'site-packages', 'sip', self.pyqt_pkg_name),
                False)
            # fall back to a single sipdir
            if not pyqt_sipdir:
                pyqt_sipdir = os.path.join(self.pyqt_root, 'share', 'sip',
                                           self.pyqt_pkg_name)

            cfgopts = [
                '--destdir %s' % os.path.join(self.installdir, pylibdir),
                '--qsci-sipdir %s' % qsci_sipdir,
                '--qsci-incdir %s' % os.path.join(self.installdir, 'include'),
                '--qsci-libdir %s' % os.path.join(self.installdir, 'lib'),
                '--pyqt-sipdir %s' % pyqt_sipdir,
                '--apidir %s' %
                os.path.join(self.installdir, 'qsci', 'api', 'python'),
                '--no-stubs',
            ]
            if sip_incdir:
                cfgopts += ['--sip-incdir %s' % sip_incdir]

            if LooseVersion(self.version) >= LooseVersion('2.10.7'):
                cfgopts.append('--no-dist-info')

            # This flag was added in version 2.11
            if LooseVersion(self.version) >= LooseVersion('2.11'):
                cfgopts.append("--pyqt=%s" % self.pyqt_pkg_name)

            run_cmd("python configure.py %s" % ' '.join(cfgopts))

            super(EB_QScintilla, self).build_step()
            super(EB_QScintilla, self).install_step()

            target_dir = os.path.join(self.installdir, pylibdir)
            pyqt_pylibdir = os.path.join(self.pyqt_root, pylibdir)
            try:
                os.chdir(target_dir)
                for entry in [
                        x for x in os.listdir(pyqt_pylibdir)
                        if not x.startswith('__init__.py')
                ]:
                    symlink(os.path.join(pyqt_pylibdir, entry),
                            os.path.join(target_dir, entry))
            except OSError as err:
                raise EasyBuildError(
                    "Failed to symlink PyQt Python bindings in %s: %s",
                    target_dir, err)

            # also requires empty __init__.py file to ensure Python modules can be imported from this location
            write_file(os.path.join(target_dir, '__init__.py'), '')
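
# NOTE: the find_glob_pattern() helper used above comes from the EasyBuild easyblocks code base;
# the sketch below is an assumption about what it does, not the verbatim implementation:
def find_glob_pattern(glob_pattern, fail_on_no_match=True):
    """Return the single path matching glob_pattern; return None if no match is required to exist."""
    res = glob.glob(glob_pattern)
    if not res and not fail_on_no_match:
        return None
    if len(res) != 1:
        raise EasyBuildError("Expected exactly one match for '%s', found %d: %s", glob_pattern, len(res), res)
    return res[0]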