Code Example #1
    def install_step(self):
        """Custom install procedure for wxPython."""
        # wxPython configure, build, and install with one script
        preinst_opts = self.cfg['preinstallopts']
        INSTALL_CMD = "%(preinst_opts)s %(pycmd)s %(script)s --prefix=%(prefix)s"
        if LooseVersion(self.version) >= LooseVersion("4"):
            script = 'build.py'
            cmd = INSTALL_CMD % {
                'preinst_opts': preinst_opts,
                'pycmd': self.python_cmd,
                'script': script,
                'prefix': self.installdir,
            }
            cmd = cmd + " %s -v install" % self.wxflag
        else:
            script = os.path.join('wxPython', 'build-wxpython.py')
            cmd = INSTALL_CMD % {
                'preinst_opts': preinst_opts,
                'pycmd': self.python_cmd,
                'script': script,
                'prefix': self.installdir,
            }
            cmd = cmd + " --wxpy_installdir=%s --install" % self.installdir

        run_cmd(cmd, log_all=True, simple=True)
Code Example #2
    def build_step(self, *args, **kwargs):
        """Custom build procedure for Python, ensure stack size limit is set to 'unlimited' (if desired)."""

        if self.cfg['ulimit_unlimited']:
            # determine current stack size limit
            (out, _) = run_cmd("ulimit -s")
            curr_ulimit_s = out.strip()

            # figure out hard limit for stack size limit;
            # this determines whether or not we can use "ulimit -s unlimited"
            (out, _) = run_cmd("ulimit -s -H")
            max_ulimit_s = out.strip()

            if curr_ulimit_s == UNLIMITED:
                self.log.info("Current stack size limit is %s: OK", curr_ulimit_s)
            elif max_ulimit_s == UNLIMITED:
                self.log.info("Current stack size limit is %s, setting it to %s for build...",
                              curr_ulimit_s, UNLIMITED)
                self.cfg.update('prebuildopts', "ulimit -s %s && " % UNLIMITED)
            else:
                msg = "Current stack size limit is %s, and can not be set to %s due to hard limit of %s;"
                msg += " setting stack size limit to %s instead, "
                msg += " this may break part of the compilation (e.g. hashlib)..."
                print_warning(msg % (curr_ulimit_s, UNLIMITED, max_ulimit_s, max_ulimit_s))
                self.cfg.update('prebuildopts', "ulimit -s %s && " % max_ulimit_s)

        super(EB_Python, self).build_step(*args, **kwargs)
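For reference, the soft/hard stack limits queried above via 'ulimit -s' can also be read directly from Python with the standard resource module. A minimal, hypothetical sketch (not part of the easyblock; note that getrlimit reports bytes, while ulimit -s reports kilobytes):

import resource

# soft and hard stack size limits, in bytes; RLIM_INFINITY corresponds to 'unlimited'
soft, hard = resource.getrlimit(resource.RLIMIT_STACK)
if soft == resource.RLIM_INFINITY:
    print("stack size limit is already unlimited")
elif hard == resource.RLIM_INFINITY:
    print("soft limit is %d bytes, hard limit is unlimited, so 'ulimit -s unlimited' would work" % soft)
else:
    print("hard limit of %d bytes caps how far 'ulimit -s' can be raised" % hard)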
Code Example #3
    def install_step(self):
        """Custom install procedure for VSC-tools."""

        args = "install --prefix=%(path)s --install-lib=%(path)s/lib" % {'path': self.installdir}

        pylibdir = os.path.join(self.installdir, 'lib')
        env.setvar('PYTHONPATH', '%s:%s' % (pylibdir, os.getenv('PYTHONPATH')))

        try:
            os.mkdir(pylibdir)

            pwd = os.getcwd()

            pkg_list = ['-'.join(src['name'].split('-')[0:-1]) for src in self.src if src['name'].startswith('vsc')]
            for pkg in pkg_list:
                os.chdir(self.builddir)
                sel_dirs = [d for d in glob.glob("%s-[0-9][0-9.]*" % pkg)]
                if not len(sel_dirs) == 1:
                    self.log.error("Found none or more than one %s dir in %s: %s" % (pkg, self.builddir, sel_dirs))

                os.chdir(os.path.join(self.builddir, sel_dirs[0]))
                cmd = "python setup.py %s" % args
                run_cmd(cmd, log_all=True, simple=True, log_output=True)

            os.chdir(pwd)

        except OSError as err:
            self.log.error("Failed to install: %s" % err)
Code Example #4
File: go.py  Project: hpcugent/easybuild-easyblocks
    def install_step(self):
        """
        Execute the all.bash script to build and install the Go compiler,
        specifying the final installation prefix by setting $GOROOT_FINAL.
        """
        srcdir = os.path.join(self.cfg['start_dir'], 'src')
        try:
            os.chdir(srcdir)
        except OSError as err:
            raise EasyBuildError("Failed to move to %s: %s", srcdir, err)

        # $GOROOT_FINAL only specifies the location of the final installation, which gets baked into the binaries
        # the installation itself is *not* done by the all.bash script, that needs to be done manually
        # $GOROOT_BOOTSTRAP needs to specify a Go installation directory to build the go toolchain for versions
        # 1.5 and later.
        if LooseVersion(self.version) >= LooseVersion('1.5'):
            go_root = get_software_root('Go')
            if go_root:
                cmd = "GOROOT_BOOTSTRAP=%s GOROOT_FINAL=%s ./all.bash" % (go_root, self.installdir)
            else:
                raise EasyBuildError("Go is required as a build dependency for installing Go since version 1.5")
        else:
            cmd = "GOROOT_FINAL=%s ./all.bash" % self.installdir

        run_cmd(cmd, log_all=True, simple=False)

        try:
            rmtree2(self.installdir)
            shutil.copytree(self.cfg['start_dir'], self.installdir, symlinks=self.cfg['keepsymlinks'])
        except OSError as err:
            raise EasyBuildError("Failed to copy installation to %s: %s", self.installdir, err)
Code Example #5
def check_os_dependency(dep):
    """
    Check if dependency is available from OS.
    """
    # - uses rpm -q and dpkg -s --> can be run as non-root!!
    # - fallback on which
    # - should be extended to files later?
    found = None
    cmd = None
    if which('rpm'):
        cmd = "rpm -q %s" % dep
        found = run_cmd(cmd, simple=True, log_all=False, log_ok=False, force_in_dry_run=True)

    if not found and which('dpkg'):
        cmd = "dpkg -s %s" % dep
        found = run_cmd(cmd, simple=True, log_all=False, log_ok=False, force_in_dry_run=True)

    if cmd is None:
        # fallback for when os-dependency is a binary/library
        found = which(dep)

        # try locate if it's available
        if not found and which('locate'):
            cmd = 'locate --regexp "/%s$"' % dep
            found = run_cmd(cmd, simple=True, log_all=False, log_ok=False, force_in_dry_run=True)

    return found
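A short, hypothetical usage sketch for the helper above; the package names are placeholders, and only check_os_dependency() itself comes from the example:

# report which OS-level dependencies appear to be missing (names are illustrative only)
os_deps = ['make', 'tcsh', 'libibverbs']
missing = [dep for dep in os_deps if not check_os_dependency(dep)]
if missing:
    print("Missing OS dependencies: %s" % ', '.join(missing))
else:
    print("All OS dependencies found")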
Code Example #6
    def build_step(self):
        """Build OpenFOAM using make after sourcing script to set environment."""

        precmd = "source %s" % os.path.join(self.builddir, self.openfoamdir, "etc", "bashrc")

        # make directly in install directory
        cmd_tmpl = "%(precmd)s && %(prebuildopts)s %(makecmd)s" % {
            'precmd': precmd,
            'prebuildopts': self.cfg['prebuildopts'],
            'makecmd': os.path.join(self.builddir, self.openfoamdir, '%s'),
        }
        if 'extend' in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0'):
            qa = {
                "Proceed without compiling ParaView [Y/n]": 'Y',
                "Proceed without compiling cudaSolvers? [Y/n]": 'Y',
            }
            noqa = [
                ".* -o .*",
                "checking .*",
                "warning.*",
                "configure: creating.*",
                "%s .*" % os.environ['CC'],
                "wmake .*",
                "Making dependency list for source file.*",
                "\s*\^\s*",  # warning indicator
                "Cleaning .*",
            ]
            run_cmd_qa(cmd_tmpl % 'Allwmake.firstInstall', qa, no_qa=noqa, log_all=True, simple=True)
        else:
            run_cmd(cmd_tmpl % 'Allwmake', log_all=True, simple=True, log_output=True)
Code Example #7
    def install_step(self):
        """Install using chimera.bin."""

        try:
            os.chdir(self.cfg['start_dir'])
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", self.cfg['start_dir'], err)

        # Chimera comes bundled with its dependencies, and follows a
        # UNIX file system layout with 'bin', 'include', 'lib', etc.  To
        # avoid conflicts with other modules, the Chimera module must
        # not add the 'bin', 'include', 'lib', etc. directories to PATH,
        # CPATH, LD_LIBRARY_PATH, etc.  We achieve this by installing
        # Chimera in a subdirectory (called 'chimera') instead of the
        # root directory.
        cmd = "./chimera.bin -q -d %s" % os.path.join(self.installdir,
                                                      'chimera')
        run_cmd(cmd, log_all=True, simple=True)

        # Create a symlink to the Chimera startup script; this symlink
        # will end up in PATH.  The startup script sets up the
        # environment, so that Chimera finds its dependencies.
        mkdir(os.path.join(self.installdir, 'bin'))
        symlink(os.path.join(self.installdir, 'chimera', 'bin', 'chimera'),
                os.path.join(self.installdir, 'bin', 'chimera'))
Code Example #8
    def build_step(self, verbose=False):
        """Build ParMETIS (and METIS) using build_step."""

        paracmd = ''
        if self.cfg['parallel']:
            paracmd = "-j %s" % self.cfg['parallel']

        self.cfg.update('buildopts', 'LIBDIR=""')

        if self.toolchain.options['usempi']:
            if self.toolchain.options['pic']:
                self.cfg.update('buildopts', 'CC="$MPICC -fPIC"')
            else:
                self.cfg.update('buildopts', 'CC="$MPICC"')

        cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd, self.cfg['buildopts'])

        # run make in build dir as well for recent version
        if LooseVersion(self.version) >= LooseVersion("4"):
            try:
                os.chdir(self.parmetis_builddir)
                run_cmd(cmd, log_all=True, simple=True, log_output=verbose)
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Running cmd '%s' in %s failed: %s", cmd, self.parmetis_builddir, err)
        else:
            run_cmd(cmd, log_all=True, simple=True, log_output=verbose)
Code Example #9
    def install_perl_module(self):
        """Install procedure for Perl modules: using either Makefile.Pl or Build.PL."""

        # Perl modules have two possible installation procedures: using Makefile.PL and Build.PL
        # configure, build, test, install
        if os.path.exists('Makefile.PL'):
            install_cmd = ' '.join([
                self.cfg['preconfigopts'],
                'perl',
                'Makefile.PL',
                'PREFIX=%s' % self.installdir,
                self.cfg['configopts'],
            ])
            run_cmd(install_cmd)

            ConfigureMake.build_step(self)
            ConfigureMake.test_step(self)
            ConfigureMake.install_step(self)

        elif os.path.exists('Build.PL'):
            install_cmd = ' '.join([
                self.cfg['preconfigopts'],
                'perl',
                'Build.PL',
                '--prefix',
                self.installdir,
                self.cfg['configopts'],
            ])
            run_cmd(install_cmd)

            run_cmd("%s perl Build build %s" % (self.cfg['prebuildopts'], self.cfg['buildopts']))

            if self.cfg['runtest']:
                run_cmd('perl Build %s' % self.cfg['runtest'])
            run_cmd('%s perl Build install %s' % (self.cfg['preinstallopts'], self.cfg['installopts']))
Code Example #10
    def install_step(self):
        """MATLAB install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        if LooseVersion(self.version) >= LooseVersion('2016b'):
            jdir = os.path.join(self.cfg['start_dir'], 'sys', 'java', 'jre', 'glnxa64', 'jre', 'bin')
            for perm_dir in [os.path.join(self.cfg['start_dir'], 'bin', 'glnxa64'), jdir]:
                adjust_permissions(perm_dir, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_options = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_options + self.cfg['preinstallopts']
        if LooseVersion(self.version) >= LooseVersion('2016b'):
            change_dir(self.builddir)

        cmd = "%s %s -v -inputFile %s %s" % (self.cfg['preinstallopts'], src, self.configfile, self.cfg['installopts'])
        run_cmd(cmd, log_all=True, simple=True)
Code Example #11
    def configure_step(self):
        """Configure ParMETIS build.
        For versions of ParMETIS < 4 , METIS is a seperate build
        New versions of ParMETIS include METIS
        
        Run 'cmake' in the build dir to get rid of a 'user friendly' 
        help message that is displayed without this step.
        """
        if LooseVersion(self.version) >= LooseVersion("4"):
            # tested with 4.0.2, now actually requires cmake to be run first
            # for both parmetis and metis

            self.cfg.update('configopts', '-DMETIS_PATH=../metis -DGKLIB_PATH=../metis/GKlib')

            self.cfg.update('configopts', '-DOPENMP="%s"' % self.toolchain.get_flag('openmp'))

            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '-DCMAKE_C_COMPILER="$MPICC"')

            if self.toolchain.options['pic']:
                self.cfg.update('configopts', '-DCMAKE_C_FLAGS="-fPIC"')

            self.parmetis_builddir = 'build'
            try:
                os.chdir(self.parmetis_builddir)
                cmd = 'cmake .. %s -DCMAKE_INSTALL_PREFIX="%s"' % (self.cfg['configopts'],
                                                                   self.installdir)
                run_cmd(cmd, log_all=True, simple=True)
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Running cmake in %s failed: %s", self.parmetis_builddir, err)
Code Example #12
    def build_step(self):
        """Build by running build_step, but with some special options for SCOTCH depending on the compiler."""

        ccs = os.environ['CC']
        ccp = os.environ['MPICC']
        ccd = os.environ['MPICC']

        cflags = "-fPIC -O3 -DCOMMON_FILE_COMPRESS_GZ -DCOMMON_PTHREAD -DCOMMON_RANDOM_FIXED_SEED -DSCOTCH_RENAME"
        if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
            cflags += " -Drestrict=__restrict"
        else:
            cflags += " -restrict -DIDXSIZE64"

        #USE 64 bit index
        if self.toolchain.options['i8']:
            cflags += " -DINTSIZE64"

        if not self.toolchain.mpi_family() in [toolchain.INTELMPI, toolchain.QLOGICMPI]:  #@UndefinedVariable
            cflags += " -DSCOTCH_PTHREAD"

        # actually build
        apps = ['scotch', 'ptscotch']
        if LooseVersion(self.version) >= LooseVersion('6.0'):
            # separate target for esmumps in recent versions
            apps.extend(['esmumps', 'ptesmumps'])
        for app in apps:
            cmd = 'make CCS="%s" CCP="%s" CCD="%s" CFLAGS="%s" %s' % (ccs, ccp, ccd, cflags, app)
            run_cmd(cmd, log_all=True, simple=True)
Code Example #13
File: cuda.py  Project: hpcugent/easybuild-easyblocks
    def install_step(self):
        """Install CUDA using Perl install script."""

        # define how to run the installer
        # script has /usr/bin/perl hardcoded, but we want to have control over which perl is being used
        if LooseVersion(self.version) <= LooseVersion("5"):
            install_interpreter = "perl"
            install_script = "install-linux.pl"
            self.cfg.update('installopts', '--prefix=%s' % self.installdir)
        elif LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("10.1"):
            install_interpreter = "perl"
            install_script = "cuda-installer.pl"
            # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
            self.cfg.update('installopts', "-verbose -silent -toolkitpath=%s -toolkit" % self.installdir)
        else:
            install_interpreter = ""
            install_script = "./cuda-installer"
            # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
            self.cfg.update('installopts', "--silent --toolkit --toolkitpath=%s --defaultroot=%s" % (
                            self.installdir, self.installdir))

        cmd = "%(preinstallopts)s %(interpreter)s %(script)s %(installopts)s" % {
            'preinstallopts': self.cfg['preinstallopts'],
            'interpreter': install_interpreter,
            'script': install_script,
            'installopts': self.cfg['installopts']
        }

        # prepare for running install script autonomously
        qanda = {}
        stdqa = {
            # this question is only asked if CUDA tools are already available system-wide
            r"Would you like to remove all CUDA files under .*? (yes/no/abort): ": "no",
        }
        noqanda = [
            r"^Configuring",
            r"Installation Complete",
            r"Verifying archive integrity.*",
            r"^Uncompressing NVIDIA CUDA",
            r".* -> .*",
        ]

        # patch install script to handle Q&A autonomously
        if install_interpreter == "perl":
            patch_perl_script_autoflush(os.path.join(self.builddir, install_script))

        # make sure $DISPLAY is not defined, which may lead to (weird) problems
        # this is workaround for not being able to specify --nox11 to the Perl install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        # overriding maxhits default value to 300 (300s wait for nothing to change in the output without seeing a known
        # question)
        run_cmd_qa(cmd, qanda, std_qa=stdqa, no_qa=noqanda, log_all=True, simple=True, maxhits=300)

        # check if there are patches to apply
        if len(self.src) > 1:
            for patch in self.src[1:]:
                self.log.debug("Running patch %s", patch['name'])
                run_cmd("/bin/sh " + patch['path'] + " --accept-eula --silent --installdir=" + self.installdir)
Code Example #14
    def configure_step(self):
        """Configure MUMmer build by running make check and setting make options."""

        cmd = "%s make check %s" % (self.cfg['preconfigopts'], self.cfg['configopts'])
        run_cmd(cmd, log_all=True, simple=True, log_output=True)

        self.cfg.update('buildopts', 'all')
Code Example #15
 def test_step(self):
     """
     Test with SCons 
     """
     if self.cfg['runtest']:
         cmd = "scons %s" % (self.cfg['runtest'])
         run_cmd(cmd, log_all=True)
Code Example #16
    def build_step(self):
        """Customize the build step by adding compiler-specific flags to the build command."""

        comp_fam = self.toolchain.comp_family()

        if comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            cmd = "python setup.py build --compiler=intel --fcompiler=intelem"

        elif comp_fam in [toolchain.GCC, toolchain.CLANGGCC]:  # @UndefinedVariable
            cmdprefix = ""
            ldflags = os.getenv("LDFLAGS")
            if ldflags:
                # LDFLAGS should not be set when building numpy/scipy, because it overwrites whatever numpy/scipy sets
                # see http://projects.scipy.org/numpy/ticket/182
                # don't unset it with os.environ.pop('LDFLAGS'), doesn't work in Python 2.4,
                # see http://bugs.python.org/issue1287
                cmdprefix = "unset LDFLAGS && "
                self.log.debug(
                    "LDFLAGS was %s, will be cleared before %s build with '%s'" % (ldflags, self.name, cmdprefix)
                )

            cmd = "%s python setup.py build --fcompiler=gnu95" % cmdprefix

        else:
            raise EasyBuildError("Unknown family of compilers being used: %s", comp_fam)

        run_cmd(cmd, log_all=True, simple=True)
Code Example #17
File: mcr.py  Project: jas02/easybuild-easyblocks
    def install_step(self):
        """MCR install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_options = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_options + self.cfg['preinstallopts']

        configfile = "%s/%s" % (self.builddir, self.configfilename)
        cmd = "%s ./install -v -inputFile %s %s" % (self.cfg['preinstallopts'], configfile, self.cfg['installopts'])
        run_cmd(cmd, log_all=True, simple=True)

        # determine subdirectory (e.g. v84 (2014a, 2014b), v85 (2015a), ...)
        subdirs = os.listdir(self.installdir)
        if len(subdirs) == 1:
            self.subdir = subdirs[0]
        else:
            raise EasyBuildError("Found multiple subdirectories, don't know which one to pick: %s", subdirs)
Code Example #18
File: run.py  Project: cyiops/easybuild-framework
    def test_run_cmd_log(self):
        """Test logging of executed commands."""
        fd, logfile = tempfile.mkstemp(suffix='.log', prefix='eb-test-')
        os.close(fd)

        regex = re.compile('cmd "echo hello" exited with exit code [0-9]* and output:')

        # command output is not logged by default without debug logging
        init_logging(logfile, silent=True)
        self.assertTrue(run_cmd("echo hello"))
        stop_logging(logfile)
        self.assertEqual(len(regex.findall(read_file(logfile))), 0)
        write_file(logfile, '')

        init_logging(logfile, silent=True)
        self.assertTrue(run_cmd("echo hello", log_all=True))
        stop_logging(logfile)
        self.assertEqual(len(regex.findall(read_file(logfile))), 1)
        write_file(logfile, '')

        # with debugging enabled, exit code and output of command should only get logged once
        setLogLevelDebug()

        init_logging(logfile, silent=True)
        self.assertTrue(run_cmd("echo hello"))
        stop_logging(logfile)
        self.assertEqual(len(regex.findall(read_file(logfile))), 1)
        write_file(logfile, '')

        init_logging(logfile, silent=True)
        self.assertTrue(run_cmd("echo hello", log_all=True))
        stop_logging(logfile)
        self.assertEqual(len(regex.findall(read_file(logfile))), 1)
        write_file(logfile, '')
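The behaviour exercised by this test (command output only ends up in the log when log_all=True or debug logging is enabled) can be approximated outside EasyBuild with plain subprocess and logging; a rough sketch under that assumption, not the framework's actual implementation:

import logging
import subprocess

def run_and_maybe_log(cmd, log_all=False):
    """Run cmd in a shell; log exit code and output only if log_all is set or debug logging is enabled."""
    proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    log = logging.getLogger(__name__)
    if log_all or log.isEnabledFor(logging.DEBUG):
        log.info('cmd "%s" exited with exit code %d and output:\n%s', cmd, proc.returncode, proc.stdout)
    return proc.stdout, proc.returncode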
Code Example #19
    def install_step(self):
        """
        Install by copying files over to the right places.

        Also create symlinks where expected by other software (Lib directory).
        """
        includedir = os.path.join(self.installdir, 'include')
        libdir = os.path.join(self.installdir, 'lib')

        if LooseVersion(self.version) >= LooseVersion("4"):
            # includedir etc changed in v4, use a normal make install
            cmd = "make install %s" % self.cfg['installopts']
            try:
                os.chdir(self.parmetis_builddir)
                run_cmd(cmd, log_all=True, simple=True)
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Running '%s' in %s failed: %s", cmd, self.parmetis_builddir, err)

            # libraries
            try:
                src = os.path.join(self.cfg['start_dir'], 'build', 'libmetis', 'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError as err:
                raise EasyBuildError("Copying files to installation dir failed: %s", err)
Code Example #20
File: run.py  Project: cyiops/easybuild-framework
    def test_run_cmd_trace(self):
        """Test run_cmd under --trace"""
        # replace log.experimental with log.warning to allow experimental code
        easybuild.tools.utilities._log.experimental = easybuild.tools.utilities._log.warning

        init_config(build_options={'trace': True})

        self.mock_stdout(True)
        self.mock_stderr(True)
        (out, ec) = run_cmd("echo hello")
        stdout = self.get_stdout()
        stderr = self.get_stderr()
        self.mock_stdout(False)
        self.mock_stderr(False)
        self.assertEqual(stderr, '')
        pattern = "^  >> running command:\n"
        pattern += "\t\[started at: .*\]\n"
        pattern += "\t\[output logged in .*\]\n"
        pattern += "\techo hello\n"
        pattern += '  >> command completed: exit 0, ran in .*'
        regex = re.compile(pattern)
        self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))

        # trace output can be disabled on a per-command basis
        self.mock_stdout(True)
        self.mock_stderr(True)
        (out, ec) = run_cmd("echo hello", trace=False)
        stdout = self.get_stdout()
        stderr = self.get_stderr()
        self.mock_stdout(False)
        self.mock_stderr(False)
        self.assertEqual(stdout, '')
        self.assertEqual(stderr, '')
Code Example #21
File: run.py  Project: cyiops/easybuild-framework
    def test_run_cmd_cache(self):
        """Test caching for run_cmd"""
        (first_out, ec) = run_cmd("ulimit -u")
        self.assertEqual(ec, 0)
        (cached_out, ec) = run_cmd("ulimit -u")
        self.assertEqual(ec, 0)
        self.assertEqual(first_out, cached_out)

        # inject value into cache to check whether executing command again really returns cached value
        run_cmd.update_cache({("ulimit -u", None): ("123456", 123)})
        (cached_out, ec) = run_cmd("ulimit -u")
        self.assertEqual(ec, 123)
        self.assertEqual(cached_out, "123456")

        # also test with command that uses stdin
        (out, ec) = run_cmd("cat", inp='foo')
        self.assertEqual(ec, 0)
        self.assertEqual(out, 'foo')

        # inject different output for cat with 'foo' as stdin to check whether cached value is used
        run_cmd.update_cache({('cat', 'foo'): ('bar', 123)})
        (cached_out, ec) = run_cmd("cat", inp='foo')
        self.assertEqual(ec, 123)
        self.assertEqual(cached_out, 'bar')

        run_cmd.clear_cache()
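The cache manipulated via run_cmd.update_cache() and run_cmd.clear_cache() above can be mimicked with a small memoizing decorator; a simplified sketch keyed on the (command, stdin) pair, as the test assumes, and not EasyBuild's actual implementation:

def cache_by_cmd_and_input(func):
    """Memoize func on its (cmd, inp) arguments and expose update_cache/clear_cache helpers."""
    cache = {}

    def wrapper(cmd, inp=None, **kwargs):
        key = (cmd, inp)
        if key not in cache:
            cache[key] = func(cmd, inp=inp, **kwargs)
        return cache[key]

    wrapper.update_cache = cache.update
    wrapper.clear_cache = cache.clear
    return wrapper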
Code Example #22
File: imod.py  Project: ULHPC/easybuild-easyblocks
    def install_step(self):
        """Install IMOD using install script."""

        # -dir: Choose location of installation directory
        # -skip: do not attempt to deploy resource files in /etc
        # -yes: do not prompt for confirmation
        script = '{0}_{1}{2}.csh'.format(self.name.lower(), self.version, self.cfg['versionsuffix'])
        cmd = "tcsh {0} -dir {1} -script {1} -skip -yes".format(script, self.installdir)
        run_cmd(cmd, log_all=True, simple=True)
        
        # The assumption by the install script is that installdir will be something
        # like /usr/local. So it creates, within the specified install location, a
        # number of additional directories within which to install IMOD. We will,
        # therefore, move the contents of these directories up and throw away the
        # directories themselves. Doing so apparently is not a problem so long as
        # IMOD_DIR is correctly set in the module.
        link_to_remove = os.path.join(self.installdir, self.name)
        dir_to_remove = os.path.join(self.installdir, "{0}_{1}".format(self.name.lower(), self.version))
        try:
            for entry in os.listdir(dir_to_remove):
                shutil.move(os.path.join(dir_to_remove, entry), self.installdir)
            if os.path.realpath(link_to_remove) != os.path.realpath(dir_to_remove):
                raise EasyBuildError("Something went wrong: %s doesn't point to %s", link_to_remove, dir_to_remove)
            rmtree2(dir_to_remove)
            os.remove(link_to_remove)
        except OSError as err:
            raise EasyBuildError("Failed to clean up install dir: %s", err)
Code Example #23
    def install_step(self):
        """Install in non-standard path by passing PREFIX variable to make install."""

        self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
        super(EB_bzip2, self).install_step()

        # also build & install shared libraries, if desired
        if self.cfg['with_shared_libs']:

            cmd = "%s make -f Makefile-libbz2_so %s" % (self.cfg['prebuildopts'], self.cfg['buildopts'])
            run_cmd(cmd, log_all=True, simple=True)

            # copy shared libraries to <install dir>/lib
            shlib_ext = get_shared_lib_ext()
            libdir = os.path.join(self.installdir, 'lib')
            try:
                for lib in glob.glob('libbz2.%s.*' % shlib_ext):
                    # only way to copy a symlink is to check for it,
                    # cfr. http://stackoverflow.com/questions/4847615/copying-a-symbolic-link-in-python
                    if os.path.islink(lib):
                        os.symlink(os.readlink(lib), os.path.join(libdir, lib))
                    else:
                        shutil.copy2(lib, libdir)
            except OSError as err:
                raise EasyBuildError("Copying shared libraries to installation dir %s failed: %s", libdir, err)

            # create symlink libbz2.so -> libbz2.so.<version> (e.g. libbz2.so.1.0.6)
            try:
                cwd = os.getcwd()
                os.chdir(libdir)
                os.symlink('libbz2.%s.%s' % (shlib_ext, self.version), 'libbz2.%s' % shlib_ext)
                os.chdir(cwd)
            except OSError as err:
                raise EasyBuildError("Creating symlink for libbz2.so failed: %s", err)
Code Example #24
    def prepare_step(self):
        """Generate PLUMED patch if PLUMED is listed as a dependency."""
        super(EB_DL_underscore_POLY_underscore_Classic, self).prepare_step()

        if self.with_plumed:
            # see https://groups.google.com/d/msg/plumed-users/cWaIDU5F6Bw/bZUW3J9cCAAJ
            diff_pat = 'dlpoly-*.diff'
            try:
                diff_hits = glob.glob(os.path.join(self.builddir, diff_pat))
            except OSError as err:
                raise EasyBuildError("Failed to find list of files/dirs that match '%s': %s", diff_pat, err)

            if len(diff_hits) == 1:
                plumed_patch = diff_hits[0]
            elif not self.dry_run:
                raise EasyBuildError("Expected to find exactly one match for '%s' in %s, found: %s",
                                     diff_pat, self.builddir, diff_hits)

            if not self.dry_run:
                try:
                    os.rename('source', 'srcmod')
                except OSError as err:
                    raise EasyBuildError("Failed to move 'source' directory to 'srcmod': %s", err)

            engine = os.path.splitext(os.path.basename(plumed_patch))[0]
            cmd = "plumed-patch -p --runtime -e %s -d %s" % (engine, plumed_patch)
            run_cmd(cmd, log_all=True, simple=True)
Code Example #25
    def run_clang_tests(self, obj_dir):
        """Run Clang tests in specified directory (unless disabled)."""
        if not self.cfg['skip_all_tests']:
            change_dir(obj_dir)

            self.log.info("Running tests")
            run_cmd("make %s check-all" % self.make_parallel_opts, log_all=True)
Code Example #26
File: hpcg.py  Project: hpcugent/easybuild-easyblocks
    def test_step(self):
        """Custom built-in test procedure for HPCG."""
        if self.cfg['runtest']:

            if not build_option('mpi_tests'):
                self.log.info("Skipping testing of HPCG since MPI testing is disabled")
                return

            objbindir = os.path.join(self.cfg['start_dir'], 'obj', 'bin')
            # obtain equivalent of 'mpirun -np 2 xhpcg'
            hpcg_mpi_cmd = self.toolchain.mpi_cmd_for("xhpcg", 2)
            # 2 threads per MPI process (4 threads in total)
            cmd = "PATH=%s:$PATH OMP_NUM_THREADS=2 %s" % (objbindir, hpcg_mpi_cmd)
            run_cmd(cmd, simple=True, log_all=True, log_ok=True)

            # find log file, check for success
            success_regex = re.compile(r"Scaled Residual \[[0-9.e-]+\]")
            try:
                hpcg_logs = glob.glob('hpcg_log*txt')
                if len(hpcg_logs) == 1:
                    txt = open(hpcg_logs[0], 'r').read()
                    self.log.debug("Contents of HPCG log file %s: %s" % (hpcg_logs[0], txt))
                    if success_regex.search(txt):
                        self.log.info("Found pattern '%s' in HPCG log file %s, OK!", success_regex.pattern, hpcg_logs[0])
                    else:
                        raise EasyBuildError("Failed to find pattern '%s' in HPCG log file %s",
                                             success_regex.pattern, hpcg_logs[0])
                else:
                    raise EasyBuildError("Failed to find exactly one HPCG log file: %s", hpcg_logs)
            except OSError as err:
                raise EasyBuildError("Failed to check for success in HPCG log file: %s", err)
Code Example #27
    def configure_step(self):
        """Configure build: <single-line description how this deviates from standard configure>"""

        # set generic make options
        self.cfg.update('buildopts', 'CC="%s" OPTFLAGS="%s"' % (os.getenv('MPICC'), os.getenv('CFLAGS')))

        if LooseVersion(self.version) >= LooseVersion("3.2"):

            # set correct start_dir, and change into it;
            # test whether it already ends in 'src', since a reproduced ('reprod') easyconfig would
            if os.path.basename(self.cfg['start_dir']) != 'src':
                self.cfg['start_dir'] = os.path.join(self.cfg['start_dir'], 'src')
            try:
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Failed to change to correct source dir %s: %s", self.cfg['start_dir'], err)

            # run autoconf to generate configure script
            cmd = "autoconf"
            run_cmd(cmd)

            # set config opts
            beagle = get_software_root('beagle-lib')
            if beagle:
                self.cfg.update('configopts', '--with-beagle=%s' % beagle)
            else:
                if get_software_root('BEAGLE'):
                    self.log.nosupport('BEAGLE module as dependency, should be beagle-lib', '2.0')
                raise EasyBuildError("beagle-lib module not loaded?")

            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--enable-mpi')

            # configure
            super(EB_MrBayes, self).configure_step()
Code Example #28
File: metis.py  Project: Caylo/easybuild-easyblocks
    def configure_step(self, *args, **kwargs):
        """Configure build using 'make config' (only for recent versions (>= v5))."""

        if LooseVersion(self.version) >= LooseVersion("5"):

            cmd = "make config prefix=%s" % self.installdir
            run_cmd(cmd, log_all=True, simple=True)
Code Example #29
File: hadoop.py  Project: Caylo/easybuild-easyblocks
 def build_step(self):
     """Custom build procedure for Hadoop: build native libraries, if requested."""
     if self.cfg['build_native_libs']:
         cmd = "mvn package -DskipTests -Dmaven.javadoc.skip -Dtar -Pdist,native"
         if self.cfg['parallel'] > 1:
             cmd += " -T%d" % self.cfg['parallel']
         run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Code Example #30
File: esmf.py  Project: boegel/easybuild-easyblocks
    def configure_step(self):
        """Custom configuration procedure for ESMF through environment variables."""

        env.setvar('ESMF_DIR', self.cfg['start_dir'])
        env.setvar('ESMF_INSTALL_PREFIX', self.installdir)
        env.setvar('ESMF_INSTALL_BINDIR', 'bin')
        env.setvar('ESMF_INSTALL_LIBDIR', 'lib')
        env.setvar('ESMF_INSTALL_MODDIR', 'mod')

        # specify compiler
        comp_family = self.toolchain.comp_family()
        if comp_family in [toolchain.GCC]:
            compiler = 'gfortran'
        else:
            compiler = comp_family.lower()
        env.setvar('ESMF_COMPILER', compiler)

        # specify MPI communications library
        comm = None
        mpi_family = self.toolchain.mpi_family()
        if mpi_family in [toolchain.MPICH, toolchain.QLOGICMPI]:
            # MPICH family for MPICH v3.x, which is MPICH2 compatible
            comm = 'mpich2'
        else:
            comm = mpi_family.lower()
        env.setvar('ESMF_COMM', comm)

        # specify decent LAPACK lib
        env.setvar('ESMF_LAPACK', 'user')
        env.setvar('ESMF_LAPACK_LIBS', '%s %s' % (os.getenv('LDFLAGS'), os.getenv('LIBLAPACK_MT')))

        # specify netCDF
        netcdf = get_software_root('netCDF')
        if netcdf:
            env.setvar('ESMF_NETCDF', 'user')
            netcdf_libs = ['-L%s/lib' % netcdf, '-lnetcdf']

            # Fortran
            netcdff = get_software_root('netCDF-Fortran')
            if netcdff:
                netcdf_libs = ["-L%s/lib" % netcdff] + netcdf_libs + ["-lnetcdff"]
            else:
                netcdf_libs.append('-lnetcdff')

            # C++
            netcdfcxx = get_software_root('netCDF-C++')
            if netcdfcxx:
                netcdf_libs = ["-L%s/lib" % netcdfcxx] + netcdf_libs + ["-lnetcdf_c++"]
            else:
                netcdfcxx = get_software_root('netCDF-C++4')
                if netcdfcxx:
                    netcdf_libs = ["-L%s/lib" % netcdfcxx] + netcdf_libs + ["-lnetcdf_c++4"]
                else:
                    netcdf_libs.append('-lnetcdf_c++')

            env.setvar('ESMF_NETCDF_LIBS', ' '.join(netcdf_libs))

        # 'make info' provides useful debug info
        cmd = "make info"
        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Code Example #31
File: numpy.py  Project: vanzod/easybuild-easyblocks
    def configure_step(self):
        """Configure numpy build by composing site.cfg contents."""

        # see e.g. https://github.com/numpy/numpy/pull/2809/files
        self.sitecfg = '\n'.join([
            "[DEFAULT]",
            "library_dirs = %(libs)s",
            "include_dirs= %(includes)s",
            "search_static_first=True",
        ])

        if get_software_root("imkl"):

            if self.toolchain.comp_family() == toolchain.GCC:
                # see https://software.intel.com/en-us/articles/numpyscipy-with-intel-mkl,
                # section Building with GNU Compiler chain
                extrasiteconfig = '\n'.join([
                    "[mkl]",
                    "lapack_libs = ",
                    "mkl_libs = mkl_rt",
                ])
            else:
                extrasiteconfig = '\n'.join([
                    "[mkl]",
                    "lapack_libs = %(lapack)s",
                    "mkl_libs = %(blas)s",
                ])

        else:
            # [atlas] the only real alternative, even for non-ATLAS BLAS libs (e.g., OpenBLAS, ACML, ...)
            # using only the [blas] and [lapack] sections results in sub-optimal builds that don't provide _dotblas.so;
            # it does require a CBLAS interface to be available for the BLAS library being used
            # e.g. for ACML, the CBLAS module providing a C interface needs to be used
            extrasiteconfig = '\n'.join([
                "[atlas]",
                "atlas_libs = %(lapack)s",
                "[lapack]",
                "lapack_libs = %(lapack)s",  # required by scipy, that uses numpy's site.cfg
            ])

        blas = None
        lapack = None
        fft = None

        if get_software_root("imkl"):
            # with IMKL, no spaces and use '-Wl:'
            # redefine 'Wl,' to 'Wl:' so that the patch file can do its job
            def get_libs_for_mkl(varname):
                """Get list of libraries as required for MKL patch file."""
                libs = self.toolchain.variables['LIB%s' % varname].copy()
                libs.try_remove(['pthread', 'dl'])
                tweaks = {
                    'prefix': '',
                    'prefix_begin_end': '-Wl:',
                    'separator': ',',
                    'separator_begin_end': ',',
                }
                libs.try_function_on_element('change', kwargs=tweaks)
                libs.SEPARATOR = ','
                return str(libs)  # str causes list concatenation and adding prefixes & separators

            blas = get_libs_for_mkl('BLAS_MT')
            lapack = get_libs_for_mkl('LAPACK_MT')
            fft = get_libs_for_mkl('FFT')

            # make sure the patch file is there
            # we check for a typical characteristic of a patch file that cooperates with the above
            # not fool-proof, but better than enforcing a particular patch filename
            patch_found = False
            patch_wl_regex = re.compile(r"replace\(':',\s*','\)")
            for patch in self.patches:
                # patches are either strings (extension) or dicts (easyblock)
                if isinstance(patch, dict):
                    patch = patch['path']
                if patch_wl_regex.search(open(patch, 'r').read()):
                    patch_found = True
                    break
            if not patch_found:
                raise EasyBuildError("Building numpy on top of Intel MKL requires a patch to "
                                     "handle -Wl linker flags correctly, which doesn't seem to be there.")

        else:
            # unless Intel MKL is used, $ATLAS should be set to take full control,
            # and to make sure a fully optimized version is built, including _dotblas.so
            # which is critical for decent performance of the numpy.dot (matrix dot product) function!
            env.setvar('ATLAS', '1')

            lapack = ', '.join([x for x in self.toolchain.get_variable('LIBLAPACK_MT', typ=list) if x != "pthread"])
            fft = ', '.join(self.toolchain.get_variable('LIBFFT', typ=list))

        libs = ':'.join(self.toolchain.get_variable('LDFLAGS', typ=list))
        includes = ':'.join(self.toolchain.get_variable('CPPFLAGS', typ=list))

        # CBLAS is required for ACML, because it doesn't offer a C interface to BLAS
        if get_software_root('ACML'):
            cblasroot = get_software_root('CBLAS')
            if cblasroot:
                lapack = ', '.join([lapack, "cblas"])
                cblaslib = os.path.join(cblasroot, 'lib')
                # with numpy as extension, CBLAS might not be included in LDFLAGS because it's not part of a toolchain
                if not cblaslib in libs:
                    libs = ':'.join([libs, cblaslib])
            else:
                raise EasyBuildError("CBLAS is required next to ACML to provide a C interface to BLAS, "
                                     "but it's not loaded.")

        if fft:
            extrasiteconfig += "\n[fftw]\nlibraries = %s" % fft

        suitesparseroot = get_software_root('SuiteSparse')
        if suitesparseroot:
            amddir = os.path.join(suitesparseroot, 'AMD')
            umfpackdir = os.path.join(suitesparseroot, 'UMFPACK')

            if not os.path.exists(amddir) or not os.path.exists(umfpackdir):
                raise EasyBuildError("Expected SuiteSparse subdirectories are not both there: %s, %s",
                                     amddir, umfpackdir)
            else:
                extrasiteconfig += '\n'.join([
                    "[amd]",
                    "library_dirs = %s" % os.path.join(amddir, 'Lib'),
                    "include_dirs = %s" % os.path.join(amddir, 'Include'),
                    "amd_libs = amd",
                    "[umfpack]",
                    "library_dirs = %s" % os.path.join(umfpackdir, 'Lib'),
                    "include_dirs = %s" % os.path.join(umfpackdir, 'Include'),
                    "umfpack_libs = umfpack",
                ])

        self.sitecfg = '\n'.join([self.sitecfg, extrasiteconfig])

        self.sitecfg = self.sitecfg % {
            'blas': blas,
            'lapack': lapack,
            'libs': libs,
            'includes': includes,
        }

        super(EB_numpy, self).configure_step()

        # check configuration (for debugging purposes)
        cmd = "%s setup.py config" % self.python_cmd
        run_cmd(cmd, log_all=True, simple=True)
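For illustration, in the non-MKL branch above (with an OpenBLAS-style LAPACK and FFTW), the composed site.cfg contents would look roughly like the string below; all paths and library names are placeholders, not values produced by any particular toolchain:

# purely illustrative rendering of the composed site.cfg (paths and lib names are made up)
example_sitecfg = '\n'.join([
    "[DEFAULT]",
    "library_dirs = /example/OpenBLAS/lib:/example/FFTW/lib",
    "include_dirs= /example/OpenBLAS/include:/example/FFTW/include",
    "search_static_first=True",
    "[atlas]",
    "atlas_libs = openblas",
    "[lapack]",
    "lapack_libs = openblas",
    "[fftw]",
    "libraries = fftw3",
])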
Code Example #32
 def test_run_cmd_simple(self):
     """Test return value for run_cmd in 'simple' mode."""
     self.assertEqual(True, run_cmd("echo hello", simple=True))
     self.assertEqual(
         False, run_cmd("exit 1", simple=True, log_all=False, log_ok=False))
Code Example #33
    def install_step(self):
        """Install CUDA using Perl install script."""

        # define how to run the installer
        # script has /usr/bin/perl hardcoded, but we want to have control over which perl is being used
        if LooseVersion(self.version) <= LooseVersion("5"):
            install_interpreter = "perl"
            install_script = "install-linux.pl"
            self.cfg.update('installopts', '--prefix=%s' % self.installdir)
        elif LooseVersion(self.version) > LooseVersion("5") and LooseVersion(
                self.version) < LooseVersion("10.1"):
            install_interpreter = "perl"
            install_script = "cuda-installer.pl"
            # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
            self.cfg.update(
                'installopts',
                "-verbose -silent -toolkitpath=%s -toolkit" % self.installdir)
        else:
            install_interpreter = ""
            install_script = "./cuda-installer"
            # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
            self.cfg.update(
                'installopts',
                "--silent --toolkit --toolkitpath=%s --defaultroot=%s" %
                (self.installdir, self.installdir))

        if LooseVersion("10.0") < LooseVersion(self.version) < LooseVersion(
                "10.2") and get_cpu_architecture() == POWER:
            # Workaround for
            # https://devtalk.nvidia.com/default/topic/1063995/cuda-setup-and-installation/cuda-10-1-243-10-1-update-2-ppc64le-run-file-installation-issue/
            install_script = " && ".join([
                "mkdir -p %(installdir)s/targets/ppc64le-linux/include",
                "([ -e %(installdir)s/include ] || ln -s targets/ppc64le-linux/include %(installdir)s/include)",
                "cp -r %(builddir)s/builds/cublas/src %(installdir)s/.",
                install_script
            ]) % {
                'installdir': self.installdir,
                'builddir': self.builddir
            }

        # Use C locale to avoid localized questions and crash on CUDA 10.1
        self.cfg.update('preinstallopts', "export LANG=C && ")

        cmd = "%(preinstallopts)s %(interpreter)s %(script)s %(installopts)s" % {
            'preinstallopts': self.cfg['preinstallopts'],
            'interpreter': install_interpreter,
            'script': install_script,
            'installopts': self.cfg['installopts']
        }

        # prepare for running install script autonomously
        qanda = {}
        stdqa = {
            # this question is only asked if CUDA tools are already available system-wide
            r"Would you like to remove all CUDA files under .*? (yes/no/abort): ":
            "no",
        }
        noqanda = [
            r"^Configuring",
            r"Installation Complete",
            r"Verifying archive integrity.*",
            r"^Uncompressing NVIDIA CUDA",
            r".* -> .*",
        ]

        # patch install script to handle Q&A autonomously
        if install_interpreter == "perl":
            patch_perl_script_autoflush(
                os.path.join(self.builddir, install_script))

        # make sure $DISPLAY is not defined, which may lead to (weird) problems
        # this is workaround for not being able to specify --nox11 to the Perl install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        # cuda-installer creates /tmp/cuda-installer.log (ignoring TMPDIR)
        # Try to remove it before running the installer.
        # This will fail with a usable error if it can't be removed
        # instead of segfaulting in the cuda-installer.
        remove_file('/tmp/cuda-installer.log')

        # overriding maxhits default value to 1000 (seconds to wait for nothing to change in the output
        # without seeing a known question)
        run_cmd_qa(cmd,
                   qanda,
                   std_qa=stdqa,
                   no_qa=noqanda,
                   log_all=True,
                   simple=True,
                   maxhits=1000)

        # Remove the cuda-installer log file
        remove_file('/tmp/cuda-installer.log')

        # check if there are patches to apply
        if len(self.src) > 1:
            for patch in self.src[1:]:
                self.log.debug("Running patch %s", patch['name'])
                run_cmd("/bin/sh " + patch['path'] +
                        " --accept-eula --silent --installdir=" +
                        self.installdir)
Code Example #34
 def extract_step(self):
     """Extract installer to have more control, e.g. options, patching Perl scripts, etc."""
     execpath = self.src[0]['path']
     run_cmd("/bin/sh " + execpath + " --noexec --nox11 --target " +
             self.builddir)
     self.src[0]['finalpath'] = self.builddir
Code Example #35
 def test_run_cmd(self):
     """Basic test for run_cmd function."""
     (out, ec) = run_cmd("echo hello")
     self.assertEqual(out, "hello\n")
     # no reason echo hello could fail
     self.assertEqual(ec, 0)
Code Example #36
 def test_run_cmd_list(self):
     """Test run_cmd with command specified as a list rather than a string"""
     (out, ec) = run_cmd(['/bin/sh', '-c', "echo hello"], shell=False)
     self.assertEqual(out, "hello\n")
     # no reason echo hello could fail
     self.assertEqual(ec, 0)
Code Example #37
 def build_step(self):
     """Run build in build subdirectory."""
     cxx = os.environ['CXX']
     cxxflags = os.environ['CXXFLAGS']
     cmd = "make CXX='%s' CXXFLAGS='$(HPCG_DEFS) %s -DMPICH_IGNORE_CXX_SEEK'" % (cxx, cxxflags)
     run_cmd(cmd, log_all=True, simple=True, log_ok=True, path='obj')
Code Example #38
    def configure_step(self):
        """
        Configure PETSc by setting configure options and running configure script.

        Configure procedure is much more concise for older versions (< v3).
        """
        if LooseVersion(self.version) >= LooseVersion("3"):
            # make the install dir first if we are doing a download install, then keep it for the rest of the way
            deps = self.cfg["download_deps"] + self.cfg[
                "download_deps_static"] + self.cfg["download_deps_shared"]
            if deps:
                self.log.info(
                    "Creating the installation directory before the configure."
                )
                self.make_installdir()
                self.cfg["keeppreviousinstall"] = True
                for dep in set(deps):
                    self.cfg.update('configopts', '--download-%s=1' % dep)
                for dep in self.cfg["download_deps_static"]:
                    self.cfg.update('configopts',
                                    '--download-%s-shared=0' % dep)
                for dep in self.cfg["download_deps_shared"]:
                    self.cfg.update('configopts',
                                    '--download-%s-shared=1' % dep)

            # compilers
            self.cfg.update('configopts', '--with-cc="%s"' % os.getenv('CC'))
            self.cfg.update(
                'configopts',
                '--with-cxx="%s" --with-c++-support' % os.getenv('CXX'))
            self.cfg.update('configopts', '--with-fc="%s"' % os.getenv('F90'))

            # compiler flags
            if LooseVersion(self.version) >= LooseVersion("3.5"):
                self.cfg.update('configopts',
                                '--CFLAGS="%s"' % os.getenv('CFLAGS'))
                self.cfg.update('configopts',
                                '--CXXFLAGS="%s"' % os.getenv('CXXFLAGS'))
                self.cfg.update('configopts',
                                '--FFLAGS="%s"' % os.getenv('F90FLAGS'))
            else:
                self.cfg.update('configopts',
                                '--with-cflags="%s"' % os.getenv('CFLAGS'))
                self.cfg.update('configopts',
                                '--with-cxxflags="%s"' % os.getenv('CXXFLAGS'))
                self.cfg.update('configopts',
                                '--with-fcflags="%s"' % os.getenv('F90FLAGS'))

            if self.toolchain.comp_family() != toolchain.GCC:  # @UndefinedVariable
                self.cfg.update('configopts', '--with-gnu-compilers=0')

            # MPI
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--with-mpi=1')

            # build options
            self.cfg.update('configopts',
                            '--with-build-step-np=%s' % self.cfg['parallel'])
            self.cfg.update(
                'configopts',
                '--with-shared-libraries=%d' % self.cfg['shared_libs'])
            self.cfg.update(
                'configopts',
                '--with-debugging=%d' % self.toolchain.options['debug'])
            self.cfg.update('configopts',
                            '--with-pic=%d' % self.toolchain.options['pic'])
            self.cfg.update('configopts',
                            '--with-x=0 --with-windows-graphics=0')

            # PAPI support
            if self.cfg['with_papi']:
                papi_inc = self.cfg['papi_inc']
                papi_inc_file = os.path.join(papi_inc, "papi.h")
                papi_lib = self.cfg['papi_lib']
                if os.path.isfile(papi_inc_file) and os.path.isfile(papi_lib):
                    self.cfg.update('configopts', '--with-papi=1')
                    self.cfg.update('configopts',
                                    '--with-papi-include=%s' % papi_inc)
                    self.cfg.update('configopts',
                                    '--with-papi-lib=%s' % papi_lib)
                else:
                    raise EasyBuildError(
                        "PAPI header (%s) and/or lib (%s) not found, can not enable PAPI support?",
                        papi_inc_file, papi_lib)

            # Python extensions_step
            if get_software_root('Python'):
                self.cfg.update('configopts', '--with-numpy=1')
                if self.cfg['shared_libs']:
                    self.cfg.update('configopts', '--with-mpi4py=1')

            # FFTW, ScaLAPACK (and BLACS for older PETSc versions)
            deps = ["FFTW", "ScaLAPACK"]
            if LooseVersion(self.version) < LooseVersion("3.5"):
                deps.append("BLACS")
            for dep in deps:
                inc = os.getenv('%s_INC_DIR' % dep.upper())
                libdir = os.getenv('%s_LIB_DIR' % dep.upper())
                libs = os.getenv('%s_STATIC_LIBS' % dep.upper())
                if inc and libdir and libs:
                    with_arg = "--with-%s" % dep.lower()
                    self.cfg.update('configopts', '%s=1' % with_arg)
                    self.cfg.update('configopts',
                                    '%s-include=%s' % (with_arg, inc))
                    self.cfg.update(
                        'configopts',
                        '%s-lib=[%s/%s]' % (with_arg, libdir, libs))
                else:
                    self.log.info(
                        "Missing inc/lib info, so not enabling %s support." %
                        dep)

            # BLAS, LAPACK libraries
            bl_libdir = os.getenv('BLAS_LAPACK_LIB_DIR')
            bl_libs = os.getenv('BLAS_LAPACK_STATIC_LIBS')
            if bl_libdir and bl_libs:
                self.cfg.update(
                    'configopts',
                    '--with-blas-lapack-lib=[%s/%s]' % (bl_libdir, bl_libs))
            else:
                raise EasyBuildError(
                    "One or more environment variables for BLAS/LAPACK not defined?"
                )

            # additional dependencies
            # filter out deps handled separately
            depfilter = self.cfg.builddependencies() + [
                "BLACS", "BLAS", "CMake", "FFTW", "LAPACK", "numpy", "mpi4py",
                "papi", "ScaLAPACK", "SuiteSparse"
            ]

            deps = [
                dep['name'] for dep in self.cfg.dependencies()
                if dep['name'] not in depfilter
            ]
            for dep in deps:
                if isinstance(dep, str):
                    dep = (dep, dep)
                deproot = get_software_root(dep[0])
                if deproot:
                    if LooseVersion(self.version) >= LooseVersion("3.5") and dep[1] == "SCOTCH":
                        # --with-ptscotch is the configopt for PETSc >= 3.5
                        withdep = "--with-pt%s" % dep[1].lower()
                    else:
                        withdep = "--with-%s" % dep[1].lower()
                    self.cfg.update(
                        'configopts',
                        '%s=1 %s-dir=%s' % (withdep, withdep, deproot))

            # SuiteSparse options changed in PETSc 3.5
            suitesparse = get_software_root('SuiteSparse')
            if suitesparse:
                if LooseVersion(self.version) >= LooseVersion("3.5"):
                    withdep = "--with-suitesparse"
                    # specified order of libs matters!
                    ss_libs = [
                        "UMFPACK", "KLU", "CHOLMOD", "BTF", "CCOLAMD",
                        "COLAMD", "CAMD", "AMD"
                    ]

                    suitesparse_inc = [
                        os.path.join(suitesparse, l, "Include")
                        for l in ss_libs
                    ]
                    suitesparse_inc.append(
                        os.path.join(suitesparse, "SuiteSparse_config"))
                    inc_spec = "-include=[%s]" % ','.join(suitesparse_inc)

                    suitesparse_libs = [
                        os.path.join(suitesparse, l, "Lib",
                                     "lib%s.a" % l.lower()) for l in ss_libs
                    ]
                    suitesparse_libs.append(
                        os.path.join(suitesparse, "SuiteSparse_config",
                                     "libsuitesparseconfig.a"))
                    lib_spec = "-lib=[%s]" % ','.join(suitesparse_libs)
                else:
                    # CHOLMOD and UMFPACK are part of SuiteSparse (PETSc < 3.5)
                    withdep = "--with-umfpack"
                    inc_spec = "-include=%s" % os.path.join(
                        suitesparse, "UMFPACK", "Include")
                    # specified order of libs matters!
                    umfpack_libs = [
                        os.path.join(suitesparse, l, "Lib",
                                     "lib%s.a" % l.lower())
                        for l in ["UMFPACK", "CHOLMOD", "COLAMD", "AMD"]
                    ]
                    lib_spec = "-lib=[%s]" % ','.join(umfpack_libs)

                self.cfg.update(
                    'configopts', ' '.join([
                        withdep + spec for spec in ['=1', inc_spec, lib_spec]
                    ]))

            # set PETSC_DIR for configure (env) and build_step
            env.setvar('PETSC_DIR', self.cfg['start_dir'])
            self.cfg.update('buildopts',
                            'PETSC_DIR=%s' % self.cfg['start_dir'])

            if self.cfg['sourceinstall']:
                # run configure without --prefix (required)
                cmd = "%s ./configure %s" % (self.cfg['preconfigopts'],
                                             self.cfg['configopts'])
                (out, _) = run_cmd(cmd, log_all=True, simple=False)
            else:
                out = super(EB_PETSc, self).configure_step()

            # check for errors in configure
            error_regexp = re.compile("ERROR")
            if error_regexp.search(out):
                raise EasyBuildError("Error(s) detected in configure output!")

            if self.cfg['sourceinstall']:
                # figure out PETSC_ARCH setting
                petsc_arch_regex = re.compile(r"^\s*PETSC_ARCH:\s*(\S+)$", re.M)
                res = petsc_arch_regex.search(out)
                if res:
                    self.petsc_arch = res.group(1)
                    self.cfg.update('buildopts',
                                    'PETSC_ARCH=%s' % self.petsc_arch)
                else:
                    raise EasyBuildError(
                        "Failed to determine PETSC_ARCH setting.")

            self.petsc_subdir = '%s-%s' % (self.name.lower(), self.version)

        else:  # old versions (< 3.x)

            self.cfg.update('configopts', '--prefix=%s' % self.installdir)
            self.cfg.update('configopts', '--with-shared=1')

            # additional dependencies
            for dep in ["SCOTCH"]:
                deproot = get_software_root(dep)
                if deproot:
                    withdep = "--with-%s" % dep.lower()
                    self.cfg.update(
                        'configopts',
                        '%s=1 %s-dir=%s' % (withdep, withdep, deproot))

            cmd = "./config/configure.py %s" % self.get_cfg('configopts')
            run_cmd(cmd, log_all=True, simple=True)

        # for PETSc >= 3.5, make does not accept -j
        if LooseVersion(self.version) >= LooseVersion("3.5"):
            self.cfg['parallel'] = None
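
For reference, a minimal sketch (with hypothetical paths and option values, not taken from an actual build) of the configure invocation that the accumulated configopts produce for a 'sourceinstall' build:

# minimal sketch, hypothetical values: how the accumulated configopts end up on the
# PETSc configure command line for a source install
preconfigopts = ""
configopts = "--with-mpi=1 --with-shared-libraries=1 --with-debugging=0 --with-pic=1"
configopts += " --with-blas-lapack-lib=[/opt/lapack/lib/liblapack.a,libblas.a]"
cmd = "%s ./configure %s" % (preconfigopts, configopts)
print(cmd)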
Code example #39
0
    def extract_step(self, verbose=False):
        """Custom extraction of sources for Chimera: unpack installation file
        to obtain chimera.bin installer."""

        cmd = "unzip -d %s %s" % (self.builddir, self.src[0]['path'])
        run_cmd(cmd, log_all=True, simple=True)
Code example #40
0
File: numpy.py Project: vanzod/easybuild-easyblocks
class EB_numpy(FortranPythonPackage):
    """Support for installing the numpy Python package as part of a Python installation."""

    @staticmethod
    def extra_options():
        """Easyconfig parameters specific to numpy."""
        extra_vars = ({
            'blas_test_time_limit': [500, "Time limit (in ms) for 1000x1000 matrix dot product BLAS test", CUSTOM],
        })
        return FortranPythonPackage.extra_options(extra_vars=extra_vars)

    def __init__(self, *args, **kwargs):
        """Initialize numpy-specific class variables."""
        super(EB_numpy, self).__init__(*args, **kwargs)

        self.sitecfg = None
        self.sitecfgfn = 'site.cfg'
        self.testinstall = True
        self.testcmd = "cd .. && %(python)s -c 'import numpy; numpy.test(verbose=2)'"

    def configure_step(self):
        """Configure numpy build by composing site.cfg contents."""

        # see e.g. https://github.com/numpy/numpy/pull/2809/files
        self.sitecfg = '\n'.join([
            "[DEFAULT]",
            "library_dirs = %(libs)s",
            "include_dirs= %(includes)s",
            "search_static_first=True",
        ])

        if get_software_root("imkl"):

            if self.toolchain.comp_family() == toolchain.GCC:
                # see https://software.intel.com/en-us/articles/numpyscipy-with-intel-mkl,
                # section Building with GNU Compiler chain
                extrasiteconfig = '\n'.join([
                    "[mkl]",
                    "lapack_libs = ",
                    "mkl_libs = mkl_rt",
                ])
            else:
                extrasiteconfig = '\n'.join([
                    "[mkl]",
                    "lapack_libs = %(lapack)s",
                    "mkl_libs = %(blas)s",
                ])

        else:
            # [atlas] the only real alternative, even for non-ATLAS BLAS libs (e.g., OpenBLAS, ACML, ...)
            # using only the [blas] and [lapack] sections results in sub-optimal builds that don't provide _dotblas.so;
            # it does require a CBLAS interface to be available for the BLAS library being used
            # e.g. for ACML, the CBLAS module providing a C interface needs to be used
            extrasiteconfig = '\n'.join([
                "[atlas]",
                "atlas_libs = %(lapack)s",
                "[lapack]",
                "lapack_libs = %(lapack)s",  # required by scipy, that uses numpy's site.cfg
            ])

        blas = None
        lapack = None
        fft = None

        if get_software_root("imkl"):
            # with IMKL, no spaces and use '-Wl:'
            # redefine 'Wl,' to 'Wl:' so that the patch file can do its job
            def get_libs_for_mkl(varname):
                """Get list of libraries as required for MKL patch file."""
                libs = self.toolchain.variables['LIB%s' % varname].copy()
                libs.try_remove(['pthread', 'dl'])
                tweaks = {
                    'prefix': '',
                    'prefix_begin_end': '-Wl:',
                    'separator': ',',
                    'separator_begin_end': ',',
                }
                libs.try_function_on_element('change', kwargs=tweaks)
                libs.SEPARATOR = ','
                return str(libs)  # str causes list concatenation and adding prefixes & separators

            blas = get_libs_for_mkl('BLAS_MT')
            lapack = get_libs_for_mkl('LAPACK_MT')
            fft = get_libs_for_mkl('FFT')

            # make sure the patch file is there
            # we check for a typical characteristic of a patch file that cooperates with the above
            # not fool-proof, but better than enforcing a particular patch filename
            patch_found = False
            patch_wl_regex = re.compile(r"replace\(':',\s*','\)")
            for patch in self.patches:
                # patches are either strings (extension) or dicts (easyblock)
                if isinstance(patch, dict):
                    patch = patch['path']
                if patch_wl_regex.search(open(patch, 'r').read()):
                    patch_found = True
                    break
            if not patch_found:
                raise EasyBuildError("Building numpy on top of Intel MKL requires a patch to "
                                     "handle -Wl linker flags correctly, which doesn't seem to be there.")

        else:
            # unless Intel MKL is used, $ATLAS should be set to take full control,
            # and to make sure a fully optimized version is built, including _dotblas.so
            # which is critical for decent performance of the numpy.dot (matrix dot product) function!
            env.setvar('ATLAS', '1')

            lapack = ', '.join([x for x in self.toolchain.get_variable('LIBLAPACK_MT', typ=list) if x != "pthread"])
            fft = ', '.join(self.toolchain.get_variable('LIBFFT', typ=list))

        libs = ':'.join(self.toolchain.get_variable('LDFLAGS', typ=list))
        includes = ':'.join(self.toolchain.get_variable('CPPFLAGS', typ=list))

        # CBLAS is required for ACML, because it doesn't offer a C interface to BLAS
        if get_software_root('ACML'):
            cblasroot = get_software_root('CBLAS')
            if cblasroot:
                lapack = ', '.join([lapack, "cblas"])
                cblaslib = os.path.join(cblasroot, 'lib')
                # with numpy as extension, CBLAS might not be included in LDFLAGS because it's not part of a toolchain
                if cblaslib not in libs:
                    libs = ':'.join([libs, cblaslib])
            else:
                raise EasyBuildError("CBLAS is required next to ACML to provide a C interface to BLAS, "
                                     "but it's not loaded.")

        if fft:
            extrasiteconfig += "\n[fftw]\nlibraries = %s" % fft

        suitesparseroot = get_software_root('SuiteSparse')
        if suitesparseroot:
            amddir = os.path.join(suitesparseroot, 'AMD')
            umfpackdir = os.path.join(suitesparseroot, 'UMFPACK')

            if not os.path.exists(amddir) or not os.path.exists(umfpackdir):
                raise EasyBuildError("Expected SuiteSparse subdirectories are not both there: %s, %s",
                                     amddir, umfpackdir)
            else:
                extrasiteconfig += '\n'.join([
                    "[amd]",
                    "library_dirs = %s" % os.path.join(amddir, 'Lib'),
                    "include_dirs = %s" % os.path.join(amddir, 'Include'),
                    "amd_libs = amd",
                    "[umfpack]",
                    "library_dirs = %s" % os.path.join(umfpackdir, 'Lib'),
                    "include_dirs = %s" % os.path.join(umfpackdir, 'Include'),
                    "umfpack_libs = umfpack",
                ])

        self.sitecfg = '\n'.join([self.sitecfg, extrasiteconfig])

        self.sitecfg = self.sitecfg % {
            'blas': blas,
            'lapack': lapack,
            'libs': libs,
            'includes': includes,
        }

        super(EB_numpy, self).configure_step()

        # check configuration (for debugging purposes)
        cmd = "%s setup.py config" % self.python_cmd
        run_cmd(cmd, log_all=True, simple=True)

    def test_step(self):
        """Run available numpy unit tests, and more."""
        super(EB_numpy, self).test_step()

        # temporarily install numpy, since it can't be used straight from the source dir
        tmpdir = tempfile.mkdtemp()
        abs_pylibdirs = [os.path.join(tmpdir, pylibdir) for pylibdir in self.all_pylibdirs]
        for pylibdir in abs_pylibdirs:
            mkdir(pylibdir, parents=True)
        pythonpath = "export PYTHONPATH=%s &&" % os.pathsep.join(abs_pylibdirs + ['$PYTHONPATH'])
        cmd = self.compose_install_command(tmpdir, extrapath=pythonpath)
        run_cmd(cmd, log_all=True, simple=True, verbose=False)

        try:
            pwd = os.getcwd()
            os.chdir(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", tmpdir, err)

        # evaluate performance of numpy.dot (3 runs, 3 loops each)
        size = 1000
        cmd = ' '.join([
            pythonpath,
            '%s -m timeit -n 3 -r 3' % self.python_cmd,
            '-s "import numpy; x = numpy.random.random((%(size)d, %(size)d))"' % {'size': size},
            '"numpy.dot(x, x.T)"',
        ])
        (out, ec) = run_cmd(cmd, simple=False)
        self.log.debug("Test output: %s" % out)

        # fetch result
        time_msec = None
        msec_re = re.compile(r"\d+ loops, best of \d+: (?P<time>[0-9.]+) msec per loop")
        res = msec_re.search(out)
        if res:
            time_msec = float(res.group('time'))
        else:
            sec_re = re.compile(r"\d+ loops, best of \d+: (?P<time>[0-9.]+) sec per loop")
            res = sec_re.search(out)
            if res:
                time_msec = 1000 * float(res.group('time'))
            elif self.dry_run:
                # use fake value during dry run
                time_msec = 123
                self.log.warning("Using fake value for time required for %dx%d matrix dot product under dry run: %s",
                                 size, size, time_msec)
            else:
                raise EasyBuildError("Failed to determine time for numpy.dot test run.")

        # make sure we observe decent performance
        if time_msec < self.cfg['blas_test_time_limit']:
            self.log.info("Time for %dx%d matrix dot product: %d msec < %d msec => OK",
                          size, size, time_msec, self.cfg['blas_test_time_limit'])
        else:
            raise EasyBuildError("Time for %dx%d matrix dot product: %d msec >= %d msec => ERROR",
                                 size, size, time_msec, self.cfg['blas_test_time_limit'])
        try:
            os.chdir(pwd)
            rmtree2(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to change back to %s: %s", pwd, err)
Code example #41
0
File: tinker.py Project: clement-parisot/modules
            try:
                os.chdir(testdir)
            except OSError as err:
                raise EasyBuildError("Failed to move to %s to run tests: %s",
                                     testdir, err)

            # run all tests via the provided 'run' scripts
            tests = glob.glob(os.path.join(testdir, '*.run'))
            # gpcr takes too long (~1h), ifabp fails due to input issues (?)
            tests = [
                t for t in tests
                if not (t.endswith('gpcr.run') or t.endswith('ifabp.run'))
            ]
            for test in tests:
                run_cmd(test)

    def install_step(self):
        """Custom install procedure for TINKER."""
        source_dir = os.path.join(self.cfg['start_dir'], 'source')
        try:
            os.chdir(source_dir)
        except OSError as err:
            raise EasyBuildError("Failed to move to %s: %s", source_dir, err)

        mkdir(os.path.join(self.cfg['start_dir'], 'bin'))
        run_cmd(
            os.path.join(self.cfg['start_dir'], self.build_subdir,
                         'rename.make'))

    def sanity_check_step(self):
Code example #42
0
    def configure_step(self):
        """Custom configuration for ROOT, add configure options."""

        # using ./configure is deprecated/broken in recent versions, need to use CMake instead
        if LooseVersion(self.version.lstrip('v')) >= LooseVersion('6.10'):
            if self.cfg['arch']:
                raise EasyBuildError(
                    "Specified value '%s' for 'arch' is not used, should not be set",
                    self.cfg['arch'])

            cfitsio_root = get_software_root('CFITSIO')
            if cfitsio_root:
                self.cfg.update('configopts', '-DCFITSIO=%s' % cfitsio_root)

            fftw_root = get_software_root('FFTW')
            if fftw_root:
                self.cfg.update(
                    'configopts',
                    '-Dbuiltin_fftw3=OFF -DFFTW_DIR=%s' % fftw_root)

            gsl_root = get_software_root('GSL')
            if gsl_root:
                self.cfg.update('configopts', '-DGSL_DIR=%s' % gsl_root)

            mesa_root = get_software_root('Mesa')
            if mesa_root:
                self.cfg.update(
                    'configopts', '-DOPENGL_INCLUDE_DIR=%s' %
                    os.path.join(mesa_root, 'include'))
                self.cfg.update(
                    'configopts', '-DOPENGL_gl_LIBRARY=%s' %
                    os.path.join(mesa_root, 'lib', 'libGL.so'))

            python_root = get_software_root('Python')
            if python_root:
                pyshortver = '.'.join(
                    get_software_version('Python').split('.')[:2])
                self.cfg.update(
                    'configopts', '-DPYTHON_EXECUTABLE=%s' %
                    os.path.join(python_root, 'bin', 'python'))
                python_inc_dir = os.path.join(python_root, 'include',
                                              'python%s' % pyshortver)
                self.cfg.update('configopts',
                                '-DPYTHON_INCLUDE_DIR=%s' % python_inc_dir)
                python_lib = os.path.join(python_root, 'lib',
                                          'libpython%s.so' % pyshortver)
                self.cfg.update('configopts',
                                '-DPYTHON_LIBRARY=%s' % python_lib)

            if get_software_root('X11'):
                self.cfg.update('configopts', '-Dx11=ON')

            self.cfg['separate_build_dir'] = True
            CMakeMake.configure_step(self)
        else:
            if self.cfg['arch'] is None:
                raise EasyBuildError(
                    "No architecture specified to pass to configure script")

            self.cfg.update('configopts',
                            "--etcdir=%s/etc/root " % self.installdir)

            cmd = "%s ./configure %s --prefix=%s %s" % (
                self.cfg['preconfigopts'], self.cfg['arch'], self.installdir,
                self.cfg['configopts'])

            run_cmd(cmd, log_all=True, log_ok=True, simple=True)
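
As an illustration of the Python-related CMake options composed above, a minimal sketch with a hypothetical Python installation prefix and version:

# minimal sketch, hypothetical python_root and version string
import os

python_root = '/opt/software/Python/3.7.4'
pyshortver = '.'.join('3.7.4'.split('.')[:2])
opts = [
    '-DPYTHON_EXECUTABLE=%s' % os.path.join(python_root, 'bin', 'python'),
    '-DPYTHON_INCLUDE_DIR=%s' % os.path.join(python_root, 'include', 'python%s' % pyshortver),
    '-DPYTHON_LIBRARY=%s' % os.path.join(python_root, 'lib', 'libpython%s.so' % pyshortver),
]
print(' '.join(opts))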
Code example #43
0
    def configure_common(self):
        """Common configuration for all toolchains"""

        # openmp introduces 2 major differences
        # -automatic is default: -noautomatic -auto-scalar
        # some mem-bandwidth optimisation
        if self.cfg['type'] == 'psmp':
            self.openmp = self.toolchain.get_flag('openmp')

        # determine which opt flags to use
        if self.cfg['typeopt']:
            optflags = 'OPT'
            regflags = 'OPT2'
        else:
            optflags = 'NOOPT'
            regflags = 'NOOPT'

        # make sure an MPI-2 capable MPI lib is used
        mpi2 = False
        if hasattr(self.toolchain,
                   'MPI_FAMILY') and self.toolchain.MPI_FAMILY is not None:
            known_mpi2_fams = [
                toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2,
                toolchain.OPENMPI, toolchain.INTELMPI
            ]
            mpi_fam = self.toolchain.mpi_family()
            if mpi_fam in known_mpi2_fams:
                mpi2 = True
                self.log.debug(
                    "Determined MPI2 compatibility based on MPI toolchain component: %s"
                    % mpi_fam)
            else:
                self.log.debug(
                    "Cannot determine MPI2 compatibility based on MPI toolchain component: %s"
                    % mpi_fam)
        else:
            # can't use toolchain.mpi_family, because of dummy toolchain
            mpi2libs = ['impi', 'MVAPICH2', 'OpenMPI', 'MPICH2', 'MPICH']
            for mpi2lib in mpi2libs:
                if get_software_root(mpi2lib):
                    mpi2 = True
                    self.log.debug(
                        "Determined MPI2 compatibility based on loaded MPI module: %s",
                        mpi2lib)
                else:
                    self.log.debug(
                        "MPI-2 supporting MPI library %s not loaded.", mpi2lib)

        if not mpi2:
            raise EasyBuildError(
                "CP2K needs MPI-2, no known MPI-2 supporting library loaded?")

        cppflags = os.getenv('CPPFLAGS')
        ldflags = os.getenv('LDFLAGS')
        cflags = os.getenv('CFLAGS')
        fflags = os.getenv('FFLAGS')
        fflags_lowopt = re.sub('-O[0-9]', '-O1', fflags)
        options = {
            'CC': os.getenv('MPICC'),
            'CPP': '',
            'FC': '%s %s' % (os.getenv('MPIF90'), self.openmp),
            'LD': '%s %s' % (os.getenv('MPIF90'), self.openmp),
            'AR': 'ar -r',
            'CPPFLAGS': '',
            'FPIC': self.fpic,
            'DEBUG': self.debug,
            'FCFLAGS': '$(FCFLAGS%s)' % optflags,
            'FCFLAGS2': '$(FCFLAGS%s)' % regflags,
            'CFLAGS': ' %s %s %s $(FPIC) $(DEBUG) %s ' %
                      (cflags, cppflags, ldflags, self.cfg['extracflags']),
            'DFLAGS': ' -D__parallel -D__BLACS -D__SCALAPACK -D__FFTSG %s' %
                      self.cfg['extradflags'],
            'LIBS': os.getenv('LIBS', ''),
            'FCFLAGSNOOPT': '$(DFLAGS) $(CFLAGS) -O0  $(FREE) $(FPIC) $(DEBUG)',
            'FCFLAGSOPT': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags,
            'FCFLAGSOPT2': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags_lowopt,
        }

        libint = get_software_root('LibInt')
        if libint:
            options['DFLAGS'] += ' -D__LIBINT'

            libintcompiler = "%s %s" % (os.getenv('CC'), os.getenv('CFLAGS'))

            # Build libint-wrapper, if required
            libint_wrapper = ''

            # required for old versions of GCC
            if not self.compilerISO_C_BINDING:
                options['DFLAGS'] += ' -D__HAS_NO_ISO_C_BINDING'

                # determine path for libint_tools dir
                libinttools_paths = [
                    'libint_tools', 'tools/hfx_tools/libint_tools'
                ]
                libinttools_path = None
                for path in libinttools_paths:
                    path = os.path.join(self.cfg['start_dir'], path)
                    if os.path.isdir(path):
                        libinttools_path = path
                        change_dir(libinttools_path)
                if not libinttools_path:
                    raise EasyBuildError("No libinttools dir found")

                # build libint wrapper
                cmd = "%s -c libint_cpp_wrapper.cpp -I%s/include" % (
                    libintcompiler, libint)
                if not run_cmd(cmd, log_all=True, simple=True):
                    raise EasyBuildError("Building the libint wrapper failed")
                libint_wrapper = '%s/libint_cpp_wrapper.o' % libinttools_path

            # determine Libint libraries based on major version number
            libint_maj_ver = get_software_version('Libint').split('.')[0]
            if libint_maj_ver == '1':
                libint_libs = "$(LIBINTLIB)/libderiv.a $(LIBINTLIB)/libint.a $(LIBINTLIB)/libr12.a"
            elif libint_maj_ver == '2':
                libint_libs = "$(LIBINTLIB)/libint2.a"
            else:
                raise EasyBuildError(
                    "Don't know how to handle libint version %s",
                    libint_maj_ver)
            self.log.info("Using Libint version %s" % (libint_maj_ver))

            options['LIBINTLIB'] = '%s/lib' % libint
            options['LIBS'] += ' %s -lstdc++ %s' % (libint_libs,
                                                    libint_wrapper)

        else:
            # throw a warning, since CP2K without Libint doesn't make much sense
            self.log.warning(
                "Libint module not loaded, so building without Libint support")

        libxc = get_software_root('libxc')
        if libxc:
            cur_libxc_version = get_software_version('libxc')
            if LooseVersion(self.version) >= LooseVersion('6.1'):
                libxc_min_version = '4.0.3'
                options['DFLAGS'] += ' -D__LIBXC'
            else:
                libxc_min_version = '2.0.1'
                options['DFLAGS'] += ' -D__LIBXC2'

            if LooseVersion(cur_libxc_version) < LooseVersion(
                    libxc_min_version):
                raise EasyBuildError(
                    "This version of CP2K is not compatible with libxc < %s" %
                    libxc_min_version)

            if LooseVersion(cur_libxc_version) >= LooseVersion('4.0.3'):
                # cfr. https://www.cp2k.org/howto:compile#k_libxc_optional_wider_choice_of_xc_functionals
                options['LIBS'] += ' -L%s/lib -lxcf03 -lxc' % libxc
            elif LooseVersion(cur_libxc_version) >= LooseVersion('2.2'):
                options['LIBS'] += ' -L%s/lib -lxcf90 -lxc' % libxc
            else:
                options['LIBS'] += ' -L%s/lib -lxc' % libxc
            self.log.info("Using Libxc-%s" % cur_libxc_version)
        else:
            self.log.info(
                "libxc module not loaded, so building without libxc support")

        return options
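
The options dict returned above presumably feeds into a CP2K arch (Makefile) file; as a rough sketch only (this is an assumption, not the easyblock's actual writer, and the values are hypothetical), the rendering amounts to plain "KEY = value" lines:

# minimal sketch, assumption: options are rendered as "KEY = value" lines in an arch file
def render_arch_file(options):
    return '\n'.join("%s = %s" % (k, v) for k, v in sorted(options.items()))

print(render_arch_file({'CC': 'mpicc', 'FC': 'mpif90 -fopenmp', 'DFLAGS': '-D__parallel -D__SCALAPACK'}))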
Code example #44
0
File: tinker.py Project: clement-parisot/modules
class EB_TINKER(EasyBlock):
    """Support for building/installing TINKER."""
    def __init__(self, *args, **kwargs):
        """Custom easyblock constructor for TINKER: initialise class variables."""
        super(EB_TINKER, self).__init__(*args, **kwargs)

        self.build_subdir = None
        self.build_in_installdir = True

    def configure_step(self):
        """Custom configuration procedure for TINKER."""
        # make sure FFTW is available
        if get_software_root('FFTW') is None:
            raise EasyBuildError("FFTW dependency is not available.")

        os_dirs = {
            LINUX: 'linux',
            DARWIN: 'macosx',
        }
        os_type = get_os_type()
        os_dir = os_dirs.get(os_type)
        if os_dir is None:
            raise EasyBuildError(
                "Failed to determine OS directory for %s (known: %s)", os_type,
                os_dirs)

        comp_dirs = {
            toolchain.INTELCOMP: 'intel',
            toolchain.GCC: 'gfortran',
        }
        comp_fam = self.toolchain.comp_family()
        comp_dir = comp_dirs.get(comp_fam)
        if comp_dir is None:
            raise EasyBuildError(
                "Failed to determine compiler directory for %s (known: %s)",
                comp_fam, comp_dirs)

        self.build_subdir = os.path.join(os_dir, comp_dir)
        self.log.info("Using build scripts from %s subdirectory" %
                      self.build_subdir)

        # patch 'link.make' script to use FFTW provided via EasyBuild
        link_make_fp = os.path.join(self.cfg['start_dir'], self.build_subdir,
                                    'link.make')
        for line in fileinput.input(link_make_fp, inplace=1, backup='.orig'):
            line = re.sub(r"libfftw3_threads.a libfftw3.a",
                          r"-L$EBROOTFFTW/lib -lfftw3_threads -lfftw3", line)
            sys.stdout.write(line)

    def build_step(self):
        """Custom build procedure for TINKER."""
        source_dir = os.path.join(self.cfg['start_dir'], 'source')
        try:
            os.chdir(source_dir)
        except OSError as err:
            raise EasyBuildError("Failed to move to %s: %s", source_dir, err)

        run_cmd(
            os.path.join(self.cfg['start_dir'], self.build_subdir,
                         'compile.make'))
        run_cmd(
            os.path.join(self.cfg['start_dir'], self.build_subdir,
                         'library.make'))
        run_cmd(
            os.path.join(self.cfg['start_dir'], self.build_subdir,
                         'link.make'))
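
To show the effect of the link.make patching done in configure_step above, a minimal sketch applying the same substitution to a hypothetical input line:

# minimal sketch; the input line is hypothetical, the pattern/replacement are those used above
import re

line = "gfortran -o analyze.x analyze.o libtinker.a libfftw3_threads.a libfftw3.a"
print(re.sub(r"libfftw3_threads.a libfftw3.a",
             r"-L$EBROOTFFTW/lib -lfftw3_threads -lfftw3", line))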
Code example #45
0
    def build_step(self):
        """Build in install dir using compile script."""

        cmd = "./%s" % self.compile_script
        run_cmd(cmd, log_all=True, simple=True)
Code example #46
0
    def test_step(self):
        """Run regression test."""

        if self.cfg['runtest']:

            # we need to specify location of 'data' directory in *build* dir,
            # since we've configured CP2K to look into the installation directory
            # (where 'data' will be copied to in install step)
            setvar('CP2K_DATA_DIR', os.path.join(self.cfg['start_dir'],
                                                 'data'))

            if not build_option('mpi_tests'):
                self.log.info(
                    "Skipping testing of CP2K since MPI testing is disabled")
                return

            if self.cfg['omp_num_threads']:
                setvar('OMP_NUM_THREADS', self.cfg['omp_num_threads'])

            # change to root of build dir
            change_dir(self.builddir)

            # use regression test reference output if available
            # try to find an unpacked directory that starts with 'LAST-'
            regtest_refdir = None
            for d in os.listdir(self.builddir):
                if d.startswith("LAST-"):
                    regtest_refdir = d
                    break

            # location of do_regtest script
            cfg_fn = "cp2k_regtest.cfg"
            regtest_script = os.path.join(self.cfg['start_dir'], 'tools',
                                          'regtesting', 'do_regtest')
            regtest_cmd = "%s -nosvn -nobuild -config %s" % (regtest_script,
                                                             cfg_fn)
            # older version of CP2K
            if not os.path.exists(regtest_script):
                regtest_script = os.path.join(self.cfg['start_dir'], 'tools',
                                              'do_regtest')
                regtest_cmd = "%s -nocvs -quick -nocompile -config %s" % (
                    regtest_script, cfg_fn)

            # patch do_regtest so that reference output is used
            if regtest_refdir:
                self.log.info("Using reference output available in %s" %
                              regtest_refdir)
                try:
                    for line in fileinput.input(regtest_script,
                                                inplace=1,
                                                backup='.orig.refout'):
                        line = re.sub(r"^(dir_last\s*=\${dir_base})/.*$",
                                      r"\1/%s" % regtest_refdir, line)
                        sys.stdout.write(line)
                except IOError as err:
                    raise EasyBuildError("Failed to modify '%s': %s",
                                         regtest_script, err)

            else:
                self.log.info(
                    "No reference output found for regression test, just continuing without it..."
                )

            # prefer using 4 cores, since some tests require/prefer square (n^2) numbers or powers of 2 (2^n)
            test_core_cnt = min(self.cfg.get('parallel', sys.maxsize), 4)
            if get_avail_core_count() < test_core_cnt:
                raise EasyBuildError(
                    "Cannot run MPI tests as not enough cores (< %s) are available",
                    test_core_cnt)
            else:
                self.log.info("Using %s cores for the MPI tests" %
                              test_core_cnt)

            # configure regression test
            cfg_txt = '\n'.join([
                'FORT_C_NAME="%(f90)s"',
                'dir_base=%(base)s',
                'cp2k_version=%(cp2k_version)s',
                'dir_triplet=%(triplet)s',
                'export ARCH=${dir_triplet}',
                'cp2k_dir=%(cp2k_dir)s',
                'leakcheck="YES"',
                'maxtasks=%(maxtasks)s',
                'cp2k_run_prefix="%(mpicmd_prefix)s"',
            ]) % {
                'f90': os.getenv('F90'),
                'base': os.path.dirname(os.path.normpath(self.cfg['start_dir'])),
                'cp2k_version': self.cfg['type'],
                'triplet': self.typearch,
                'cp2k_dir': os.path.basename(os.path.normpath(self.cfg['start_dir'])),
                'maxtasks': self.cfg['maxtasks'],
                'mpicmd_prefix': self.toolchain.mpi_cmd_for('', test_core_cnt),
            }

            write_file(cfg_fn, cfg_txt)
            self.log.debug("Contents of %s: %s" % (cfg_fn, cfg_txt))

            # run regression test
            (regtest_output, ec) = run_cmd(regtest_cmd,
                                           log_all=True,
                                           simple=False,
                                           log_output=True)

            if ec == 0:
                self.log.info("Regression test output:\n%s" % regtest_output)
            else:
                raise EasyBuildError(
                    "Regression test failed (non-zero exit code): %s",
                    regtest_output)

            # pattern to search for regression test summary
            re_pattern = "number\s+of\s+%s\s+tests\s+(?P<cnt>[0-9]+)"

            # find total number of tests
            regexp = re.compile(re_pattern % "", re.M | re.I)
            res = regexp.search(regtest_output)
            tot_cnt = None
            if res:
                tot_cnt = int(res.group('cnt'))
            else:
                raise EasyBuildError(
                    "Finding total number of tests in regression test summary failed"
                )

            # function to report on regtest results
            def test_report(test_result):
                """Report on tests with given result."""

                postmsg = ''

                test_result = test_result.upper()
                regexp = re.compile(re_pattern % test_result, re.M | re.I)

                cnt = None
                res = regexp.search(regtest_output)
                if not res:
                    raise EasyBuildError(
                        "Finding number of %s tests in regression test summary failed",
                        test_result.lower())
                else:
                    cnt = int(res.group('cnt'))

                logmsg = "Regression test reported %s / %s %s tests"
                logmsg_values = (cnt, tot_cnt, test_result.lower())

                # failed tests indicate problem with installation
                # wrong tests are only an issue when there are excessively many
                if (test_result == "FAILED"
                        and cnt > 0) or (test_result == "WRONG" and
                                         (cnt / tot_cnt) > 0.1):
                    if self.cfg['ignore_regtest_fails']:
                        self.log.warning(logmsg, *logmsg_values)
                        self.log.info(
                            "Ignoring failures in regression test, as requested."
                        )
                    else:
                        raise EasyBuildError(logmsg, *logmsg_values)
                elif test_result == "CORRECT" or cnt == 0:
                    self.log.info(logmsg, *logmsg_values)
                else:
                    self.log.warning(logmsg, *logmsg_values)

                return postmsg

            # number of failed/wrong tests, will report error if count is positive
            self.postmsg += test_report("FAILED")
            self.postmsg += test_report("WRONG")

            # number of new tests, will be high if a non-suitable regtest reference was used
            # will report error if count is positive (is that what we want?)
            self.postmsg += test_report("NEW")

            # number of correct tests: just report
            test_report("CORRECT")
Code example #47
0
    def configure_step(self):
        """
        Configure for GCC build:
        - prepare extra source dirs (GMP, MPFR, MPC, ...)
        - create obj dir to build in (GCC doesn't like to be built in source dir)
        - add configure and make options, according to .eb spec file
        - decide whether or not to do a staged build (which is required to enable PPL/CLooG support)
        - set platform_lib based on config.guess output
        """

        # self.configopts will be reused in a 3-staged build,
        # configopts is only used in first configure
        self.configopts = self.cfg['configopts']

        # I) prepare extra source dirs, e.g. for GMP, MPFR, MPC (if required), so GCC can build them
        stage1_info = self.prep_extra_src_dirs("stage1")
        configopts = stage1_info['configopts']

        # II) update config options

        # enable specified language support
        if self.cfg['languages']:
            self.configopts += " --enable-languages=%s" % ','.join(self.cfg['languages'])

        # enable building of libiberty, if desired
        if self.cfg['withlibiberty']:
            self.configopts += " --enable-install-libiberty"

        # enable link-time-optimization (LTO) support, if desired
        if self.cfg['withlto']:
            self.configopts += " --enable-lto"
        else:
            self.configopts += " --disable-lto"

        # configure for a release build
        self.configopts += " --enable-checking=release "
        # enable multilib: allow both 32 and 64 bit
        if self.cfg['multilib']:
            glibc_32bit = [
                "glibc.i686",  # Fedora, RedHat-based
                "glibc.ppc",   # "" on Power
                "libc6-dev-i386",  # Debian-based
                "gcc-c++-32bit",  # OpenSuSE, SLES
            ]
            if not any([check_os_dependency(dep) for dep in glibc_32bit]):
                raise EasyBuildError("Using multilib requires 32-bit glibc (install one of %s, depending on your OS)",
                                     ', '.join(glibc_32bit))
            self.configopts += " --enable-multilib --with-multilib-list=m32,m64"
        else:
            self.configopts += " --disable-multilib"
        # build both static and dynamic libraries (???)
        self.configopts += " --enable-shared=yes --enable-static=yes "

        # use POSIX threads
        self.configopts += " --enable-threads=posix "

        # enable plugin support
        self.configopts += " --enable-plugins "

        # use GOLD as default linker
        if self.cfg['use_gold_linker']:
            self.configopts += " --enable-gold=default --enable-ld --with-plugin-ld=ld.gold"
        else:
            self.configopts += " --enable-gold --enable-ld=default"

        # enable bootstrap build for self-containment (unless for staged build)
        if not self.stagedbuild:
            configopts += " --enable-bootstrap"
        else:
            configopts += " --disable-bootstrap"

        if self.stagedbuild:
            #
            # STAGE 1: configure GCC build that will be used to build PPL/CLooG
            #
            self.log.info("Starting with stage 1 of 3-staged build to enable CLooG and/or PPL, ISL support...")
            self.stage1installdir = os.path.join(self.builddir, 'GCC_stage1_eb')
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.stage1installdir}

        else:
            # unstaged build, so just run standard configure/make/make install
            # set prefixes
            self.log.info("Performing regular GCC build...")
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.installdir}

        # prioritize lib over lib{64,32,x32} for all architectures by overriding default MULTILIB_OSDIRNAMES config
        # only do this when multilib is not enabled
        if self.cfg['prefer_lib_subdir'] and not self.cfg['multilib']:
            cfgfile = 'gcc/config/i386/t-linux64'
            multilib_osdirnames = "MULTILIB_OSDIRNAMES = m64=../lib:../lib64 m32=../lib:../lib32 mx32=../lib:../libx32"
            self.log.info("Patching MULTILIB_OSDIRNAMES in %s with '%s'", cfgfile, multilib_osdirnames)
            write_file(cfgfile, multilib_osdirnames, append=True)
        elif self.cfg['multilib']:
            self.log.info("Not patching MULTILIB_OSDIRNAMES since use of --enable-multilib is enabled")

        # III) create obj dir to build in, and change to it
        #     GCC doesn't like to be built in the source dir
        if self.stagedbuild:
            objdir = self.create_dir("stage1_obj")
            self.stage1prefix = objdir
        else:
            objdir = self.create_dir("obj")

        # IV) actual configure, but not on default path
        cmd = "../configure  %s %s" % (self.configopts, configopts)

        # instead of relying on uname, we run the same command GCC uses to
        # determine the platform
        out, ec = run_cmd("../config.guess", simple=False)
        if ec == 0:
            self.platform_lib = out.rstrip()

        self.run_configure_cmd(cmd)

        self.disable_lto_mpfr_old_gcc(objdir)
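
For reference, a minimal sketch (hypothetical prefix and a trimmed-down option set, not a real build) of the kind of configure command assembled above for an unstaged build:

# minimal sketch, hypothetical values
configopts = " --enable-languages=c,c++,fortran --enable-lto --enable-checking=release"
configopts += " --disable-multilib --enable-bootstrap"
configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': '/opt/software/GCC/9.3.0'}
cmd = "../configure  %s" % configopts
print(cmd)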
Code example #48
0
    def test_step(self):
        """Run WPS test (requires large dataset to be downloaded). """

        wpsdir = None

        def run_wps_cmd(cmdname, mpi_cmd=True):
            """Run a WPS command, and check for success."""

            cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

            if mpi_cmd:
                if build_option('mpi_tests'):
                    cmd = self.toolchain.mpi_cmd_for(cmd, 1)
                else:
                    self.log.info(
                        "Skipping MPI test for %s, since MPI tests are disabled",
                        cmd)
                    return

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            re_success = re.compile("Successful completion of %s" % cmdname)
            if not re_success.search(out):
                raise EasyBuildError("%s.exe failed (pattern '%s' not found)?",
                                     cmdname, re_success.pattern)

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            wpsdir = os.path.join(self.builddir, "WPS")

            try:
                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                os.chdir(tmpdir)

                # download data
                testdata_paths = []
                for testdata in self.cfg['testdata']:
                    path = self.obtain_file(testdata)
                    if not path:
                        raise EasyBuildError(
                            "Downloading file from %s failed?", testdata)
                    testdata_paths.append(path)

                # unpack data
                for path in testdata_paths:
                    extract_file(path, tmpdir)

                namelist_file = os.path.join(tmpdir, 'namelist.wps')

                # GEOGRID

                # setup directories and files
                for d in os.listdir(os.path.join(tmpdir, "geog")):
                    os.symlink(os.path.join(tmpdir, "geog", d),
                               os.path.join(tmpdir, d))

                # copy namelist.wps file and patch it for geogrid
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                for line in fileinput.input(namelist_file,
                                            inplace=1,
                                            backup='.orig.geogrid'):
                    line = re.sub(r"^(\s*geog_data_path\s*=\s*).*$",
                                  r"\1 '%s'" % tmpdir, line)
                    sys.stdout.write(line)

                # GEOGRID.TBL
                geogrid_dir = os.path.join(tmpdir, "geogrid")
                os.mkdir(geogrid_dir)
                os.symlink(os.path.join(wpsdir, "geogrid", "GEOGRID.TBL.ARW"),
                           os.path.join(geogrid_dir, "GEOGRID.TBL"))

                # run geogrid.exe
                run_wps_cmd("geogrid")

                # UNGRIB

                # determine start and end time stamps of grib files
                grib_file_prefix = "fnl_"
                k = len(grib_file_prefix)
                fs = [
                    f for f in sorted(os.listdir('.'))
                    if f.startswith(grib_file_prefix)
                ]
                start = "%s:00:00" % fs[0][k:]
                end = "%s:00:00" % fs[-1][k:]

                # copy namelist.wps file and patch it for ungrib
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                for line in fileinput.input(namelist_file,
                                            inplace=1,
                                            backup='.orig.ungrib'):
                    line = re.sub(r"^(\s*start_date\s*=\s*).*$",
                                  r"\1 '%s','%s'," % (start, start), line)
                    line = re.sub(r"^(\s*end_date\s*=\s*).*$",
                                  r"\1 '%s','%s'," % (end, end), line)
                    sys.stdout.write(line)

                # copy correct Vtable
                vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
                if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW'),
                              os.path.join(tmpdir, 'Vtable'))
                elif os.path.exists(os.path.join(vtable_dir,
                                                 'Vtable.ARW.UPP')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'),
                              os.path.join(tmpdir, 'Vtable'))
                else:
                    raise EasyBuildError(
                        "Could not find Vtable file to use for testing ungrib")

                # run link_grib.csh script
                cmd = "%s %s*" % (os.path.join(
                    wpsdir, "link_grib.csh"), grib_file_prefix)
                run_cmd(cmd, log_all=True, simple=True)

                # run ungrib.exe
                run_wps_cmd("ungrib", mpi_cmd=False)

                # METGRID.TBL

                metgrid_dir = os.path.join(tmpdir, "metgrid")
                os.mkdir(metgrid_dir)
                os.symlink(os.path.join(wpsdir, "metgrid", "METGRID.TBL.ARW"),
                           os.path.join(metgrid_dir, "METGRID.TBL"))

                # run metgrid.exe
                run_wps_cmd('metgrid')

                # clean up
                rmtree2(tmpdir)

                os.chdir(self.builddir)

            except OSError as err:
                raise EasyBuildError("Failed to run WPS test: %s", err)
Code example #49
0
    def configure_step(self):
        """Configure Python package build/install."""

        # don't add user site directory to sys.path (equivalent to python -s)
        # see https://www.python.org/dev/peps/pep-0370/
        env.setvar('PYTHONNOUSERSITE', '1', verbose=False)

        if self.python_cmd is None:
            self.prepare_python()

        if self.sitecfg is not None:
            # used by some extensions, like numpy, to find certain libs

            finaltxt = self.sitecfg
            if self.sitecfglibdir:
                repl = self.sitecfglibdir
                finaltxt = finaltxt.replace('SITECFGLIBDIR', repl)

            if self.sitecfgincdir:
                repl = self.sitecfgincdir
                finaltxt = finaltxt.replace('SITECFGINCDIR', repl)

            self.log.debug("Using %s: %s" % (self.sitecfgfn, finaltxt))
            try:
                if os.path.exists(self.sitecfgfn):
                    txt = open(self.sitecfgfn).read()
                    self.log.debug("Found %s: %s" % (self.sitecfgfn, txt))
                config = open(self.sitecfgfn, 'w')
                config.write(finaltxt)
                config.close()
            except IOError:
                raise EasyBuildError("Creating %s failed", self.sitecfgfn)

        # conservatively auto-enable checking of $LDSHARED if it is not explicitly enabled or disabled
        # only do this for sufficiently recent Python versions (>= 3.7 or Python 2.x >= 2.7.15)
        if self.cfg.get('check_ldshared') is None:
            pyver = det_python_version(self.python_cmd)
            recent_py2 = pyver.startswith('2') and LooseVersion(pyver) >= LooseVersion('2.7.15')
            if recent_py2 or LooseVersion(pyver) >= LooseVersion('3.7'):
                self.log.info(
                    "Checking of $LDSHARED auto-enabled for sufficiently recent Python version %s",
                    pyver)
                self.cfg['check_ldshared'] = True
            else:
                self.log.info(
                    "Not auto-enabling checking of $LDSHARED, Python version %s is not recent enough",
                    pyver)

        # ensure that LDSHARED uses CC
        if self.cfg.get('check_ldshared', False):
            curr_cc = os.getenv('CC')
            python_ldshared = get_config_vars('LDSHARED')[0]
            if python_ldshared and curr_cc:
                if python_ldshared.split(' ')[0] == curr_cc:
                    self.log.info(
                        "Python's value for $LDSHARED ('%s') uses current $CC value ('%s'), not touching it",
                        python_ldshared, curr_cc)
                else:
                    self.log.info(
                        "Python's value for $LDSHARED ('%s') doesn't use current $CC value ('%s'), fixing",
                        python_ldshared, curr_cc)
                    env.setvar("LDSHARED", curr_cc + " -shared")
            else:
                if curr_cc:
                    self.log.info(
                        "No $LDSHARED found for Python, setting to '%s -shared'",
                        curr_cc)
                    env.setvar("LDSHARED", curr_cc + " -shared")
                else:
                    self.log.info(
                        "No value set for $CC, so not touching $LDSHARED either"
                    )

        # creates log entries for python being used, for debugging
        cmd = "%(python)s -V; %(python)s -c 'import sys; print(sys.executable, sys.path)'"
        run_cmd(cmd % {'python': self.python_cmd}, verbose=False, trace=False)
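
A minimal standalone sketch of the $LDSHARED consistency check performed above, using the standard sysconfig module (values are system-dependent; the easyblock itself sets the variable via env.setvar):

# minimal sketch of the LDSHARED-vs-CC check; sysconfig is used here instead of the
# easyblock's get_config_vars import
import os
import sysconfig

curr_cc = os.getenv('CC')
ldshared = sysconfig.get_config_vars('LDSHARED')[0]
if curr_cc and ldshared and ldshared.split(' ')[0] != curr_cc:
    os.environ['LDSHARED'] = curr_cc + ' -shared'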
Code example #50
0
    def build_step(self):

        if self.stagedbuild:

            # make and install stage 1 build of GCC
            paracmd = ''
            if self.cfg['parallel']:
                paracmd = "-j %s" % self.cfg['parallel']

            cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd, self.cfg['buildopts'])
            run_cmd(cmd, log_all=True, simple=True)

            cmd = "make install %s" % (self.cfg['installopts'])
            run_cmd(cmd, log_all=True, simple=True)

            # register built GCC as compiler to use for stage 2/3
            path = "%s/bin:%s" % (self.stage1installdir, os.getenv('PATH'))
            env.setvar('PATH', path)

            ld_lib_path = "%(dir)s/lib64:%(dir)s/lib:%(val)s" % {
                'dir': self.stage1installdir,
                'val': os.getenv('LD_LIBRARY_PATH')
            }
            env.setvar('LD_LIBRARY_PATH', ld_lib_path)

            #
            # STAGE 2: build GMP/PPL/CLooG for stage 3
            #

            # create dir to build GMP/PPL/CLooG in
            stage2dir = "stage2_stuff"
            stage2prefix = self.create_dir(stage2dir)

            # prepare directories to build GMP/PPL/CLooG
            stage2_info = self.prep_extra_src_dirs("stage2", target_prefix=stage2prefix)
            configopts = stage2_info['configopts']

            # build PPL and CLooG (GMP as dependency)

            for lib in ["gmp"] + self.with_dirs:
                self.log.debug("Building %s in stage 2" % lib)
                if lib == "gmp" or self.cfg['with%s' % lib]:
                    libdir = os.path.join(stage2prefix, lib)
                    try:
                        os.chdir(libdir)
                    except OSError as err:
                        raise EasyBuildError("Failed to change to %s: %s", libdir, err)
                    if lib == "gmp":
                        cmd = "./configure --prefix=%s " % stage2prefix
                        cmd += "--with-pic --disable-shared --enable-cxx "

                        # ensure generic build when 'generic' is set to True or when --optarch=GENERIC is used
                        # non-generic build can be enforced with generic=False if --optarch=GENERIC is used
                        if build_option('optarch') == OPTARCH_GENERIC and self.cfg['generic'] is not False:
                            cmd += "--enable-fat "

                    elif lib == "ppl":
                        self.pplver = LooseVersion(stage2_info['versions']['ppl'])

                        cmd = "./configure --prefix=%s --with-pic -disable-shared " % stage2prefix
                        # only enable C/C++ interfaces (Java interface is sometimes troublesome)
                        cmd += "--enable-interfaces='c c++' "

                        # enable watchdog (or not)
                        if self.pplver <= LooseVersion("0.11"):
                            if self.cfg['pplwatchdog']:
                                cmd += "--enable-watchdog "
                            else:
                                cmd += "--disable-watchdog "
                        elif self.cfg['pplwatchdog']:
                            raise EasyBuildError("Enabling PPL watchdog only supported in PPL <= v0.11 .")

                        # make sure GMP we just built is found
                        cmd += "--with-gmp=%s " % stage2prefix
                    elif lib == "isl":
                        cmd = "./configure --prefix=%s --with-pic --disable-shared " % stage2prefix
                        cmd += "--with-gmp=system --with-gmp-prefix=%s " % stage2prefix

                        # ensure generic build when 'generic' is set to True or when --optarch=GENERIC is used
                        # non-generic build can be enforced with generic=False if --optarch=GENERIC is used
                        if build_option('optarch') == OPTARCH_GENERIC and self.cfg['generic'] is not False:
                            cmd += "--without-gcc-arch "

                    elif lib == "cloog":
                        self.cloogname = stage2_info['names']['cloog']
                        self.cloogver = LooseVersion(stage2_info['versions']['cloog'])
                        v0_15 = LooseVersion("0.15")
                        v0_16 = LooseVersion("0.16")

                        cmd = "./configure --prefix=%s --with-pic --disable-shared " % stage2prefix

                        # use ISL or PPL
                        if self.cfg['clooguseisl']:
                            if self.cfg['withisl']:
                                self.log.debug("Using external ISL for CLooG")
                                cmd += "--with-isl=system --with-isl-prefix=%s " % stage2prefix
                            elif self.cloogver >= v0_16:
                                self.log.debug("Using bundled ISL for CLooG")
                                cmd += "--with-isl=bundled "
                            else:
                                raise EasyBuildError("Using ISL is only supported in CLooG >= v0.16 (detected v%s).",
                                                     self.cloogver)
                        else:
                            if self.cloogname == "cloog-ppl" and self.cloogver >= v0_15 and self.cloogver < v0_16:
                                cmd += "--with-ppl=%s " % stage2prefix
                            else:
                                errormsg = "PPL only supported with CLooG-PPL v0.15.x (detected v%s)" % self.cloogver
                                errormsg += "\nNeither using PPL or ISL-based ClooG, I'm out of options..."
                                raise EasyBuildError(errormsg)

                        # make sure GMP is found
                        if self.cloogver >= v0_15 and self.cloogver < v0_16:
                            cmd += "--with-gmp=%s " % stage2prefix
                        elif self.cloogver >= v0_16:
                            cmd += "--with-gmp=system --with-gmp-prefix=%s " % stage2prefix
                        else:
                            raise EasyBuildError("Don't know how to specify location of GMP to configure of CLooG v%s.",
                                                 self.cloogver)
                    else:
                        raise EasyBuildError("Don't know how to configure for %s", lib)

                    # configure
                    self.run_configure_cmd(cmd)

                    # build and 'install'
                    cmd = "make %s" % paracmd
                    run_cmd(cmd, log_all=True, simple=True)

                    cmd = "make install"
                    run_cmd(cmd, log_all=True, simple=True)

                    if lib == "gmp":
                        # make sure correct GMP is found
                        libpath = os.path.join(stage2prefix, 'lib')
                        incpath = os.path.join(stage2prefix, 'include')

                        cppflags = os.getenv('CPPFLAGS', '')
                        env.setvar('CPPFLAGS', "%s -L%s -I%s " % (cppflags, libpath, incpath))

            #
            # STAGE 3: bootstrap build of final GCC (with PPL/CLooG support)
            #

            # create new obj dir and change into it
            self.create_dir("stage3_obj")

            # reconfigure for stage 3 build
            self.log.info("Stage 2 of 3-staged build completed, continuing with stage 3 "
                          "(with CLooG and/or PPL, ISL support enabled)...")

            stage3_info = self.prep_extra_src_dirs("stage3")
            configopts = stage3_info['configopts']
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.installdir}

            # enable bootstrapping for self-containment
            configopts += " --enable-bootstrap "

            # PPL config options
            if self.cfg['withppl']:
                # for PPL build and CLooG-PPL linking
                for lib in ["lib64", "lib"]:
                    path = os.path.join(self.stage1installdir, lib, "libstdc++.a")
                    if os.path.exists(path):
                        libstdcxxpath = path
                        break
                configopts += "--with-host-libstdcxx='-static-libgcc %s -lm' " % libstdcxxpath

                configopts += "--with-ppl=%s " % stage2prefix

                if self.pplver <= LooseVersion("0.11"):
                    if self.cfg['pplwatchdog']:
                        configopts += "--enable-watchdog "
                    else:
                        configopts += "--disable-watchdog "

            # CLooG config options
            if self.cfg['withcloog']:
                configopts += "--with-cloog=%s " % stage2prefix

                gccver = LooseVersion(self.version)
                if self.cfg['clooguseisl'] and self.cloogver >= LooseVersion('0.16') and gccver < LooseVersion('4.8.0'):
                    configopts += "--enable-cloog-backend=isl "

            if self.cfg['withisl']:
                configopts += "--with-isl=%s " % stage2prefix

            # configure
            cmd = "../configure %s %s" % (self.configopts, configopts)
            self.run_configure_cmd(cmd)

        # build with bootstrapping for self-containment
        if self.cfg['profiled']:
            self.cfg.update('buildopts', 'profiledbootstrap')
        else:
            self.cfg.update('buildopts', 'bootstrap')

        # call standard build_step
        super(EB_GCC, self).build_step()
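
As a stand-alone illustration of the version-dependent GMP wiring in the CLooG configure step above, here is a minimal sketch in plain Python (no EasyBuild imports; the helper name and example values are hypothetical):

# mirrors the CLooG/GMP branching shown above, kept free of EasyBuild dependencies
from distutils.version import LooseVersion  # assumption: same LooseVersion class as used in these snippets


def cloog_gmp_configopts(cloog_version, stage2prefix):
    """Return the GMP-related configure flags for the given CLooG version (hypothetical helper)."""
    ver = LooseVersion(cloog_version)
    if LooseVersion("0.15") <= ver < LooseVersion("0.16"):
        # CLooG 0.15.x takes the GMP prefix directly
        return "--with-gmp=%s" % stage2prefix
    if ver >= LooseVersion("0.16"):
        # CLooG >= 0.16 distinguishes bundled vs. system GMP
        return "--with-gmp=system --with-gmp-prefix=%s" % stage2prefix
    raise ValueError("Don't know how to point CLooG v%s to GMP" % cloog_version)


# e.g. cloog_gmp_configopts("0.18.1", "/tmp/gcc/stage2")
# -> "--with-gmp=system --with-gmp-prefix=/tmp/gcc/stage2"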
コード例 #51
0
    try:
        os.chdir(absDest)
    except OSError as err:
        _log.error("Can't change to directory %s: %s" % (absDest, err))

    if not cmd:
        cmd = extract_cmd(fn, overwrite=overwrite)
    else:
        # complete command template with filename
        cmd = cmd % fn
    if not cmd:
        _log.error("Can't extract file %s with unknown filetype" % fn)

    if extra_options:
        cmd = "%s %s" % (cmd, extra_options)

    run.run_cmd(cmd, simple=True)

    return find_base_dir()


def which(cmd):
    """Return (first) path in $PATH for specified command, or None if command is not found."""
    paths = os.environ.get('PATH', '').split(os.pathsep)
    for path in paths:
        cmd_path = os.path.join(path, cmd)
        # only accept path if command is there, and both readable and executable
        if os.access(cmd_path, os.R_OK | os.X_OK):
            _log.info("Command %s found at %s" % (cmd, cmd_path))
            return cmd_path
    _log.warning(
        "Could not find command '%s' (with permissions to read/execute it) in $PATH (%s)" % (cmd, paths)
    )
    return None
コード例 #52
0
    def sanity_check_step(self, *args, **kwargs):
        """
        Custom sanity check for Python packages
        """

        success, fail_msg = True, ''

        # don't add user site directory to sys.path (equivalent to python -s)
        # see https://www.python.org/dev/peps/pep-0370/;
        # must be set here to ensure that it is defined when running sanity check for extensions,
        # since load_module is not called for every extension,
        # to avoid that any Python packages installed in $HOME/.local/lib affect the sanity check;
        # see also https://github.com/easybuilders/easybuild-easyblocks/issues/1877
        env.setvar('PYTHONNOUSERSITE', '1', verbose=False)

        if self.cfg.get('download_dep_fail', False):
            self.log.info(
                "Detection of downloaded depenencies enabled, checking output of installation command..."
            )
            patterns = [
                'Downloading .*/packages/.*',  # setuptools
                r'Collecting .*',  # pip
            ]
            downloaded_deps = []
            for pattern in patterns:
                downloaded_deps.extend(
                    re.compile(pattern, re.M).findall(self.install_cmd_output))

            if downloaded_deps:
                success = False
                fail_msg = "found one or more downloaded dependencies: %s" % ', '.join(
                    downloaded_deps)
                self.sanity_check_fail_msgs.append(fail_msg)
        else:
            self.log.debug("Detection of downloaded dependencies not enabled")

        # inject directory path that uses %(pyshortver)s template into default value for sanity_check_paths,
        # but only for stand-alone installations, not for extensions;
        # this is relevant for installations of Python packages for multiple Python versions (via multi_deps)
        # (we can not pass this via custom_paths, since then the %(pyshortver)s template value will not be resolved)
        if not self.is_extension and not self.cfg['sanity_check_paths'] and kwargs.get('custom_paths') is None:
            self.cfg['sanity_check_paths'] = {
                'files': [],
                'dirs': [os.path.join('lib', 'python%(pyshortver)s', 'site-packages')],
            }

        # make sure 'exts_filter' is defined, which is used for sanity check
        if self.multi_python:
            # when installing for multiple Python versions, we must use 'python', not a full-path 'python' command!
            python_cmd = 'python'
            if 'exts_filter' not in kwargs:
                kwargs.update({'exts_filter': EXTS_FILTER_PYTHON_PACKAGES})
        else:
            # 'python' is replaced by full path to active 'python' command
            # (which is required especially when installing with system Python)
            if self.python_cmd is None:
                self.prepare_python()
            python_cmd = self.python_cmd
            if 'exts_filter' not in kwargs:
                orig_exts_filter = EXTS_FILTER_PYTHON_PACKAGES
                exts_filter = (orig_exts_filter[0].replace('python', self.python_cmd), orig_exts_filter[1])
                kwargs.update({'exts_filter': exts_filter})

        if self.cfg.get('sanity_pip_check', False):
            pip_version = det_pip_version()
            if pip_version:
                if LooseVersion(pip_version) >= LooseVersion('9.0.0'):

                    if not self.is_extension:
                        # for stand-alone Python package installations (not part of a bundle of extensions),
                        # we need to load the fake module file, otherwise the Python package being installed
                        # is not "in view", and we will overlook missing dependencies...
                        fake_mod_data = self.load_fake_module(purge=True)

                    pip_check_errors = []

                    pip_check_msg, ec = run_cmd("pip check", log_ok=False)
                    if ec:
                        pip_check_errors.append('`pip check` failed:\n%s' %
                                                pip_check_msg)
                    else:
                        self.log.info('`pip check` completed successfully')

                    # Also check for a common issue where the package version shows up as 0.0.0 often caused
                    # by using setup.py as the installation method for a package which is released as a generic wheel
                    # named name-version-py2.py3-none-any.whl. `tox` creates those from version controlled source code
                    # so it will contain a version, but the raw tar.gz does not.
                    pkgs = self.get_installed_python_packages(
                        names_only=False, python_cmd=python_cmd)
                    faulty_version = '0.0.0'
                    faulty_pkg_names = [
                        pkg['name'] for pkg in pkgs
                        if pkg['version'] == faulty_version
                    ]

                    for unversioned_package in self.cfg.get(
                            'unversioned_packages', []):
                        try:
                            faulty_pkg_names.remove(unversioned_package)
                            self.log.debug(
                                'Excluding unversioned package %s from check',
                                unversioned_package)
                        except ValueError:
                            try:
                                version = next(
                                    pkg['version'] for pkg in pkgs
                                    if pkg['name'] == unversioned_package)
                            except StopIteration:
                                msg = (
                                    'Package %s in unversioned_packages was not found in the installed packages. '
                                    'Check that the name from `pip list` is used which may be different than the '
                                    'module name.' % unversioned_package)
                            else:
                                msg = (
                                    'Package %s in unversioned_packages has a version of %s which is valid. '
                                    'Please remove it from unversioned_packages.'
                                    % (unversioned_package, version))
                            pip_check_errors.append(msg)

                    self.log.info(
                        'Found %s invalid packages out of %s packages',
                        len(faulty_pkg_names), len(pkgs))
                    if faulty_pkg_names:
                        msg = (
                            "The following Python packages were likely not installed correctly because they show a "
                            "version of '%s':\n%s\n"
                            "This may be solved by using a *-none-any.whl file as the source instead. "
                            "See e.g. the SOURCE*_WHL templates.\n"
                            "Otherwise you could check if the package provides a version at all or if e.g. poetry is "
                            "required (check the source for a pyproject.toml and see PEP517 for details on that)."
                        ) % (faulty_version, '\n'.join(faulty_pkg_names))
                        pip_check_errors.append(msg)

                    if not self.is_extension:
                        self.clean_up_fake_module(fake_mod_data)

                    if pip_check_errors:
                        raise EasyBuildError('\n'.join(pip_check_errors))
                else:
                    raise EasyBuildError(
                        "pip >= 9.0.0 is required for running 'pip check', found %s",
                        pip_version)
            else:
                raise EasyBuildError("Failed to determine pip version!")

        parent_success, parent_fail_msg = super(PythonPackage, self).sanity_check_step(*args, **kwargs)

        if parent_fail_msg:
            parent_fail_msg += ', '

        return (parent_success and success, parent_fail_msg + fail_msg)
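
The '0.0.0 version' detection above relies on EasyBuild helper methods; a rough stand-alone sketch of the same idea using `pip list --format=json` directly (requires pip >= 9; the function name and defaults are hypothetical):

import json
import subprocess


def find_unversioned_packages(python_cmd="python", ignore=()):
    """Return names of installed packages that report the bogus version 0.0.0."""
    out = subprocess.run(
        [python_cmd, "-m", "pip", "list", "--format=json"],
        capture_output=True, text=True, check=True,
    ).stdout
    return [pkg["name"] for pkg in json.loads(out)
            if pkg["version"] == "0.0.0" and pkg["name"] not in ignore]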
コード例 #53
0
    def test_generate_software_list(self):
        """Test for generate_software_list.py script."""

        # adjust $PYTHONPATH such that test easyblocks are found by the script
        test_dir = os.path.abspath(os.path.dirname(__file__))
        eb_blocks_path = os.path.join(test_dir, 'sandbox')
        pythonpath = os.environ.get('PYTHONPATH', os.path.dirname(test_dir))
        os.environ['PYTHONPATH'] = os.pathsep.join(
            [pythonpath, eb_blocks_path])

        testdir = os.path.dirname(__file__)
        topdir = os.path.dirname(os.path.dirname(testdir))
        script = os.path.join(topdir, 'easybuild', 'scripts',
                              'generate_software_list.py')
        easyconfigs_dir = os.path.join(testdir, 'easyconfigs')

        # copy easyconfig files in format v1 to run the script
        tmpdir = tempfile.mkdtemp()
        for root, subfolders, files in os.walk(easyconfigs_dir):
            if 'v2.0' in subfolders:
                subfolders.remove('v2.0')
            for ec_file in [
                    f for f in files if 'broken' not in os.path.basename(f)
            ]:
                shutil.copy2(os.path.join(root, ec_file), tmpdir)

        cmd = "%s %s --local --quiet --path %s" % (sys.executable, script,
                                                   tmpdir)
        out, ec = run_cmd(cmd, simple=False)

        # make sure output is kind of what we expect it to be
        regex = r"Supported Packages \(32 "
        self.assertTrue(re.search(regex, out),
                        "Pattern '%s' found in output: %s" % (regex, out))
        per_letter = {
            'B': '2',  # binutils, bzip2
            'C': '2',  # CrayCCE, CUDA
            'F': '3',  # foss, fosscuda, FFTW
            'G': '9',  # GCC, GCCcore, gcccuda, gmvapich2, golf, golfc, gompic, gompi, gzip
            'H': '1',  # hwloc
            'I': '8',  # icc, iccifort, iccifortcuda, intel, ifort, iimpi, imkl, impi
            'M': '1',  # MVAPICH2
            'O': '2',  # OpenMPI, OpenBLAS
            'P': '1',  # Python
            'S': '2',  # ScaLAPACK, SQLite
            'T': '1',  # toy
        }
        self.assertTrue(' - '.join(
            ["[%(l)s](#%(l)s)" % {
                'l': l
            } for l in sorted(per_letter.keys())]))
        for key, val in per_letter.items():
            regex = re.compile(
                r"### %(l)s \(%(n)s packages\) <a name='%(l)s'/>" % {
                    'l': key,
                    'n': val
                })
            self.assertTrue(regex.search(out),
                            "Pattern '%s' found in: %s" % (regex.pattern, out))

        software = [
            'FFTW', 'foss', 'GCC', 'gompi', 'gzip', 'hwloc', 'OpenMPI',
            'OpenBLAS', 'ScaLAPACK', 'toy'
        ]
        for soft in software:
            letter = soft[0].lower()
            pattern = r"^\*.*logo[\s\S]*easyconfigs/%(l)s/%(s)s\)[\s\S]*%(s)s.*\n" % {
                'l': letter,
                's': soft
            }
            self.assertTrue(re.search(pattern, out, re.M),
                            "Pattern '%s' found in: %s" % (pattern, out))

        shutil.rmtree(tmpdir)
        os.environ['PYTHONPATH'] = pythonpath
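
The per-letter header check in the test above can be summarized as a small helper; 'out' stands for the captured output of generate_software_list.py (a sketch for illustration, not part of the test suite):

import re


def check_letter_headers(out, per_letter):
    """Assert that each '### <letter> (<n> packages)' header occurs in the script output."""
    for letter, count in per_letter.items():
        regex = re.compile(r"### %(l)s \(%(n)s packages\) <a name='%(l)s'/>" % {'l': letter, 'n': count})
        if not regex.search(out):
            raise AssertionError("Missing header for %s (%s packages)" % (letter, count))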
コード例 #54
0
    def test_generate_software_list(self):
        """Test for generate_software_list.py script."""

        # adjust $PYTHONPATH such that test easyblocks are found by the script
        eb_blocks_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'sandbox'))
        pythonpath = os.environ['PYTHONPATH']
        os.environ['PYTHONPATH'] = "%s:%s" % (pythonpath, eb_blocks_path)

        testdir = os.path.dirname(__file__)
        topdir = os.path.dirname(os.path.dirname(testdir))
        script = os.path.join(topdir, 'easybuild', 'scripts',
                              'generate_software_list.py')
        easyconfigs_dir = os.path.join(testdir, 'easyconfigs')

        # copy easyconfig files in format v1 to run the script
        tmpdir = tempfile.mkdtemp()
        for root, subfolders, files in os.walk(easyconfigs_dir):
            if 'v2.0' in subfolders:
                subfolders.remove('v2.0')
            for ec_file in files:
                shutil.copy2(os.path.join(root, ec_file), tmpdir)

        cmd = "python %s --local --quiet --path %s" % (script, tmpdir)
        out, ec = run_cmd(cmd, simple=False)

        # make sure output is kind of what we expect it to be
        self.assertTrue(re.search(r"Supported Packages \(11", out))
        per_letter = {
            'F': '1',  # FFTW
            'G': '4',  # GCC, gompi, goolf, gzip
            'H': '1',  # hwloc
            'I': '1',  # ictce
            'O': '2',  # OpenMPI, OpenBLAS
            'S': '1',  # ScaLAPACK
            'T': '1',  # toy
        }
        self.assertTrue(' - '.join(
            ["[%(l)s](#%(l)s)" % {
                'l': l
            } for l in sorted(per_letter.keys())]))
        for key, val in per_letter.items():
            self.assertTrue(
                re.search(
                    r"### %(l)s \(%(n)s packages\) <a name='%(l)s'/>" % {
                        'l': key,
                        'n': val
                    }, out))

        software = [
            'FFTW', 'GCC', 'gompi', 'goolf', 'gzip', 'hwloc', 'OpenMPI',
            'OpenBLAS', 'ScaLAPACK', 'toy'
        ]
        for soft in software:
            letter = soft[0].lower()
            pattern = r"^\*.*logo[\s\S]*easyconfigs/%(l)s/%(s)s\)[\s\S]*%(s)s.*\n" % {
                'l': letter,
                's': soft
            }
            self.assertTrue(re.search(pattern, out, re.M))

        shutil.rmtree(tmpdir)
        os.environ['PYTHONPATH'] = pythonpath
コード例 #55
0
    def configure_step(self):
        """Custom configuration procedure for NEURON."""
        if LooseVersion(self.version) < LooseVersion('7.8.1'):

            # make sure we're using the correct configure command
            # (required because custom easyconfig parameters from CMakeMake are picked up)
            self.cfg['configure_cmd'] = "./configure"

            # enable support for distributed simulations if desired
            if self.cfg['paranrn']:
                self.cfg.update('configopts', '--with-paranrn')

            # specify path to InterViews if it is available as a dependency
            interviews_root = get_software_root('InterViews')
            if interviews_root:
                self.cfg.update('configopts', "--with-iv=%s" % interviews_root)
            else:
                self.cfg.update('configopts', "--without-iv")

            # optionally enable support for Python as alternative interpreter
            python_root = get_software_root('Python')
            if python_root:
                self.with_python = True
                self.cfg.update('configopts', "--with-nrnpython=%s/bin/python" % python_root)

            # determine host CPU type
            cmd = "./config.guess"
            (out, ec) = run_cmd(cmd, simple=False)

            self.hostcpu = out.split('\n')[0].split('-')[0]
            self.log.debug("Determined host CPU type as %s" % self.hostcpu)

            # determine Python lib dir
            self.pylibdir = det_pylibdir()

            # complete configuration with configure_method of parent
            ConfigureMake.configure_step(self)
        else:
            # enable support for distributed simulations if desired
            if self.cfg['paranrn']:
                self.cfg.update('configopts', '-DNRN_ENABLE_MPI=ON')
            else:
                self.cfg.update('configopts', '-DNRN_ENABLE_MPI=OFF')

            # specify path to InterViews if it is available as a dependency
            interviews_root = get_software_root('InterViews')
            if interviews_root:
                self.cfg.update('configopts', "-DIV_DIR=%s -DNRN_ENABLE_INTERVIEWS=ON" % interviews_root)
            else:
                self.cfg.update('configopts', "-DNRN_ENABLE_INTERVIEWS=OFF")

            # no longer used it seems
            self.hostcpu = ''

            # optionally enable support for Python as alternative interpreter
            python_root = get_software_root('Python')
            if python_root:
                self.with_python = True
                self.cfg.update('configopts', "-DNRN_ENABLE_PYTHON=ON -DPYTHON_EXECUTABLE=%s/bin/python" % python_root)
                self.cfg.update('configopts', "-DNRN_ENABLE_MODULE_INSTALL=ON "
                                "-DNRN_MODULE_INSTALL_OPTIONS='--prefix=%s'" % self.installdir)
            else:
                self.cfg.update('configopts', "-DNRN_ENABLE_PYTHON=OFF")

            # determine Python lib dir
            self.pylibdir = det_pylibdir()

            # complete configuration with configure_method of parent
            CMakeMake.configure_step(self)
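
The version switch above boils down to choosing between Autotools-style and CMake-style options; a minimal sketch of the MPI-related part (version threshold taken from the snippet, helper name is hypothetical):

from distutils.version import LooseVersion  # assumption: same LooseVersion class as used in these snippets


def neuron_mpi_configopt(version, paranrn):
    """Return the MPI-related configure option for the given NEURON version."""
    if LooseVersion(version) < LooseVersion('7.8.1'):
        # Autotools-based build (configure/make)
        return '--with-paranrn' if paranrn else ''
    # CMake-based build
    return '-DNRN_ENABLE_MPI=%s' % ('ON' if paranrn else 'OFF')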
コード例 #56
0
    def run_clang_tests(self, obj_dir):
        os.chdir(obj_dir)

        self.log.info("Running tests")
        run_cmd("make %s check-all" % self.make_parallel_opts, log_all=True)
コード例 #57
0
                self.cfg.update('buildopts', 'CC="$MPICC"')

        cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd,
                                 self.cfg['buildopts'])

        # run make in build dir as well for recent version
        if LooseVersion(self.version) >= LooseVersion("4"):
            try:
                os.chdir(self.parmetis_builddir)
                run_cmd(cmd, log_all=True, simple=True, log_output=verbose)
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                self.log.error("Running cmd '%s' in %s failed: %s" % (cmd, self.parmetis_builddir, err))
        else:
            run_cmd(cmd, log_all=True, simple=True, log_output=verbose)

    def install_step(self):
        """
        Install by copying files over to the right places.

        Also create symlinks where expected by other software (Lib directory).
        """
        includedir = os.path.join(self.installdir, 'include')
        libdir = os.path.join(self.installdir, 'lib')

        if LooseVersion(self.version) >= LooseVersion("4"):
            # includedir etc changed in v4, use a normal make install
            cmd = "make install %s" % self.cfg['installopts']
            try:
                os.chdir(self.parmetis_builddir)
コード例 #58
0
    def test_fix_broken_easyconfig(self):
        """Test fix_broken_easyconfigs.py script."""
        testdir = os.path.dirname(__file__)
        topdir = os.path.dirname(os.path.dirname(testdir))
        script = os.path.join(topdir, 'easybuild', 'scripts',
                              'fix_broken_easyconfigs.py')
        test_easyblocks = os.path.join(testdir, 'sandbox')

        broken_ec_txt_tmpl = '\n'.join([
            "# licenseheader",
            "%sname = '%s'",
            "version = '1.2.3'",
            '',
            "description = 'foo'",
            "homepage = 'http://example.com'",
            '',
            "toolchain = {'name': 'GCC', 'version': '4.8.2'}",
            '',
            "premakeopts = 'FOO=libfoo.%%s' %% shared_lib_ext",
            "makeopts = 'CC=gcc'",
            '',
            "license = 'foo.lic'",
        ])
        fixed_ec_txt_tmpl = '\n'.join([
            "# licenseheader",
            "%sname = '%s'",
            "version = '1.2.3'",
            '',
            "description = 'foo'",
            "homepage = 'http://example.com'",
            '',
            "toolchain = {'name': 'GCC', 'version': '4.8.2'}",
            '',
            "prebuildopts = 'FOO=libfoo.%%s' %% SHLIB_EXT",
            "buildopts = 'CC=gcc'",
            '',
            "license_file = 'foo.lic'",
        ])
        broken_ec_tmpl = os.path.join(self.test_prefix, '%s.eb')
        script_cmd_tmpl = "PYTHONPATH=%s:$PYTHONPATH:%s %s %%s" % (
            topdir, test_easyblocks, script)

        # don't change it if it isn't broken
        broken_ec = broken_ec_tmpl % 'notbroken'
        script_cmd = script_cmd_tmpl % broken_ec
        fixed_ec_txt = fixed_ec_txt_tmpl % ("easyblock = 'ConfigureMake'\n\n",
                                            'foo')

        write_file(broken_ec, fixed_ec_txt)
        # (dummy) ConfigureMake easyblock is available in test sandbox
        script_cmd = script_cmd_tmpl % broken_ec
        run_cmd(script_cmd)
        new_ec_txt = read_file(broken_ec)
        self.assertEqual(new_ec_txt, fixed_ec_txt)
        self.assertTrue(EasyConfig(None, rawtxt=new_ec_txt))
        self.assertFalse(os.path.exists(
            '%s.bk' % broken_ec))  # no backup created if nothing was fixed

        broken_ec = broken_ec_tmpl % 'nosuchsoftware'
        script_cmd = script_cmd_tmpl % broken_ec
        broken_ec_txt = broken_ec_txt_tmpl % ('', 'nosuchsoftware')
        fixed_ec_txt = fixed_ec_txt_tmpl % ("easyblock = 'ConfigureMake'\n\n",
                                            'nosuchsoftware')

        # broken easyconfig is fixed in place, original file is backed up
        write_file(broken_ec, broken_ec_txt)
        run_cmd(script_cmd)
        new_ec_txt = read_file(broken_ec)
        self.assertEqual(new_ec_txt, fixed_ec_txt)
        self.assertTrue(EasyConfig(None, rawtxt=new_ec_txt))
        self.assertEqual(read_file('%s.bk' % broken_ec), broken_ec_txt)
        self.assertFalse(os.path.exists('%s.bk1' % broken_ec))

        # broken easyconfig is fixed in place, original file is backed up, existing backup is not overwritten
        write_file(broken_ec, broken_ec_txt)
        write_file('%s.bk' % broken_ec, 'thisshouldnot\nbechanged')
        run_cmd(script_cmd)
        new_ec_txt = read_file(broken_ec)
        self.assertEqual(new_ec_txt, fixed_ec_txt)
        self.assertTrue(EasyConfig(None, rawtxt=new_ec_txt))
        self.assertEqual(read_file('%s.bk' % broken_ec),
                         'thisshouldnot\nbechanged')
        self.assertEqual(read_file('%s.bk1' % broken_ec), broken_ec_txt)

        # if easyblock is specified, that part is left untouched
        broken_ec = broken_ec_tmpl % 'footoy'
        script_cmd = script_cmd_tmpl % broken_ec
        broken_ec_txt = broken_ec_txt_tmpl % ("easyblock = 'EB_toy'\n\n",
                                              'foo')
        fixed_ec_txt = fixed_ec_txt_tmpl % ("easyblock = 'EB_toy'\n\n", 'foo')

        write_file(broken_ec, broken_ec_txt)
        run_cmd(script_cmd)
        new_ec_txt = read_file(broken_ec)
        self.assertEqual(new_ec_txt, fixed_ec_txt)
        self.assertTrue(EasyConfig(None, rawtxt=new_ec_txt))
        self.assertEqual(read_file('%s.bk' % broken_ec), broken_ec_txt)

        # for existing easyblocks, "easyblock = 'ConfigureMake'" should *not* be added
        # EB_toy easyblock is available in test sandbox
        test_easyblocks = os.path.join(testdir, 'sandbox')
        broken_ec = broken_ec_tmpl % 'toy'
        # path to test easyblocks must be *appended* to PYTHONPATH (due to flattening in easybuild-easyblocks repo)
        script_cmd = script_cmd_tmpl % broken_ec
        broken_ec_txt = broken_ec_txt_tmpl % ('', 'toy')
        fixed_ec_txt = fixed_ec_txt_tmpl % ('', 'toy')
        write_file(broken_ec, broken_ec_txt)
        run_cmd(script_cmd)
        new_ec_txt = read_file(broken_ec)
        self.assertEqual(new_ec_txt, fixed_ec_txt)
        self.assertTrue(EasyConfig(None, rawtxt=new_ec_txt))
        self.assertEqual(read_file('%s.bk' % broken_ec), broken_ec_txt)
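
The test above exercises a backup naming convention (<file>.bk, then <file>.bk1, ... if a backup already exists); a sketch of that convention in isolation (not the fix_broken_easyconfigs.py implementation):

import os


def next_backup_path(path):
    """Return the first unused backup path: <path>.bk, <path>.bk1, <path>.bk2, ..."""
    backup = '%s.bk' % path
    idx = 0
    while os.path.exists(backup):
        idx += 1
        backup = '%s.bk%d' % (path, idx)
    return backup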
コード例 #59
0
File: wrf.py  Project: surak/JSC
    def test_step(self):
        """Build and run tests included in the WRF distribution."""
        if self.cfg['runtest']:

            if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
                self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                              self.cfg['buildtype'])
                return

            # get list of WRF test cases
            self.testcases = []
            if os.path.exists('test'):
                self.testcases = os.listdir('test')

            elif not self.dry_run:
                raise EasyBuildError(
                    "Test directory not found, failed to determine list of test cases"
                )

            # exclude 2d testcases in non-parallel WRF builds
            if self.cfg['buildtype'] in self.parallel_build_types:
                self.testcases = [test for test in self.testcases if "2d_" not in test]

            # exclude real testcases
            self.testcases = [test for test in self.testcases if not test.endswith("_real")]

            self.log.debug("intermediate list of testcases: %s" %
                           self.testcases)

            # exclude tests that should not be run
            for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
                if test in self.testcases:
                    self.testcases.remove(test)

            # some tests hang when WRF is built with Intel compilers
            if self.comp_fam == toolchain.INTELCOMP:  #@UndefinedVariable
                for test in ["em_heldsuarez"]:
                    if test in self.testcases:
                        self.testcases.remove(test)

            # determine parallel setting (1/2 of available processors + 1)
            n = self.cfg['parallel'] // 2 + 1

            # prepare run command

            # stack limit needs to be set to unlimited for WRF to work well
            if self.cfg['buildtype'] in self.parallel_build_types:
                test_cmd = "ulimit -s unlimited && %s && %s" % (
                    self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                    self.toolchain.mpi_cmd_for("./wrf.exe", n))
            else:
                test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe" % n

            def run_test():
                """Run a single test and check for success."""

                # regex to check for successful test run
                re_success = re.compile("SUCCESS COMPLETE WRF")

                # run test
                run_cmd(test_cmd, log_all=True, simple=True)

                # check for success
                fn = "rsl.error.0000"
                try:
                    f = open(fn, "r")
                    txt = f.read()
                    f.close()
                except IOError as err:
                    raise EasyBuildError("Failed to read output file %s: %s", fn, err)

                if re_success.search(txt):
                    self.log.info("Test %s ran successfully." % test)

                else:
                    raise EasyBuildError(
                        "Test %s failed, pattern '%s' not found.", test,
                        re_success.pattern)

                # clean up stuff that gets in the way
                fn_prefs = [
                    "wrfinput_", "namelist.output", "wrfout_", "rsl.out.",
                    "rsl.error."
                ]
                for f in os.listdir('.'):
                    for p in fn_prefs:
                        if f.startswith(p):
                            os.remove(f)
                            self.log.debug("Cleaned up file %s." % f)

            # build and run each test case individually
            for test in self.testcases:

                self.log.debug("Building and running test %s" % test)

                # build the test case
                cmd = "tcsh ./compile %s %s" % (self.par, test)
                run_cmd(cmd, log_all=True, simple=True)

                # run test
                try:
                    os.chdir('run')

                    if test in ["em_fire"]:

                        # handle tests with subtests separately
                        testdir = os.path.join("..", "test", test)

                        for subtest in [
                                x for x in os.listdir(testdir)
                                if os.path.isdir(x)
                        ]:

                            subtestdir = os.path.join(testdir, subtest)

                            # link required files
                            for f in os.listdir(subtestdir):
                                if os.path.exists(f):
                                    os.remove(f)
                                os.symlink(os.path.join(subtestdir, f), f)

                            # run test
                            run_test()

                    else:

                        # run test
                        run_test()

                    os.chdir('..')

                except OSError as err:
                    raise EasyBuildError("An error occurred when running test %s: %s", test, err)
コード例 #60
0
    def sanity_check_step(self):
        """Custom sanity check for NEURON."""
        shlib_ext = get_shared_lib_ext()
        binpath = os.path.join(self.hostcpu, 'bin')
        libpath = os.path.join(self.hostcpu, 'lib', 'lib%s.' + shlib_ext)
        # hoc_ed is not included in the sources of 7.4. However, it is included in the binary distribution.
        # Nevertheless, the binary has a date old enough (June 2014, instead of November 2015 like all the
        # others) to be considered a mistake in the distribution
        binaries = ["neurondemo", "nrngui", "nrniv", "nrnivmodl", "nocmodl", "modlunit", "nrnmech_makefile",
                    "mkthreadsafe"]
        libs = ["nrniv"]
        sanity_check_dirs = ['share/nrn']

        if LooseVersion(self.version) < LooseVersion('7.4'):
            binaries += ["hoc_ed"]

        if LooseVersion(self.version) < LooseVersion('7.8.1'):
            binaries += ["bbswork.sh", "hel2mos1.sh", "ivoc", "memacs", "mos2nrn", "mos2nrn2.sh", "oc"]
            binaries += ["nrn%s" % x for x in ["iv_makefile", "oc", "oc_makefile", "ocmodl"]]
            libs += ["ivoc", "ivos", "memacs", "meschach", "neuron_gnu", "nrnmpi", "nrnoc", "nrnpython",
                     "oc", "ocxt", "scopmath", "sparse13", "sundials"]
            sanity_check_dirs += ['include/nrn']
        # list of included binaries changed with cmake. See
        # https://github.com/neuronsimulator/nrn/issues/899
        else:
            binaries += ["nrnpyenv.sh", "set_nrnpyenv.sh", "sortspike"]
            libs += ["rxdmath"]
            sanity_check_dirs += ['include']
            if self.with_python:
                sanity_check_dirs += [os.path.join("lib", "python"),
                                      os.path.join("lib", "python%(pyshortver)s", "site-packages")]

        # setting these via sanity_check_paths (rather than custom_paths) ensures that the %(pyshortver)s template
        # gets resolved, which matters for installations against multiple Python versions (via multi_deps)
        sanity_check_files = [os.path.join(binpath, x) for x in binaries] + [libpath % x for x in libs]
        self.cfg['sanity_check_paths'] = {
            'files': sanity_check_files,
            'dirs': sanity_check_dirs,
        }

        super(EB_NEURON, self).sanity_check_step()

        try:
            fake_mod_data = self.load_fake_module()
        except EasyBuildError as err:
            self.log.debug("Loading fake module failed: %s" % err)

        # test NEURON demo
        inp = '\n'.join([
            "demo(3) // load the pyramidal cell model.",
            "init()  // initialise the model",
            "t       // should be zero",
            "soma.v  // will print -65",
            "run()   // run the simulation",
            "t       // should be 5, indicating that 5ms were simulated",
            "soma.v  // will print a value other than -65, indicating that the simulation was executed",
            "quit()",
        ])
        (out, ec) = run_cmd("neurondemo", simple=False, log_all=True, log_output=True, inp=inp)

        validate_regexp = re.compile(r"^\s+-65\s*\n\s+5\s*\n\s+-68.134337", re.M)
        if ec or not validate_regexp.search(out):
            raise EasyBuildError("Validation of NEURON demo run failed.")
        else:
            self.log.info("Validation of NEURON demo OK!")

        if build_option('mpi_tests'):
            nproc = self.cfg['parallel']
            try:
                cwd = os.getcwd()
                os.chdir(os.path.join(self.cfg['start_dir'], 'src', 'parallel'))

                cmd = self.toolchain.mpi_cmd_for("nrniv -mpi test0.hoc", nproc)
                (out, ec) = run_cmd(cmd, simple=False, log_all=True, log_output=True)

                os.chdir(cwd)
            except OSError as err:
                raise EasyBuildError("Failed to run parallel hello world: %s", err)

            valid = True
            for i in range(0, nproc):
                validate_regexp = re.compile("I am %d of %d" % (i, nproc))
                if not validate_regexp.search(out):
                    valid = False
                    break
            if ec or not valid:
                raise EasyBuildError("Validation of parallel hello world run failed.")
            else:
                self.log.info("Parallel hello world OK!")
        else:
            self.log.info("Skipping MPI testing of NEURON since MPI testing is disabled")

        if self.with_python:
            cmd = "python -c 'import neuron; neuron.test()'"
            (out, ec) = run_cmd(cmd, simple=False, log_all=True, log_output=True)

        # cleanup
        self.clean_up_fake_module(fake_mod_data)
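
The parallel 'hello world' validation above can be summarized as: every rank 0..nproc-1 must report itself in the captured output. A simplified sketch (plain substring check instead of compiled regexes):

def mpi_hello_world_ok(out, nproc):
    """Return True if every MPI rank reported 'I am <rank> of <nproc>' in out."""
    return all("I am %d of %d" % (rank, nproc) in out for rank in range(nproc))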