def update(self):
    """Update the Lmod spider cache after new modules were added.

    Runs Lmod's 'spider' tool (located next to self.cmd) over $MODULEPATH.
    In testing mode the generated cache contents are returned instead of
    being written to the user-level cache file.
    """
    spider_cmd = os.path.join(os.path.dirname(self.cmd), 'spider')
    cmd = [spider_cmd, '-o', 'moduleT', os.environ['MODULEPATH']]
    self.log.debug("Running command '%s'..." % ' '.join(cmd))

    proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE, env=os.environ)
    (stdout, stderr) = proc.communicate()

    if stderr:
        self.log.error("An error occured when running '%s': %s" % (' '.join(cmd), stderr))

    if self.testing:
        # don't actually update local cache when testing, just return the cache contents
        return stdout

    # user-level Lmod spider cache location
    cache_filefn = os.path.join(os.path.expanduser('~'), '.lmod.d', '.cache', 'moduleT.lua')
    try:
        self.log.debug("Updating Lmod spider cache %s with output from '%s'" % (cache_filefn, ' '.join(cmd)))
        cache_dir = os.path.dirname(cache_filefn)
        if not os.path.exists(cache_dir):
            mkdir(cache_dir, parents=True)
        # fixed: use a context manager so the file handle is closed even if the write fails
        with open(cache_filefn, 'w') as cache_file:
            cache_file.write(stdout)
    except (IOError, OSError) as err:  # fixed: Python 2 'except ..., err' syntax
        self.log.error("Failed to update Lmod spider cache %s: %s" % (cache_filefn, err))
# Example 2
    def install_step(self):
        """Install using chimera.bin."""
        start_dir = self.cfg['start_dir']
        try:
            os.chdir(start_dir)
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", start_dir, err)

        # Chimera bundles all its dependencies and uses a UNIX-style layout
        # ('bin', 'include', 'lib', ...). Installing it into a 'chimera'
        # subdirectory (rather than the install root) keeps those directories
        # out of PATH, CPATH, LD_LIBRARY_PATH, etc. and avoids clashes with
        # other modules.
        chimera_subdir = os.path.join(self.installdir, 'chimera')
        run_cmd("./chimera.bin -q -d %s" % chimera_subdir, log_all=True, simple=True)

        # Only a symlink to the Chimera startup script ends up in PATH; the
        # startup script itself sets up the environment so Chimera can find
        # its bundled dependencies.
        bindir = os.path.join(self.installdir, 'bin')
        mkdir(bindir)
        symlink(os.path.join(chimera_subdir, 'bin', 'chimera'),
                os.path.join(bindir, 'chimera'))
# Example 3
    def setup_hierarchical_modules(self):
        """Setup hierarchical modules to run tests on."""
        mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')

        # copy the 'Core'/'Compiler'/'MPI' module files into the test install path;
        # EasyBuild itself is responsible for making the toolchain loadable via
        # its short module name
        mkdir(mod_prefix, parents=True)
        src_mod_prefix = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'modules')
        for subdir in ('Core', 'Compiler', 'MPI'):
            shutil.copytree(os.path.join(src_mod_prefix, subdir), os.path.join(mod_prefix, subdir))

        # restrict $MODULEPATH to hierarchical modules only: mixing modules installed
        # with a flat scheme (e.g. EasyBuildMNS) and a hierarchical one
        # (e.g. HierarchicalMNS) doesn't work
        self.reset_modulepath([mod_prefix, os.path.join(mod_prefix, 'Core')])

        # rewrite 'module use' statements so they point into the test install path
        # rather than the hardcoded /tmp location
        mpi_pref = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'OpenMPI', '1.6.4')
        modfiles = [
            os.path.join(mod_prefix, 'Core', 'GCC', '4.7.2'),
            os.path.join(mod_prefix, 'Core', 'GCC', '4.8.3'),
            os.path.join(mod_prefix, 'Core', 'icc', '2013.5.192-GCC-4.8.3'),
            os.path.join(mod_prefix, 'Core', 'ifort', '2013.5.192-GCC-4.8.3'),
            os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'OpenMPI', '1.6.4'),
            os.path.join(mod_prefix, 'Compiler', 'intel', '2013.5.192-GCC-4.8.3', 'impi', '4.1.3.049'),
            os.path.join(mpi_pref, 'FFTW', '3.3.3'),
            os.path.join(mpi_pref, 'OpenBLAS', '0.2.6-LAPACK-3.4.2'),
            os.path.join(mpi_pref, 'ScaLAPACK', '2.0.2-OpenBLAS-0.2.6-LAPACK-3.4.2'),
        ]
        for modfile in modfiles:
            for line in fileinput.input(modfile, inplace=1):
                sys.stdout.write(re.sub(r"(module\s*use\s*)/tmp/modules/all",
                                        r"\1%s/modules/all" % self.test_installpath, line))
 def extract_step(self):
     """Extract sources as expected by the OpenFOAM(-Extend) build scripts.

     After the regular extraction, make sure the sources live in the
     subdirectory named self.openfoamdir; if they don't, the build scripts
     (e.g. the sourced etc/bashrc) will likely fail.
     """
     super(EB_OpenFOAM, self).extract_step()
     openfoam_installdir = os.path.join(self.installdir, self.openfoamdir)
     if not os.path.exists(openfoam_installdir):
         self.log.warning("Creating expected directory %s, and moving everything there" % openfoam_installdir)
         try:
             contents_installdir = os.listdir(self.installdir)
             # a single directory with the wrong name: just rename it
             if len(contents_installdir) == 1 and os.path.isdir(os.path.join(self.installdir, contents_installdir[0])):
                 source = os.path.join(self.installdir, contents_installdir[0])
                 target = os.path.join(self.installdir, self.openfoamdir)
                 self.log.debug("Renaming %s to %s", source, target)
                 os.rename(source, target)
             else:
                 # multiple entries: create the expected dir and move everything into it
                 mkdir(openfoam_installdir)
                 for fil in contents_installdir:
                     if fil != self.openfoamdir:
                         source = os.path.join(self.installdir, fil)
                         target = os.path.join(openfoam_installdir, fil)
                         self.log.debug("Moving %s to %s", source, target)
                         shutil.move(source, target)
                 os.chdir(openfoam_installdir)
         except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
             raise EasyBuildError("Failed to move all files to %s: %s", openfoam_installdir, err)
# Example 5
    def setup_categorized_hmns_modules(self):
        """Setup categorized hierarchical modules to run tests on."""
        mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')

        # copy module files under 'CategorizedHMNS/{Core,Compiler,MPI}' into the
        # test install path; EasyBuild itself is responsible for making the
        # toolchain loadable via its short module name
        mkdir(mod_prefix, parents=True)
        src_prefix = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'modules', 'CategorizedHMNS')
        for subdir in ('Core', 'Compiler', 'MPI'):
            shutil.copytree(os.path.join(src_prefix, subdir), os.path.join(mod_prefix, subdir))

        # create empty module file directory to make C/Tcl modules happy
        mpi_pref = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'OpenMPI', '1.6.4')
        mkdir(os.path.join(mpi_pref, 'base'))

        # restrict $MODULEPATH so only modules in the CategorizedHMNS are available
        self.reset_modulepath([os.path.join(mod_prefix, 'Core', 'compiler'),
                               os.path.join(mod_prefix, 'Core', 'toolchain')])

        # rewrite 'module use' statements so they point into the test install path
        modfiles = [
            os.path.join(mod_prefix, 'Core', 'compiler', 'GCC', '4.7.2'),
            os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'mpi', 'OpenMPI', '1.6.4'),
        ]
        for modfile in modfiles:
            for line in fileinput.input(modfile, inplace=1):
                sys.stdout.write(re.sub(r"(module\s*use\s*)/tmp/modules/all",
                                        r"\1%s/modules/all" % self.test_installpath, line))
# Example 6
    def install_step(self):
        """Install python but only keep the bits we need"""
        super(EB_Tkinter, self).install_step()

        # plan: stash the Tkinter bits in a temporary dir, wipe the full
        # Python install, then move only those bits back
        tmpdir = tempfile.mkdtemp(dir=self.builddir)

        pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
        shlib_ext = get_shared_lib_ext()
        so_pattern = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
        so_hits = glob.glob(so_pattern)
        if len(so_hits) != 1:
            raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", so_hits)
        self.tkinter_so_basename = os.path.basename(so_hits[0])

        # Python 3 renamed the 'lib-tk' package dir to 'tkinter'
        if LooseVersion(self.version) >= LooseVersion('3'):
            pkg_dir = "tkinter"
        else:
            pkg_dir = "lib-tk"
        tkparts = [pkg_dir, os.path.join("lib-dynload", self.tkinter_so_basename)]

        copy([os.path.join(pylibdir, part) for part in tkparts], tmpdir)

        rmtree2(self.installdir)

        mkdir(pylibdir, parents=True)
        try:
            shutil.move(os.path.join(tmpdir, tkparts[0]), pylibdir)
            shutil.move(os.path.join(tmpdir, os.path.basename(tkparts[1])), pylibdir)
        except (IOError, OSError) as err:
            raise EasyBuildError("Failed to move Tkinter back to the install directory: %s", err)
# Example 7
    def test_cases_step(self):
        """Run test cases, if specified.

        Each entry in self.cfg['tests'] must be a 4-tuple:
        (testcase_name, init_lapw_args, run_lapw_args, scf_regexp_patterns).
        For each test case, init_lapw/run_lapw are run in a dedicated
        temporary directory, and the resulting .scf output is checked
        against the given regex patterns.
        """

        for test in self.cfg['tests']:

            # check expected format
            if not len(test) == 4:
                raise EasyBuildError("WIEN2k test case not specified in expected format: "
                                     "(testcase_name, init_lapw_args, run_lapw_args, [scf_regexp_pattern])")
            test_name = test[0]
            init_args = test[1]
            run_args = test[2]
            scf_regexp_patterns = test[3]

            try:
                # remember current directory so it can be restored after the test
                cwd = os.getcwd()
                # WIEN2k enforces that working dir has same name as test case
                tmpdir = os.path.join(tempfile.mkdtemp(), test_name)

                # point $SCRATCH (used by WIEN2k) into the temporary test dir
                scratch = os.path.join(tmpdir, 'scratch')
                mkdir(scratch, parents=True)
                env.setvar('SCRATCH', scratch)

                os.chdir(tmpdir)
                self.log.info("Running test case %s in %s" % (test_name, tmpdir))
            except OSError as err:
                raise EasyBuildError("Failed to create temporary directory for test %s: %s", test_name, err)

            # try and find struct file for test
            test_fp = self.obtain_file("%s.struct" % test_name)

            try:
                shutil.copy2(test_fp, tmpdir)
            except OSError as err:
                raise EasyBuildError("Failed to copy %s: %s", test_fp, err)

            # run test: initialize first, then run the SCF calculation
            cmd = "init_lapw %s" % init_args
            run_cmd(cmd, log_all=True, simple=True)

            cmd = "run_lapw %s" % run_args
            run_cmd(cmd, log_all=True, simple=True)

            # check output: every supplied pattern must match in the .scf file
            scf_fn = "%s.scf" % test_name
            self.log.debug("Checking output of test %s in %s" % (str(test), scf_fn))
            scftxt = read_file(scf_fn)
            for regexp_pat in scf_regexp_patterns:
                regexp = re.compile(regexp_pat, re.M)
                if not regexp.search(scftxt):
                    raise EasyBuildError("Failed to find pattern %s in %s", regexp.pattern, scf_fn)
                else:
                    self.log.debug("Found pattern %s in %s" % (regexp.pattern, scf_fn))

            # cleanup: restore the original working dir, remove the test dir
            try:
                os.chdir(cwd)
                rmtree2(tmpdir)
            except OSError as err:
                raise EasyBuildError("Failed to clean up temporary test dir: %s", err)
# Example 8
    def test_import_available_modules(self):
        """Test for import_available_modules function."""

        res = import_available_modules('easybuild.tools.repository')
        self.assertEqual(len(res), 5)
        # don't check all, since some required specific Python packages to be installed...
        self.assertTrue(easybuild.tools.repository.filerepo in res)

        # replicate situation where import_available_modules failed when running in directory where modules are located
        # cfr. https://github.com/easybuilders/easybuild-framework/issues/2659
        #      and https://github.com/easybuilders/easybuild-framework/issues/2742
        test123 = os.path.join(self.test_prefix, 'test123')
        mkdir(test123)
        write_file(os.path.join(test123, '__init__.py'), '')
        for submod in ('one', 'two', 'three'):
            write_file(os.path.join(test123, '%s.py' % submod), '')

        change_dir(self.test_prefix)
        res = import_available_modules('test123')

        import test123.one
        import test123.two
        import test123.three
        # submodules are returned in alphabetical order
        self.assertEqual([test123.one, test123.three, test123.two], res)
# Example 9
    def test_include_mns(self):
        """Test include_module_naming_schemes()."""
        testdir = os.path.dirname(os.path.abspath(__file__))
        test_mns = os.path.join(testdir, 'sandbox', 'easybuild', 'module_naming_scheme')

        my_mns = os.path.join(self.test_prefix, 'my_mns')
        mkdir(my_mns)

        # an included __init__.py must be ignored, and must not cause trouble (bug #1697)
        write_file(os.path.join(my_mns, '__init__.py'), "# dummy init, should not get included")

        write_file(os.path.join(my_mns, 'my_mns.py'), '\n'.join([
            "from easybuild.tools.module_naming_scheme import ModuleNamingScheme",
            "class MyMNS(ModuleNamingScheme):",
            "   pass",
        ]))

        # include the custom MNS via a glob pattern
        included_mns_path = include_module_naming_schemes(self.test_prefix, [os.path.join(my_mns, '*.py')])

        # the included MNS tree must contain the package structure + the custom MNS module
        for filepath in ['__init__.py', 'tools/__init__.py', 'tools/module_naming_scheme/__init__.py',
                         'tools/module_naming_scheme/my_mns.py']:
            fullpath = os.path.join(included_mns_path, 'easybuild', filepath)
            self.assertTrue(os.path.exists(fullpath), "%s exists" % fullpath)

        # path to included MNSs should be prepended to Python search path
        self.assertEqual(sys.path[0], included_mns_path)

        # importing the custom MNS should now work
        import easybuild.tools.module_naming_scheme.my_mns
        my_mns_pyc_path = easybuild.tools.module_naming_scheme.my_mns.__file__
        my_mns_real_py_path = os.path.realpath(os.path.join(os.path.dirname(my_mns_pyc_path), 'my_mns.py'))
        self.assertTrue(os.path.samefile(up(my_mns_real_py_path, 1), my_mns))
# Example 10
    def update(self):
        """Update the Lmod spider cache after new modules were added.

        Only acts when the 'update_modules_tool_cache' build option is set.
        In testing mode the generated cache contents are returned instead of
        being written to the user-level cache file.
        """
        if build_option("update_modules_tool_cache"):
            spider_cmd = os.path.join(os.path.dirname(self.cmd), "spider")
            cmd = [spider_cmd, "-o", "moduleT", os.environ["MODULEPATH"]]
            self.log.debug("Running command '%s'..." % " ".join(cmd))

            proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE, env=os.environ)
            (stdout, stderr) = proc.communicate()

            if stderr:
                raise EasyBuildError("An error occured when running '%s': %s", " ".join(cmd), stderr)

            if self.testing:
                # don't actually update local cache when testing, just return the cache contents
                return stdout

            # user-level Lmod spider cache file
            cache_fp = os.path.join(self.USER_CACHE_DIR, "moduleT.lua")
            try:
                self.log.debug("Updating Lmod spider cache %s with output from '%s'" % (cache_fp, " ".join(cmd)))
                cache_dir = os.path.dirname(cache_fp)
                if not os.path.exists(cache_dir):
                    mkdir(cache_dir, parents=True)
                # fixed: context manager ensures the cache file is closed even on error
                with open(cache_fp, "w") as cache_file:
                    cache_file.write(stdout)
            except (IOError, OSError) as err:  # fixed: Python 2 'except ..., err' syntax
                raise EasyBuildError("Failed to update Lmod spider cache %s: %s", cache_fp, err)
    def test_prepare_step(self):
        """Test prepare step (setting up build environment)."""
        test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs')
        ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0]

        mkdir(os.path.join(self.test_buildpath, 'toy', '0.0', 'dummy-dummy'), parents=True)
        eb = EasyBlock(ec['ec'])
        eb.silent = True
        eb.prepare_step()
        # a plain prepare step should not leave any modules loaded
        self.assertEqual(self.modtool.list(), [])

        os.environ['THIS_IS_AN_UNWANTED_ENV_VAR'] = 'foo'
        eb.cfg['unwanted_env_vars'] = ['THIS_IS_AN_UNWANTED_ENV_VAR']

        eb.cfg['allow_system_deps'] = [('Python', '1.2.3')]

        init_config(build_options={'extra_modules': ['GCC/4.7.2']})

        eb.prepare_step()

        # unwanted env var gets cleared, system deps get exported, extra module gets loaded
        self.assertEqual(os.environ.get('THIS_IS_AN_UNWANTED_ENV_VAR'), None)
        self.assertEqual(os.environ.get('EBROOTPYTHON'), 'Python')
        self.assertEqual(os.environ.get('EBVERSIONPYTHON'), '1.2.3')
        loaded_mods = self.modtool.list()
        self.assertEqual(len(loaded_mods), 1)
        self.assertEqual(loaded_mods[0]['mod_name'], 'GCC/4.7.2')
# Example 12
    def test_include_easyblocks_priority(self):
        """Test whether easyblocks included via include_easyblocks() get priority over others."""
        test_easyblocks = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox', 'easybuild', 'easyblocks')

        # make sure that test 'foo' easyblocks is there
        import easybuild.easyblocks.foo
        foo_path = os.path.dirname(os.path.dirname(easybuild.easyblocks.foo.__file__))
        self.assertTrue(os.path.samefile(foo_path, test_easyblocks))

        # inject custom 'foo' easyblocks
        myeasyblocks = os.path.join(self.test_prefix, 'myeasyblocks')
        mkdir(myeasyblocks)

        # include __init__.py file that should be ignored, and shouldn't cause trouble (bug #1697)
        write_file(os.path.join(myeasyblocks, '__init__.py'), "# dummy init, should not get included")

        # 'undo' import of foo easyblock, so the included version can take effect
        del sys.modules['easybuild.easyblocks.foo']

        foo_easyblock_txt = '\n'.join([
            "from easybuild.framework.easyblock import EasyBlock",
            "class EB_Foo(EasyBlock):",
            "   pass",
        ])
        write_file(os.path.join(myeasyblocks, 'foo.py'), foo_easyblock_txt)
        include_easyblocks(self.test_prefix, [os.path.join(myeasyblocks, 'foo.py')])

        # the included easyblock should now shadow the sandbox one
        foo_pyc_path = easybuild.easyblocks.foo.__file__
        foo_real_py_path = os.path.realpath(os.path.join(os.path.dirname(foo_pyc_path), 'foo.py'))
        self.assertFalse(os.path.samefile(os.path.dirname(foo_pyc_path), test_easyblocks))
        self.assertTrue(os.path.samefile(foo_real_py_path, os.path.join(myeasyblocks, 'foo.py')))

        # 'undo' import of foo easyblock, to avoid influencing other tests
        del sys.modules['easybuild.easyblocks.foo']
# Example 13
    def test_which(self):
        """Test which function for locating commands."""
        python = ft.which('python')
        self.assertTrue(python and os.path.exists(python) and os.path.isabs(python))

        path = ft.which('i_really_do_not_expect_a_command_with_a_name_like_this_to_be_available')
        self.assertTrue(path is None)

        os.environ['PATH'] = '%s:%s' % (self.test_prefix, os.environ['PATH'])

        # a *directory* named 'foo' on $PATH must be ignored by which()
        foo = os.path.join(self.test_prefix, 'foo')
        ft.mkdir(foo)
        ft.adjust_permissions(foo, stat.S_IRUSR|stat.S_IXUSR)

        # an executable *file* named 'bar' must be found
        bar = os.path.join(self.test_prefix, 'bar')
        ft.write_file(bar, '#!/bin/bash')
        ft.adjust_permissions(bar, stat.S_IRUSR|stat.S_IXUSR)

        self.assertEqual(ft.which('foo'), None)
        self.assertTrue(os.path.samefile(ft.which('bar'), bar))

        # with 'bar' at two locations on $PATH, only the first is returned by default
        barbis = os.path.join(self.test_prefix, 'more', 'bar')
        ft.write_file(barbis, '#!/bin/bash')
        ft.adjust_permissions(barbis, stat.S_IRUSR|stat.S_IXUSR)
        os.environ['PATH'] = '%s:%s' % (os.environ['PATH'], os.path.dirname(barbis))
        self.assertTrue(os.path.samefile(ft.which('bar'), bar))

        # retain_all=True yields *all* hits, in $PATH order
        hits = ft.which('bar', retain_all=True)
        self.assertEqual(len(hits), 2)
        self.assertTrue(os.path.samefile(hits[0], bar))
        self.assertTrue(os.path.samefile(hits[1], barbis))
# Example 14
    def test_cases_step(self):
        """Run test cases, if specified.

        Each entry in self.cfg['tests'] must be a 4-tuple:
        (testcase_name, init_lapw_args, run_lapw_args, scf_regexp_patterns).
        """

        for test in self.cfg['tests']:

            # check expected format
            if not len(test) == 4:
                raise EasyBuildError("WIEN2k test case not specified in expected format: "
                                     "(testcase_name, init_lapw_args, run_lapw_args, [scf_regexp_pattern])")
            test_name = test[0]
            init_args = test[1]
            run_args = test[2]
            scf_regexp_patterns = test[3]

            try:
                # remember current directory so it can be restored afterwards
                cwd = os.getcwd()
                # WIEN2k enforces that working dir has same name as test case
                tmpdir = os.path.join(tempfile.mkdtemp(), test_name)

                # point $SCRATCH (used by WIEN2k) into the temporary test dir
                scratch = os.path.join(tmpdir, 'scratch')
                mkdir(scratch, parents=True)
                env.setvar('SCRATCH', scratch)

                os.chdir(tmpdir)
                self.log.info("Running test case %s in %s" % (test_name, tmpdir))
            except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
                raise EasyBuildError("Failed to create temporary directory for test %s: %s", test_name, err)

            # try and find struct file for test
            test_fp = self.obtain_file("%s.struct" % test_name)

            try:
                shutil.copy2(test_fp, tmpdir)
            except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
                raise EasyBuildError("Failed to copy %s: %s", test_fp, err)
    def create_files(self):
        """
        Create the absolute filename for the module, plus a symlink for it
        in the moduleclass category directory.
        """
        module_path = config.install_path('mod')

        # fake mode: install into a temporary dir instead of the real module path
        if self.fake:
            self.tmpdir = tempfile.mkdtemp()
            _log.debug("Fake mode: using %s (instead of %s)" % (self.tmpdir, module_path))
            module_path = self.tmpdir

        # real file goes in 'all' category
        self.filename = os.path.join(module_path, GENERAL_CLASS, det_full_module_name(self.app.cfg))

        # symlink goes in the moduleclass category (PEP8: renamed from 'classPathFile')
        class_path_file = os.path.join(module_path, self.app.cfg['moduleclass'], det_full_module_name(self.app.cfg))

        # create parent directories for both paths
        for path in [os.path.dirname(x) for x in [self.filename, class_path_file]]:
            mkdir(path, parents=True)

        # make a symlink from class_path_file to self.filename
        try:
            # remove symlink if it's there (even if it's broken)
            if os.path.lexists(class_path_file):
                os.remove(class_path_file)
            # remove module file if it's there (it'll be recreated), see Application.make_module
            if os.path.exists(self.filename):
                os.remove(self.filename)
            os.symlink(self.filename, class_path_file)
        except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
            _log.exception("Failed to create symlink from %s to %s: %s" % (class_path_file, self.filename, err))
# Example 16
 def check_mkdir(path, error=None, **kwargs):
     """Create specified directory with mkdir, and check for correctness."""
     if error is not None:
         # an error pattern was supplied: mkdir is expected to fail with it
         self.assertErrorRegex(EasyBuildError, error, ft.mkdir, path, **kwargs)
     else:
         ft.mkdir(path, **kwargs)
         self.assertTrue(os.path.exists(path) and os.path.isdir(path), "Directory %s exists" % path)
# Example 17
    def configure_step(self):
        """Run CMake for stage 1 Clang.

        Only sets up and configures the stage 1 build directory; stage 2/3
        directories are prepared here for later build stages.
        """

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        self.llvm_obj_dir_stage2 = os.path.join(self.builddir, 'llvm.obj.2')
        self.llvm_obj_dir_stage3 = os.path.join(self.builddir, 'llvm.obj.3')

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        os.chdir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # (fixed: these lines were tab-indented, which is a TabError in Python 3)
        self.cfg['configopts'] += "-DGCC_INSTALL_PREFIX='%s' " % get_software_root('GCC')

        self.cfg['configopts'] += "-DCMAKE_BUILD_TYPE=Release "
        if self.cfg['assertions']:
            self.cfg['configopts'] += "-DLLVM_ENABLE_ASSERTIONS=ON "
        else:
            self.cfg['configopts'] += "-DLLVM_ENABLE_ASSERTIONS=OFF "

        self.cfg['configopts'] += '-DLLVM_TARGETS_TO_BUILD="%s" ' % ';'.join(self.cfg['build_targets'])

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
# Example 18
    def add_easyconfig(self, cfg, name, version, stats, previous):
        """
        Add the eb-file for software name and version to the repository.
        stats should be a dict containing statistics.
        if previous is true -> append the statistics to the file
        This will return the path to the created file (for use in subclasses)
        """
        # directory for the eb file
        full_path = os.path.join(self.wc, self.subdir, name)
        mkdir(full_path, parents=True)

        # destination path of the eb file
        dest = os.path.join(full_path, "%s-%s.eb" % (name, version))

        # header line followed by the original easyconfig contents
        txt = "# Built with EasyBuild version %s on %s\n" % (VERBOSE_VERSION, time.strftime("%Y-%m-%d_%H-%M-%S"))
        txt += read_file(cfg)

        # append a line to the eb file so that we don't have git merge conflicts
        if previous:
            # statstemplate = "\nbuildstats.append(%s)\n"
            statsprefix = "\nbuildstats.append("
            statssuffix = ")\n"
        else:
            statsprefix = "\n# Build statistics\nbuildstats = ["
            statssuffix = "]\n"

        write_file(dest, txt + statsprefix + stats_to_str(stats) + statssuffix)

        return dest
# Example 19
    def install_step(self):
        """
        Install by manually copying files to install dir, for old versions,
        or by running 'make install' for new versions.

        Create symlinks where expected by other applications
        (in Lib instead of lib)
        """

        if LooseVersion(self.version) < LooseVersion("5"):

            libdir = os.path.join(self.installdir, 'lib')
            mkdir(libdir)

            includedir = os.path.join(self.installdir, 'include')
            mkdir(includedir)

            # copy libraries
            try:
                src = os.path.join(self.cfg['start_dir'], 'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
                raise EasyBuildError("Copying file libmetis.a to lib dir failed: %s", err)

            # copy include files
            try:
                for f in ['defs.h', 'macros.h', 'metis.h', 'proto.h', 'rename.h', 'struct.h']:
                    src = os.path.join(self.cfg['start_dir'], 'Lib', f)
                    dst = os.path.join(includedir, f)
                    shutil.copy2(src, dst)
                    # fixed: '0755' is a Python 2 octal literal, invalid in Python 3
                    os.chmod(dst, 0o755)
            except OSError as err:  # fixed: Python 2 'except OSError, err' syntax
                raise EasyBuildError("Copying file metis.h to include dir failed: %s", err)
    def configure_step(self, cmd_prefix=''):
        """
        Configure with Meson.

        Raises EasyBuildError if Meson/Ninja are not listed as build
        dependencies or their commands are not available.
        """
        # make sure both Meson and Ninja are included as build dependencies
        build_dep_names = [d['name'] for d in self.cfg.builddependencies()]
        for tool in ['Ninja', 'Meson']:
            if tool not in build_dep_names:
                raise EasyBuildError("%s not included as build dependency", tool)
            cmd = tool.lower()
            if not which(cmd):
                raise EasyBuildError("'%s' command not found", cmd)

        if self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            mkdir(builddir)
            change_dir(builddir)

        # added for consistency with the other Meson configure_step in this codebase:
        # make sure libdir doesn't get set to lib/x86_64-linux-gnu or something
        # on Debian/Ubuntu multiarch systems and others
        if '-Dlibdir' not in self.cfg['configopts'] and '--libdir' not in self.cfg['configopts']:
            self.cfg.update('configopts', '-Dlibdir=lib')

        cmd = "%(preconfigopts)s meson --prefix %(installdir)s %(configopts)s %(sourcedir)s" % {
            'configopts': self.cfg['configopts'],
            'installdir': self.installdir,
            'preconfigopts': self.cfg['preconfigopts'],
            'sourcedir': self.start_dir,
        }
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        return out
# Example 21
    def extensions_step(self):
        """Build & Install both Python and R extension"""
        # we start with the python bindings
        self.py_ext.src = os.path.join(self.mxnet_src_dir, "python")
        change_dir(self.py_ext.src)

        self.py_ext.prerun()
        # sources are already in place, so don't unpack them again
        self.py_ext.run(unpack_src=False)
        self.py_ext.postrun()

        # next up, the R bindings
        self.r_ext.src = os.path.join(self.mxnet_src_dir, "R-package")
        change_dir(self.r_ext.src)
        # link the installed libs/headers into inst/, where the R package expects them
        mkdir("inst")
        symlink(os.path.join(self.installdir, "lib"), os.path.join("inst", "libs"))
        symlink(os.path.join(self.installdir, "include"), os.path.join("inst", "include"))

        # MXNet doesn't provide a list of its R dependencies by default
        write_file("NAMESPACE", R_NAMESPACE)
        change_dir(self.mxnet_src_dir)
        self.r_ext.prerun()
        # MXNet is just weird. To install the R extension, we have to:
        # - First install the extension like it is
        # - Let R export the extension again. By doing this, all the dependencies get
        #   correctly filled and some mappings are done
        # - Reinstall the exported version
        self.r_ext.run()
        run_cmd("R_LIBS=%s Rscript -e \"require(mxnet); mxnet:::mxnet.export(\\\"R-package\\\")\"" % self.installdir)
        change_dir(self.r_ext.src)
        self.r_ext.run()
        self.r_ext.postrun()
# Example 22
    def configure_step(self, cmd_prefix=''):
        """
        Configure with Meson.
        """
        # both Meson and Ninja must be listed as build dependencies, and be on $PATH
        build_dep_names = [dep['name'] for dep in self.cfg.builddependencies()]
        for tool in ['Ninja', 'Meson']:
            if tool not in build_dep_names:
                raise EasyBuildError("%s not included as build dependency", tool)
            cmd = tool.lower()
            if not which(cmd):
                raise EasyBuildError("'%s' command not found", cmd)

        if self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            mkdir(builddir)
            change_dir(builddir)

        # Make sure libdir doesn't get set to lib/x86_64-linux-gnu or something
        # on Debian/Ubuntu multiarch systems and others.
        configopts = self.cfg['configopts']
        if '-Dlibdir' not in configopts and '--libdir' not in configopts:
            self.cfg.update('configopts', '-Dlibdir=lib')

        cmd = "%(preconfigopts)s meson --prefix %(installdir)s %(configopts)s %(sourcedir)s" % {
            'configopts': self.cfg['configopts'],
            'installdir': self.installdir,
            'preconfigopts': self.cfg['preconfigopts'],
            'sourcedir': self.start_dir,
        }
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        return out
# Example 23
def init_repo(path, repo_name, silent=False):
    """
    Initialize a new Git repository at the specified location.

    @param path: location where Git repository should be initialized
    @param repo_name: name of Git repository
    @param silent: keep quiet (don't print any messages)
    """
    repo_path = os.path.join(path, repo_name)

    # seed the working directory from a pre-existing copy, if one is configured
    git_working_dirs_path = build_option('git_working_dirs_path')
    if git_working_dirs_path:
        existing_workdir = os.path.join(git_working_dirs_path, repo_name)
        if os.path.exists(existing_workdir):
            try:
                print_msg("copying %s..." % existing_workdir, silent=silent)
                shutil.copytree(existing_workdir, repo_path)
            except OSError as err:
                raise EasyBuildError("Failed to copy git working dir %s to %s: %s", existing_workdir, repo_path, err)

    # no pre-existing copy available (or copy not configured): start from an empty dir
    if not os.path.exists(repo_path):
        mkdir(repo_path, parents=True)

    # turn the directory into an actual Git repository
    try:
        repo = git.Repo.init(repo_path)
    except GitCommandError as err:
        raise EasyBuildError("Failed to init git repo at %s: %s", repo_path, err)

    _log.debug("temporary git working directory ready at %s", repo_path)

    return repo
Esempio n. 24
0
    def test_expand_glob_paths(self):
        """Test expand_glob_paths function."""
        # set up a small directory tree with a handful of files to glob over
        for subdir in ('empty_dir', 'test_dir'):
            ft.mkdir(os.path.join(self.test_prefix, subdir), parents=True)
        test_files = ['file1.txt', 'test_dir/file2.txt', 'test_dir/file3.txt', 'test_dir2/file4.dat']
        for relpath in test_files:
            ft.write_file(os.path.join(self.test_prefix, relpath), 'gibberish')

        patterns = [os.path.join(self.test_prefix, '*.txt'), os.path.join(self.test_prefix, '*', '*')]
        expected_paths = sorted([
            os.path.join(self.test_prefix, 'file1.txt'),
            os.path.join(self.test_prefix, 'test_dir', 'file2.txt'),
            os.path.join(self.test_prefix, 'test_dir', 'file3.txt'),
            os.path.join(self.test_prefix, 'test_dir2', 'file4.dat'),
        ])
        self.assertEqual(sorted(ft.expand_glob_paths(patterns)), expected_paths)

        # non-glob paths are simply passed through unchanged
        plain_path = os.path.join(self.test_prefix, 'test_dir', 'file2.txt')
        self.assertEqual(ft.expand_glob_paths([plain_path]), [plain_path])

        # '~' should be expanded to the value of $HOME;
        # hard-overwrite $HOME (used by os.path.expanduser) so this can be tested reliably
        fake_home = os.path.join(self.test_prefix, 'home')
        ft.mkdir(fake_home, parents=True)
        ft.write_file(os.path.join(fake_home, 'test.txt'), 'test')
        os.environ['HOME'] = fake_home
        self.assertEqual(ft.expand_glob_paths(['~/*.txt']), [os.path.join(fake_home, 'test.txt')])

        # a glob pattern without any (file) matches results in an error
        no_match_glob = os.path.join(self.test_prefix, 'test_*')
        self.assertErrorRegex(EasyBuildError, "No files found using glob pattern", ft.expand_glob_paths, [no_match_glob])
Esempio n. 25
0
    def install_step(self):
        """Custom install procedure for TINKER."""
        start_dir = self.cfg['start_dir']

        # installation is driven from the 'source' subdirectory
        change_dir(os.path.join(start_dir, 'source'))

        # create 'bin' subdirectory, then run the rename.make script to populate it
        mkdir(os.path.join(start_dir, 'bin'))
        run_cmd(os.path.join(start_dir, self.build_subdir, 'rename.make'))
Esempio n. 26
0
    def test_step(self):
        """Test the built Python package."""

        # a string value for 'runtest' is used as the test command
        if isinstance(self.cfg['runtest'], basestring):
            self.testcmd = self.cfg['runtest']

        # fixed: 'not x is None' -> idiomatic 'x is not None'
        if self.cfg['runtest'] and self.testcmd is not None:
            extrapath = ""
            testinstalldir = None

            if self.testinstall:
                # install in test directory and export PYTHONPATH

                try:
                    testinstalldir = tempfile.mkdtemp()
                    mkdir(os.path.join(testinstalldir, self.pylibdir), parents=True)
                except OSError as err:
                    self.log.error("Failed to create test install dir: %s" % err)

                tup = (self.cfg['preinstallopts'], testinstalldir, self.cfg['installopts'])
                cmd = "%s python setup.py install --prefix=%s %s" % tup
                run_cmd(cmd, log_all=True, simple=True)

                run_cmd("python -c 'import sys; print(sys.path)'")  # print Python search path (debug)
                extrapath = "export PYTHONPATH=%s:$PYTHONPATH && " % os.path.join(testinstalldir, self.pylibdir)

            if self.testcmd:
                cmd = "%s%s" % (extrapath, self.testcmd)
                run_cmd(cmd, log_all=True, simple=True)

            # clean up the temporary test installation, if one was created
            if testinstalldir:
                try:
                    rmtree2(testinstalldir)
                except OSError as err:
                    self.log.exception("Removing testinstalldir %s failed: %s" % (testinstalldir, err))
    def run(self):
        """Install R package as an extension."""

        # figure out where the package library should be installed
        if isinstance(self.master, EB_R):
            # extension is being installed as part of an R installation/module,
            # so install into R's own 'library' directory
            (out, _) = run_cmd("R RHOME", log_all=True, simple=False)
            lib_install_prefix = os.path.join(out.strip(), 'library')
        else:
            # extension is being installed in a separate installation prefix
            lib_install_prefix = os.path.join(self.installdir, self.cfg['exts_subdir'])
            mkdir(lib_install_prefix, parents=True)

        # sources must be unpacked when patches need to be applied
        if self.patches:
            super(RPackage, self).run(unpack_src=True)
        else:
            super(RPackage, self).run()

        # build the install command, depending on whether a source tarball is available
        if self.src:
            self.ext_src = self.src
            self.log.debug("Installing R package %s version %s." % (self.name, self.version))
            cmd, stdin = self.make_cmdline_cmd(prefix=lib_install_prefix)
        else:
            self.log.debug("Installing most recent version of R package %s (source not found)." % self.name)
            cmd, stdin = self.make_r_cmd(prefix=lib_install_prefix)

        self.install_R_package(cmd, inp=stdin)
Esempio n. 28
0
    def configure_step(self):
        """Custom configuration procedure for Doris."""
        # FFTW is a hard requirement: its paths are fed to the interactive configure script below
        fftw = get_software_root('FFTW')
        if fftw is None:
            raise EasyBuildError("Required dependency FFTW is missing")

        # create installation directory (and /bin subdirectory) early, make sure it doesn't get removed later
        self.make_installdir()
        mkdir(os.path.join(self.installdir, 'bin'))
        self.cfg['keeppreviousinstall'] = True

        # configure/build/install should be done from 'src' subdirectory
        change_dir(os.path.join(self.cfg['start_dir'], 'src'))

        # answers for the interactive './configure' script:
        # 'qa' maps exact prompts to answers, 'std_qa' maps regex prompts to answers
        qa = {
            "===> Press enter to continue.": '',
            "===> What is your C++ compiler? [g++]": os.getenv('CXX'),
            "===> Do you have the FFTW library (y/n)? [n]": 'y',
            "===> What is the path to the FFTW library (libfftw3f.a or libfftw3f.so)? []": os.path.join(fftw, 'lib'),
            "===> What is the path to the FFTW include file (fftw3.h)? []": os.path.join(fftw, 'include'),
            "===> Do you have the VECLIB library (y/n)? [n]": 'n',
            "===> Do you have the LAPACK library (y/n)? [n]": 'y',
            "===> What is the path to the LAPACK library liblapack.a? []": os.getenv('LAPACK_LIB_DIR'),
            "===> Are you working on a Little Endian (X86 PC, Intel) machine (y/n)? [y]": 'y',
            # decline the default /usr/local/bin location, then supply our own bin subdir
            "===> Installation of Doris in directory: /usr/local/bin (y/n)? [y]": 'n',
            "===> Enter installation directory (use absolute path):": os.path.join(self.installdir, 'bin'),
            "===> Press enter to continue (CTRL-C to exit).": '',
        }
        std_qa = {
            "===> Do you want to compile a more verbose DEBUG version \(y/n\)\? \[n\](.|\n)*expected results\)": 'n',
        }

        run_cmd_qa('./configure', qa, std_qa=std_qa, log_all=True, simple=True)
    def configure_step(self):
        """Configure Boost build using custom tools"""

        # mpi sanity check
        if self.cfg['boost_mpi'] and not self.toolchain.options.get('usempi', None):
            raise EasyBuildError("When enabling building boost_mpi, also enable the 'usempi' toolchain option.")

        # create build directory (Boost doesn't like being built in source dir)
        self.objdir = os.path.join(self.builddir, 'obj')
        mkdir(self.objdir)

        # generate config depending on compiler used
        toolset = self.cfg['toolset']
        if toolset is None:
            if self.toolchain.comp_family() == toolchain.INTELCOMP:
                toolset = 'intel-linux'
            elif self.toolchain.comp_family() == toolchain.GCC:
                toolset = 'gcc'
            else:
                raise EasyBuildError("Unknown compiler used, don't know what to specify to --with-toolset, aborting.")

        # run Boost's bootstrap script with the selected toolset; install prefix is the separate build dir
        cmd = "%s ./bootstrap.sh --with-toolset=%s --prefix=%s %s"
        tup = (self.cfg['preconfigopts'], toolset, self.objdir, self.cfg['configopts'])
        run_cmd(cmd % tup, log_all=True, simple=True)

        if self.cfg['boost_mpi']:

            self.toolchain.options['usempi'] = True
            # configure the boost mpi module
            # http://www.boost.org/doc/libs/1_47_0/doc/html/mpi/getting_started.html
            # let Boost.Build know to look here for the config file

            # 'txt' holds the Boost.Build user-config snippet that describes the MPI setup
            txt = ''
            # Check if using a Cray toolchain and configure MPI accordingly
            if self.toolchain.toolchain_family() == toolchain.CRAYPE:
                if self.toolchain.PRGENV_MODULE_NAME_SUFFIX == 'gnu':
                    craympichdir = os.getenv('CRAY_MPICH2_DIR')
                    craygccversion = os.getenv('GCC_VERSION')
                    txt = '\n'.join([
                        'local CRAY_MPICH2_DIR =  %s ;' % craympichdir,
                        'using gcc ',
                        ': %s' % craygccversion,
                        ': CC ',
                        ': <compileflags>-I$(CRAY_MPICH2_DIR)/include ',
                        '  <linkflags>-L$(CRAY_MPICH2_DIR)/lib \ ',
                        '; ',
                        'using mpi ',
                        ': CC ',
                        ': <find-shared-library>mpich ',
                        ': %s' % self.cfg['mpi_launcher'],
                        ';',
                        '',
                    ])
                else:
                    raise EasyBuildError("Bailing out: only PrgEnv-gnu supported for now")
            else:
                # non-Cray: point Boost.Build at the MPI C++ compiler wrapper
                txt = "using mpi : %s ;" % os.getenv("MPICXX")

            write_file('user-config.jam', txt, append=True)
    def install_step(self):
        """Custom install procedure for ALADIN: create $ROOTPACK and $HOMEPACK directories."""

        try:
            mkdir(os.getenv('ROOTPACK'), parents=True)
            mkdir(os.getenv('HOMEPACK'), parents=True)
        except OSError as err:
            # fixed: original format string had two %s placeholders but only 'err' was passed,
            # which would itself fail when formatting the error message
            raise EasyBuildError("Failed to create rootpack/homepack dirs %s, %s: %s",
                                 os.getenv('ROOTPACK'), os.getenv('HOMEPACK'), err)
Esempio n. 31
0
            tests = [
                t for t in tests
                if not (t.endswith('gpcr.run') or t.endswith('ifabp.run'))
            ]
            for test in tests:
                run_cmd(test)

    def install_step(self):
        """Custom install procedure for TINKER: run rename.make from the 'source' directory."""
        source_dir = os.path.join(self.cfg['start_dir'], 'source')
        try:
            os.chdir(source_dir)
        except OSError as err:  # fixed: Python-2-only 'except OSError, err' syntax
            raise EasyBuildError("Failed to move to %s: %s", source_dir, err)

        # create 'bin' subdirectory, then run the rename.make script to populate it
        mkdir(os.path.join(self.cfg['start_dir'], 'bin'))
        run_cmd(
            os.path.join(self.cfg['start_dir'], self.build_subdir,
                         'rename.make'))

    def sanity_check_step(self):
        """Custom sanity check for TINKER: static library and binaries directory must exist."""
        custom_paths = {
            'dirs': ['tinker/bin'],
            'files': ['tinker/source/libtinker.a'],
        }
        super(EB_TINKER, self).sanity_check_step(custom_paths=custom_paths)

    def make_module_req_guess(self):
        """Custom guesses for module file prepend-path statements."""
        guesses = super(EB_TINKER, self).make_module_req_guess()
Esempio n. 32
0
    def configure_step(self):
        """
        Custom configure and build procedure for Siesta.
        - There are two main builds to do, siesta and transiesta
        - In addition there are multiple support tools to build
        """

        # key locations: build happens in Obj/ (driven by arch.make),
        # binaries are collected in bin/ as a temporary install dir
        start_dir = self.cfg['start_dir']
        obj_dir = os.path.join(start_dir, 'Obj')
        arch_make = os.path.join(obj_dir, 'arch.make')
        bindir = os.path.join(start_dir, 'bin')

        loose_ver = LooseVersion(self.version)

        # parallel make flags; only used for versions >= 4.1
        par = ''
        if loose_ver >= LooseVersion('4.1'):
            par = '-j %s' % self.cfg['parallel']

        # enable OpenMP support if desired
        env_var_suff = ''
        if self.toolchain.options.get('openmp', None):
            env_var_suff = '_MT'

        # library link flags from the toolchain environment;
        # the '_MT' suffix selects the multithreaded variants when OpenMP is enabled
        scalapack = os.environ['LIBSCALAPACK' + env_var_suff]
        blacs = os.environ['LIBSCALAPACK' + env_var_suff]
        lapack = os.environ['LIBLAPACK' + env_var_suff]
        blas = os.environ['LIBBLAS' + env_var_suff]
        if get_software_root('imkl') or get_software_root('FFTW'):
            fftw = os.environ['LIBFFT' + env_var_suff]
        else:
            fftw = None

        # regex_subs: in-place substitutions for arch.make;
        # regex_newlines: substitutions that append extra lines (applied one-by-one below)
        regex_newlines = []
        regex_subs = [
            ('dc_lapack.a', ''),
            (r'^NETCDF_INTERFACE\s*=.*$', ''),
            ('libsiestaBLAS.a', ''),
            ('libsiestaLAPACK.a', ''),
            # Needed here to allow 4.1-b1 to be built with openmp
            (r"^(LDFLAGS\s*=).*$", r"\1 %s %s" % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
        ]

        netcdff_loc = get_software_root('netCDF-Fortran')
        if netcdff_loc:
            # Needed for gfortran at least
            regex_newlines.append((r"^(ARFLAGS_EXTRA\s*=.*)$", r"\1\nNETCDF_INCFLAGS = -I%s/include" % netcdff_loc))

        if fftw:
            fft_inc, fft_lib = os.environ['FFT_INC_DIR'], os.environ['FFT_LIB_DIR']
            fppflags = r"\1\nFFTW_INCFLAGS = -I%s\nFFTW_LIBS = -L%s %s" % (fft_inc, fft_lib, fftw)
            regex_newlines.append((r'(FPPFLAGS\s*:?=.*)$', fppflags))

        # Make a temp installdir during the build of the various parts
        mkdir(bindir)

        # change to actual build dir
        change_dir(obj_dir)

        # Populate start_dir with makefiles
        run_cmd(os.path.join(start_dir, 'Src', 'obj_setup.sh'), log_all=True, simple=True, log_output=True)

        # older versions (< 4.1-b2) ship a classic configure script; newer ones only use arch.make
        if loose_ver < LooseVersion('4.1-b2'):
            # MPI?
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--enable-mpi')

            # BLAS and LAPACK
            self.cfg.update('configopts', '--with-blas="%s"' % blas)
            self.cfg.update('configopts', '--with-lapack="%s"' % lapack)

            # ScaLAPACK (and BLACS)
            self.cfg.update('configopts', '--with-scalapack="%s"' % scalapack)
            self.cfg.update('configopts', '--with-blacs="%s"' % blacs)

            # NetCDF-Fortran
            if netcdff_loc:
                self.cfg.update('configopts', '--with-netcdf=-lnetcdff')

            # Configure is run in obj_dir, configure script is in ../Src
            super(EB_Siesta, self).configure_step(cmd_prefix='../Src/')

            if loose_ver > LooseVersion('4.0'):
                regex_subs_Makefile = [
                    (r'CFLAGS\)-c', r'CFLAGS) -c'),
                ]
                apply_regex_substitutions('Makefile', regex_subs_Makefile)

        else:  # there's no configure on newer versions

            # start from the bundled template arch.make matching the compiler family
            if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
                copy_file(os.path.join(obj_dir, 'intel.make'), arch_make)
            elif self.toolchain.comp_family() in [toolchain.GCC]:
                copy_file(os.path.join(obj_dir, 'gfortran.make'), arch_make)
            else:
                raise EasyBuildError("There is currently no support for compiler: %s", self.toolchain.comp_family())

            regex_subs.append((r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DF2003"))

            if self.toolchain.options.get('usempi', None):
                regex_subs.extend([
                    (r"^(CC\s*=\s*).*$", r"\1%s" % os.environ['MPICC']),
                    (r"^(FC\s*=\s*).*$", r"\1%s" % os.environ['MPIF90']),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DMPI"),
                ])
                regex_newlines.append((r"^(FPPFLAGS\s*:?=.*)$", r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = ."))
                complibs = scalapack
            else:
                complibs = lapack

            regex_subs.extend([
                (r"^(LIBS\s*=).*$", r"\1 %s" % complibs),
                # Needed for a couple of the utils
                (r"^(FFLAGS\s*=\s*).*$", r"\1 -fPIC %s" % os.environ['FCFLAGS']),
            ])
            regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\nWXML = libwxml.a"))

            if netcdff_loc:
                regex_subs.extend([
                    (r"^(LIBS\s*=.*)$", r"\1 $(NETCDF_LIBS)"),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DCDF -DNCDF -DNCDF_4 -DNCDF_PARALLEL $(NETCDF_INCLUDE)"),
                    (r"^(COMP_LIBS\s*=.*)$", r"\1 libncdf.a libfdict.a"),
                ])
                netcdf_lib_and_inc = "NETCDF_LIBS = -lnetcdff\nNETCDF_INCLUDE = -I%s/include" % netcdff_loc
                netcdf_lib_and_inc += "\nINCFLAGS = $(NETCDF_INCLUDE)"
                regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\n%s" % netcdf_lib_and_inc))

            # optional dependencies: point arch.make at their installation prefixes when present
            xmlf90 = get_software_root('xmlf90')
            if xmlf90:
                regex_subs.append((r"^(XMLF90_ROOT\s*=).*$", r"\1%s" % xmlf90))

            libpsml = get_software_root('libPSML')
            if libpsml:
                regex_subs.append((r"^(PSML_ROOT\s*=).*$.*", r"\1%s" % libpsml))

            libgridxc = get_software_root('libGridXC')
            if libgridxc:
                regex_subs.append((r"^(GRIDXC_ROOT\s*=).*$", r"\1%s" % libgridxc))

            libxc = get_software_root('libxc')
            if libxc:
                # LIBXC_ROOT is commented out in the template, hence the leading '#' in the pattern
                regex_subs.append((r"^#(LIBXC_ROOT\s*=).*$", r"\1 %s" % libxc))

            elpa = get_software_root('ELPA')
            if elpa:
                elpa_ver = get_software_version('ELPA')
                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__ELPA"),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -I%s/include/elpa-%s/modules" % (elpa, elpa_ver)),
                    (r"^(LIBS\s*=.*)$", r"\1 -L%s/lib -lelpa" % elpa),
                ])

            elsi = get_software_root('ELSI')
            if elsi:
                if not os.path.isfile(os.path.join(elsi, 'lib', 'libelsi.%s' % get_shared_lib_ext())):
                    raise EasyBuildError("This easyblock requires ELSI shared libraries instead of static")

                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__ELSI"),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -I%s/include" % elsi),
                    (r"^(LIBS\s*=.*)$", r"\1 $(FFTW_LIBS) -L%s/lib -lelsi" % elsi),
                ])

            metis = get_software_root('METIS')
            if metis:
                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__METIS"),
                    (r"^(LIBS\s*=.*)$", r"\1 -L%s/lib -lmetis" % metis),
                ])

        apply_regex_substitutions(arch_make, regex_subs)

        # individually apply substitutions that add lines
        for regex_nl in regex_newlines:
            apply_regex_substitutions(arch_make, [regex_nl])

        # build the main siesta binary
        run_cmd('make %s' % par, log_all=True, simple=True, log_output=True)

        # Put binary in temporary install dir
        copy_file(os.path.join(obj_dir, 'siesta'), bindir)

        if self.cfg['with_utils']:
            # Make the utils
            change_dir(os.path.join(start_dir, 'Util'))

            if loose_ver >= LooseVersion('4'):
                # clean_all.sh might be missing executable bit...
                adjust_permissions('./clean_all.sh', stat.S_IXUSR, recursive=False, relative=True)
                run_cmd('./clean_all.sh', log_all=True, simple=True, log_output=True)

            if loose_ver >= LooseVersion('4.1'):
                # patch the tshs2tshs Makefile so it builds standalone with our arch.make
                regex_subs_TS = [
                    (r"^default:.*$", r""),
                    (r"^EXE\s*=.*$", r""),
                    (r"^(include\s*..ARCH_MAKE.*)$", r"EXE=tshs2tshs\ndefault: $(EXE)\n\1"),
                    (r"^(INCFLAGS.*)$", r"\1 -I%s" % obj_dir),
                ]

                makefile = os.path.join(start_dir, 'Util', 'TS', 'tshs2tshs', 'Makefile')
                apply_regex_substitutions(makefile, regex_subs_TS)

            if loose_ver >= LooseVersion('4'):
                # SUFFIX rules in wrong place
                regex_subs_suffix = [
                    (r'^(\.SUFFIXES:.*)$', r''),
                    (r'^(include\s*\$\(ARCH_MAKE\).*)$', r'\1\n.SUFFIXES:\n.SUFFIXES: .c .f .F .o .a .f90 .F90'),
                ]
                makefile = os.path.join(start_dir, 'Util', 'Sockets', 'Makefile')
                apply_regex_substitutions(makefile, regex_subs_suffix)
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'SimpleTest', 'Src', 'Makefile')
                apply_regex_substitutions(makefile, regex_subs_suffix)

            # inject FCFLAGS/LDFLAGS into the link command of some util Makefiles
            regex_subs_UtilLDFLAGS = [
                (r'(\$\(FC\)\s*-o\s)', r'$(FC) %s %s -o ' % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Optimizer', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)
            if loose_ver >= LooseVersion('4'):
                makefile = os.path.join(start_dir, 'Util', 'JobList', 'Src', 'Makefile')
                apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)

            # remove clean at the end of default target
            # And yes, they are re-introducing this bug.
            is_ver40_to_401 = loose_ver >= LooseVersion('4.0') and loose_ver < LooseVersion('4.0.2')
            if (is_ver40_to_401 or loose_ver == LooseVersion('4.1-b3')):
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'SimpleTest', 'Src', 'Makefile')
                apply_regex_substitutions(makefile, [(r"simple_mpi_parallel clean", r"simple_mpi_parallel")])
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'ProtoNEB', 'Src', 'Makefile')
                apply_regex_substitutions(makefile, [(r"protoNEB clean", r"protoNEB")])

            # build_all.sh might be missing executable bit...
            adjust_permissions('./build_all.sh', stat.S_IXUSR, recursive=False, relative=True)
            run_cmd('./build_all.sh', log_all=True, simple=True, log_output=True)

            # Now move all the built utils to the temp installdir
            expected_utils = [
                'CMLComp/ccViz',
                'Contrib/APostnikov/eig2bxsf', 'Contrib/APostnikov/fmpdos',
                'Contrib/APostnikov/md2axsf', 'Contrib/APostnikov/rho2xsf',
                'Contrib/APostnikov/vib2xsf', 'Contrib/APostnikov/xv2xsf',
                'COOP/fat', 'COOP/mprop',
                'Denchar/Src/denchar',
                'DensityMatrix/cdf2dm', 'DensityMatrix/dm2cdf',
                'Eig2DOS/Eig2DOS',
                'Gen-basis/gen-basis', 'Gen-basis/ioncat',
                'Gen-basis/ionplot.sh',
                'Grid/cdf2grid', 'Grid/cdf2xsf', 'Grid/cdf_laplacian',
                'Grid/g2c_ng', 'Grid/grid2cdf', 'Grid/grid2cube',
                'Grid/grid2val', 'Grid/grid_rotate',
                'Helpers/get_chem_labels',
                'HSX/hs2hsx', 'HSX/hsx2hs',
                'JobList/Src/countJobs', 'JobList/Src/getResults',
                'JobList/Src/horizontal', 'JobList/Src/runJobs',
                'Macroave/Src/macroave',
                'ON/lwf2cdf',
                'Optimizer/simplex', 'Optimizer/swarm',
                'pdosxml/pdosxml',
                'Projections/orbmol_proj',
                'SiestaSubroutine/FmixMD/Src/driver',
                'SiestaSubroutine/FmixMD/Src/para',
                'SiestaSubroutine/FmixMD/Src/simple',
                'STM/ol-stm/Src/stm', 'STM/simple-stm/plstm',
                'Vibra/Src/fcbuild', 'Vibra/Src/vibra',
                'WFS/readwf', 'WFS/readwfx', 'WFS/wfs2wfsx',
                'WFS/wfsnc2wfsx', 'WFS/wfsx2wfs',
            ]

            # skip broken utils in 4.1-MaX-1.0 release, hopefully will be fixed later
            if self.version != '4.1-MaX-1.0':
                expected_utils.extend([
                    'VCA/fractional', 'VCA/mixps',
                ])

            if loose_ver >= LooseVersion('3.2'):
                expected_utils.extend([
                    'Bands/eigfat2plot',
                ])

            if loose_ver >= LooseVersion('4.0'):
                if self.version != '4.1-MaX-1.0':
                    expected_utils.extend([
                        'SiestaSubroutine/ProtoNEB/Src/protoNEB',
                        'SiestaSubroutine/SimpleTest/Src/simple_pipes_parallel',
                        'SiestaSubroutine/SimpleTest/Src/simple_pipes_serial',
                        'SiestaSubroutine/SimpleTest/Src/simple_sockets_parallel',
                        'SiestaSubroutine/SimpleTest/Src/simple_sockets_serial',
                        ])
                expected_utils.extend([
                    'Sockets/f2fmaster', 'Sockets/f2fslave',
                ])
                if self.toolchain.options.get('usempi', None):
                    if self.version != '4.1-MaX-1.0':
                        expected_utils.extend([
                            'SiestaSubroutine/SimpleTest/Src/simple_mpi_parallel',
                            'SiestaSubroutine/SimpleTest/Src/simple_mpi_serial',
                        ])

            if loose_ver < LooseVersion('4.1'):
                expected_utils.append('WFS/info_wfsx')
                if loose_ver >= LooseVersion('4.0'):
                    expected_utils.extend([
                        'COOP/dm_creator',
                        'TBTrans_rep/tbtrans',
                    ])
                else:
                    expected_utils.extend([
                        'TBTrans/tbtrans',
                    ])

            if loose_ver < LooseVersion('4.0.2'):
                expected_utils.extend([
                    'Bands/new.gnubands',
                ])
            else:
                expected_utils.extend([
                    'Bands/gnubands',
                ])
                # Need to revisit this when 4.1 is officialy released.
                # This is based on b1-b3 releases
                if loose_ver < LooseVersion('4.1'):
                    expected_utils.extend([
                        'Contour/grid1d', 'Contour/grid2d',
                        'Optical/optical', 'Optical/optical_input',
                        'sies2arc/sies2arc',
                    ])

            if loose_ver >= LooseVersion('4.1'):
                expected_utils.extend([
                    'DensityMatrix/dmbs2dm', 'DensityMatrix/dmUnblock',
                    'Grimme/fdf2grimme',
                    'SpPivot/pvtsp',
                    'TS/TBtrans/tbtrans', 'TS/tselecs.sh',
                    'TS/ts2ts/ts2ts',
                ])
                if self.version != '4.1-MaX-1.0':
                    expected_utils.extend([
                        'TS/tshs2tshs/tshs2tshs',
                    ])

            for util in expected_utils:
                copy_file(os.path.join(start_dir, 'Util', util), bindir)

        if self.cfg['with_transiesta']:
            # Build transiesta
            change_dir(obj_dir)

            ts_clean_target = 'clean'
            if loose_ver >= LooseVersion('4.1-b4'):
                ts_clean_target += '-transiesta'

            run_cmd('make %s' % ts_clean_target, log_all=True, simple=True, log_output=True)
            run_cmd('make %s transiesta' % par, log_all=True, simple=True, log_output=True)

            copy_file(os.path.join(obj_dir, 'transiesta'), bindir)
Esempio n. 33
0
    def test_adjust_permissions(self):
        """Test adjust_permissions"""
        # set umask hard to run test reliably
        # fixed: 0022 is a Python-2-only octal literal (SyntaxError on Python 3);
        # 0o022 is equivalent and valid on Python 2.6+ and 3
        orig_umask = os.umask(0o022)

        # prep files/dirs/(broken) symlinks is test dir

        # file: rw-r--r--
        ft.write_file(os.path.join(self.test_prefix, 'foo'), 'foo')
        foo_perms = os.stat(os.path.join(self.test_prefix,
                                         'foo'))[stat.ST_MODE]
        for bit in [stat.S_IRUSR, stat.S_IWUSR, stat.S_IRGRP, stat.S_IROTH]:
            self.assertTrue(foo_perms & bit)
        for bit in [
                stat.S_IXUSR, stat.S_IWGRP, stat.S_IXGRP, stat.S_IWOTH,
                stat.S_IXOTH
        ]:
            self.assertFalse(foo_perms & bit)

        # dir: rwxr-xr-x
        ft.mkdir(os.path.join(self.test_prefix, 'bar'))
        bar_perms = os.stat(os.path.join(self.test_prefix,
                                         'bar'))[stat.ST_MODE]
        for bit in [
                stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR, stat.S_IRGRP,
                stat.S_IXGRP, stat.S_IROTH, stat.S_IXOTH
        ]:
            self.assertTrue(bar_perms & bit)
        for bit in [stat.S_IWGRP, stat.S_IWOTH]:
            self.assertFalse(bar_perms & bit)

        # file in dir: rw-r--r--
        foobar_path = os.path.join(self.test_prefix, 'bar', 'foobar')
        ft.write_file(foobar_path, 'foobar')
        foobar_perms = os.stat(foobar_path)[stat.ST_MODE]
        for bit in [stat.S_IRUSR, stat.S_IWUSR, stat.S_IRGRP, stat.S_IROTH]:
            self.assertTrue(foobar_perms & bit)
        for bit in [
                stat.S_IXUSR, stat.S_IWGRP, stat.S_IXGRP, stat.S_IWOTH,
                stat.S_IXOTH
        ]:
            self.assertFalse(foobar_perms & bit)

        # include symlink
        os.symlink(foobar_path, os.path.join(self.test_prefix,
                                             'foobar_symlink'))

        # include broken symlink (symlinks are skipped, so this shouldn't cause problems)
        tmpfile = os.path.join(self.test_prefix, 'thiswontbetherelong')
        ft.write_file(tmpfile, 'poof!')
        os.symlink(tmpfile, os.path.join(self.test_prefix, 'broken_symlink'))
        os.remove(tmpfile)

        # test default behaviour:
        # recursive, add permissions, relative to existing permissions, both files and dirs, skip symlinks
        # add user execution, group write permissions
        ft.adjust_permissions(self.test_prefix, stat.S_IXUSR | stat.S_IWGRP)

        # foo file: rwxrw-r--
        foo_perms = os.stat(os.path.join(self.test_prefix,
                                         'foo'))[stat.ST_MODE]
        for bit in [
                stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR, stat.S_IRGRP,
                stat.S_IWGRP, stat.S_IROTH
        ]:
            self.assertTrue(foo_perms & bit)
        for bit in [stat.S_IXGRP, stat.S_IWOTH, stat.S_IXOTH]:
            self.assertFalse(foo_perms & bit)

        # bar dir: rwxrwxr-x
        bar_perms = os.stat(os.path.join(self.test_prefix,
                                         'bar'))[stat.ST_MODE]
        for bit in [
                stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR, stat.S_IRGRP,
                stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IXOTH
        ]:
            self.assertTrue(bar_perms & bit)
        self.assertFalse(bar_perms & stat.S_IWOTH)

        # foo/foobar file: rwxrw-r-- (also verified through the symlink pointing at it)
        for path in [
                os.path.join(self.test_prefix, 'bar', 'foobar'),
                os.path.join(self.test_prefix, 'foobar_symlink')
        ]:
            perms = os.stat(path)[stat.ST_MODE]
            for bit in [
                    stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR, stat.S_IRGRP,
                    stat.S_IWGRP, stat.S_IROTH
            ]:
                self.assertTrue(perms & bit)
            for bit in [stat.S_IXGRP, stat.S_IWOTH, stat.S_IXOTH]:
                self.assertFalse(perms & bit)

        # broken symlinks are trouble if symlinks are not skipped
        self.assertErrorRegex(EasyBuildError,
                              "No such file or directory",
                              ft.adjust_permissions,
                              self.test_prefix,
                              stat.S_IXUSR,
                              skip_symlinks=False)

        # restore original umask
        os.umask(orig_umask)
Esempio n. 34
0
    def test_include_easyblocks(self):
        """Test include_easyblocks()."""
        # location of the sandboxed test easyblocks that are already importable
        test_easyblocks = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'sandbox', 'easybuild',
            'easyblocks')

        # put a couple of custom easyblocks in place, to test
        myeasyblocks = os.path.join(self.test_prefix, 'myeasyblocks')
        mkdir(os.path.join(myeasyblocks, 'generic'), parents=True)

        # include __init__.py files that should be ignored, and shouldn't cause trouble (bug #1697)
        write_file(os.path.join(myeasyblocks, '__init__.py'),
                   "# dummy init, should not get included")
        write_file(os.path.join(myeasyblocks, 'generic', '__init__.py'),
                   "# dummy init, should not get included")

        # software-specific easyblock (EB_ prefix) at the top level
        myfoo_easyblock_txt = '\n'.join([
            "from easybuild.easyblocks.generic.configuremake import ConfigureMake",
            "class EB_Foo(ConfigureMake):",
            "   pass",
        ])
        write_file(os.path.join(myeasyblocks, 'myfoo.py'), myfoo_easyblock_txt)

        # generic easyblock (no EB_ prefix) in the 'generic' subdirectory
        mybar_easyblock_txt = '\n'.join([
            "from easybuild.framework.easyblock import EasyBlock",
            "class Bar(EasyBlock):",
            "   pass",
        ])
        write_file(os.path.join(myeasyblocks, 'generic', 'mybar.py'),
                   mybar_easyblock_txt)

        # second myfoo easyblock, should get ignored...
        myfoo_bis = os.path.join(self.test_prefix, 'myfoo.py')
        write_file(myfoo_bis, '')

        # hijack $HOME to test expanding ~ in locations passed to include_easyblocks
        os.environ['HOME'] = myeasyblocks

        # expand set of known easyblocks with our custom ones;
        # myfoo easyblock is included twice, first path should have preference
        glob_paths = [
            os.path.join('~', '*'),
            os.path.join(myeasyblocks, '*/*.py'), myfoo_bis
        ]
        included_easyblocks_path = include_easyblocks(self.test_prefix,
                                                      glob_paths)

        # the included easyblocks should be staged as an 'easybuild' package tree
        expected_paths = [
            '__init__.py', 'easyblocks/__init__.py', 'easyblocks/myfoo.py',
            'easyblocks/generic/__init__.py', 'easyblocks/generic/mybar.py'
        ]
        for filepath in expected_paths:
            fullpath = os.path.join(included_easyblocks_path, 'easybuild',
                                    filepath)
            self.assertTrue(os.path.exists(fullpath), "%s exists" % fullpath)

        # path to included easyblocks should be prepended to Python search path
        self.assertEqual(sys.path[0], included_easyblocks_path)

        # importing custom easyblocks should work
        # NOTE(review): up(path, n) presumably strips n trailing path components — verify;
        # the real .py file behind the imported module must be the one from myeasyblocks
        import easybuild.easyblocks.myfoo
        myfoo_pyc_path = easybuild.easyblocks.myfoo.__file__
        myfoo_real_py_path = os.path.realpath(
            os.path.join(os.path.dirname(myfoo_pyc_path), 'myfoo.py'))
        self.assertTrue(
            os.path.samefile(up(myfoo_real_py_path, 1), myeasyblocks))

        import easybuild.easyblocks.generic.mybar
        mybar_pyc_path = easybuild.easyblocks.generic.mybar.__file__
        mybar_real_py_path = os.path.realpath(
            os.path.join(os.path.dirname(mybar_pyc_path), 'mybar.py'))
        self.assertTrue(
            os.path.samefile(up(mybar_real_py_path, 2), myeasyblocks))

        # existing (test) easyblocks are unaffected
        import easybuild.easyblocks.foofoo
        foofoo_path = os.path.dirname(
            os.path.dirname(easybuild.easyblocks.foofoo.__file__))
        self.assertTrue(os.path.samefile(foofoo_path, test_easyblocks))
Esempio n. 35
0
def regtest(easyconfig_paths, modtool, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    :param easyconfig_paths: path of easyconfigs to run regtest on
    :param modtool: ModulesTool instance to use
    :param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    :return: result of build_easyconfigs() in sequential mode, True after submitting jobs otherwise
    """

    cur_dir = os.getcwd()

    # aggregate-only mode: merge existing XML results and exit without building anything
    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(
            aggregate_regtest,
            "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" %
                  (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place all log files and the test output as xml
    # precedence: --regtest-output-dir, then --testoutput, then a timestamped default
    regtest_output_dir = build_option('regtest_output_dir')
    testoutput = build_option('testoutput')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif testoutput is not None:
        output_dir = os.path.abspath(testoutput)
    else:
        # default: current dir + easybuild-test-[timestamp]
        dirname = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
        output_dir = os.path.join(cur_dir, dirname)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(
                path, ignore_dirs=build_option('ignore_dirs'))
    else:
        raise EasyBuildError("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    # parse errors are recorded as test failures rather than aborting the whole regtest
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(
                process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError as err:
            test_results.append((ecfile, 'parsing_easyconfigs',
                                 'easyconfig file error: %s' % err, _log))

    # skip easyconfigs for which a module is already available, unless forced
    if not build_option('force'):
        _log.debug(
            "Skipping easyconfigs from %s that already have a module available..."
            % easyconfigs)
        easyconfigs = skip_available(easyconfigs, modtool)
        _log.debug("Retained easyconfigs after skipping: %s" % easyconfigs)

    if build_option('sequential'):
        return build_easyconfigs(easyconfigs, output_dir, test_results)
    else:
        resolved = resolve_dependencies(easyconfigs, modtool)

        # job command template; %(spec)s and %(output_dir)s are left in place here
        # (only %(cmd)s is substituted below), presumably filled in per job by
        # build_easyconfigs_in_parallel — TODO confirm
        cmd = "eb %(spec)s --regtest --sequential -ld --testoutput=%(output_dir)s"
        command = "unset TMPDIR && cd %s && %s; " % (cur_dir, cmd)
        # retry twice in case of failure, to avoid fluke errors
        command += "if [ $? -ne 0 ]; then %(cmd)s --force && %(cmd)s --force; fi" % {
            'cmd': cmd
        }

        build_easyconfigs_in_parallel(command, resolved, output_dir=output_dir)

        _log.info("Submitted regression test as jobs, results in %s" %
                  output_dir)

        return True  # success
Esempio n. 36
0
    def test_end2end_singularity_recipe(self):
        """End-to-end test for --containerize (recipe only)."""
        test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'easyconfigs', 'test_ecs')
        toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

        containerpath = os.path.join(self.test_prefix, 'containers')
        os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
        # --containerpath must be an existing directory (this is done to avoid misconfiguration)
        mkdir(containerpath)

        args = [
            toy_ec,
            '--containerize',
            '--experimental',
        ]

        # without --container-base, containerizing must fail outright
        error_pattern = "--container-base must be specified"
        self.assertErrorRegex(EasyBuildError,
                              error_pattern,
                              self.run_main,
                              args,
                              raise_error=True)

        # generating Singularity definition file with 'docker' or 'shub' bootstrap agents always works,
        # i.e. image label is not verified, image tag can be anything
        for cont_base in [
                'docker:test123', 'docker:test123:foo', 'shub:test123',
                'shub:test123:foo'
        ]:
            stdout, stderr = self.run_main(args +
                                           ['--container-base=%s' % cont_base])

            self.assertFalse(stderr)
            regexs = [
                "^== Singularity definition file created at %s/containers/Singularity.toy-0.0"
                % self.test_prefix
            ]
            self.check_regexs(regexs, stdout)

            remove_file(
                os.path.join(self.test_prefix, 'containers',
                             'Singularity.toy-0.0'))

        args.append("--container-base=shub:test123")
        self.run_main(args)

        # existing definition file is not overwritten without use of --force
        error_pattern = "Container recipe at .* already exists, not overwriting it without --force"
        self.assertErrorRegex(EasyBuildError,
                              error_pattern,
                              self.run_main,
                              args,
                              raise_error=True)

        stdout, stderr = self.run_main(args + ['--force'])
        self.assertFalse(stderr)
        regexs = [
            "^== WARNING: overwriting existing container recipe at .* due to --force",
            "^== Singularity definition file created at %s/containers/Singularity.toy-0.0"
            % self.test_prefix,
        ]
        self.check_regexs(regexs, stdout)

        remove_file(
            os.path.join(self.test_prefix, 'containers',
                         'Singularity.toy-0.0'))

        # add another easyconfig file to check if multiple easyconfigs are handled correctly
        args.insert(1, os.path.join(test_ecs, 'g', 'GCC', 'GCC-4.9.2.eb'))

        # with 'localimage' bootstrap agent, specified image must exist
        test_img = os.path.join(self.test_prefix, 'test123.img')
        args[-1] = "--container-base=localimage:%s" % test_img
        error_pattern = "Singularity base image at specified path does not exist"
        self.assertErrorRegex(EasyBuildError,
                              error_pattern,
                              self.run_main,
                              args,
                              raise_error=True)

        write_file(test_img, '')
        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        regexs = [
            "^== Singularity definition file created at %s/containers/Singularity.toy-0.0"
            % self.test_prefix
        ]
        self.check_regexs(regexs, stdout)

        # check contents of generated recipe
        def_file = read_file(
            os.path.join(self.test_prefix, 'containers',
                         'Singularity.toy-0.0'))
        regexs = [
            "^Bootstrap: localimage$",
            "^From: %s$" % test_img,
            "^eb toy-0.0.eb GCC-4.9.2.eb",
            "module load toy/0.0 GCC/4.9.2$",
        ]
        self.check_regexs(regexs, def_file)

        # image extension must make sense when localimage is used
        for img_name in ['test123.foo', 'test123']:
            test_img = os.path.join(self.test_prefix, img_name)
            args[-1] = "--container-base=localimage:%s" % test_img
            write_file(test_img, '')
            # raw string: '\.' in a regular string literal is an invalid escape sequence
            # (SyntaxWarning/DeprecationWarning on recent Python versions)
            error_pattern = r"Invalid image extension '.*' must be \.img or \.simg"
            self.assertErrorRegex(EasyBuildError,
                                  error_pattern,
                                  self.run_main,
                                  args,
                                  raise_error=True)
Esempio n. 37
0
    def test_end2end_singularity_image(self):
        """End-to-end test for --containerize (recipe + image)."""
        topdir = os.path.dirname(os.path.abspath(__file__))
        toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy',
                              'toy-0.0.eb')

        containerpath = os.path.join(self.test_prefix, 'containers')
        os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
        # --containerpath must be an existing directory (this is done to avoid misconfiguration)
        mkdir(containerpath)

        test_img = os.path.join(self.test_prefix, 'test123.img')
        write_file(test_img, '')

        args = [
            toy_ec,
            '-C',  # equivalent with --containerize
            '--experimental',
            '--container-base=localimage:%s' % test_img,
            '--container-build-image',
        ]

        if which('singularity') is None:
            error_pattern = "singularity with version 2.4 or higher not found on your system."
            self.assertErrorRegex(EasyBuildError,
                                  error_pattern,
                                  self.eb_main,
                                  args,
                                  raise_error=True)

        # install mocked versions of 'sudo' and 'singularity' commands
        singularity = os.path.join(self.test_prefix, 'bin', 'singularity')
        write_file(singularity, MOCKED_SINGULARITY)
        adjust_permissions(singularity, stat.S_IXUSR, add=True)

        sudo = os.path.join(self.test_prefix, 'bin', 'sudo')
        write_file(
            sudo,
            '#!/bin/bash\necho "running command \'$@\' with sudo..."\neval "$@"\n'
        )
        adjust_permissions(sudo, stat.S_IXUSR, add=True)

        os.environ['PATH'] = os.path.pathsep.join(
            [os.path.join(self.test_prefix, 'bin'),
             os.getenv('PATH')])

        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        # raw strings for regex patterns: '\.', '\s', '\S' in regular string literals are
        # invalid escape sequences (SyntaxWarning/DeprecationWarning on recent Python versions)
        regexs = [
            "^== singularity tool found at %s/bin/singularity" %
            self.test_prefix,
            "^== singularity version '2.4.0' is 2.4 or higher ... OK",
            r"^== Singularity definition file created at %s/containers/Singularity\.toy-0.0"
            % self.test_prefix,
            r"^== Running 'sudo\s*\S*/singularity build\s*/.* /.*', you may need to enter your 'sudo' password...",
            r"^== Singularity image created at %s/containers/toy-0.0\.simg" %
            self.test_prefix,
        ]
        self.check_regexs(regexs, stdout)

        self.assertTrue(
            os.path.exists(os.path.join(containerpath, 'toy-0.0.simg')))

        remove_file(os.path.join(containerpath, 'Singularity.toy-0.0'))

        # check use of --container-image-format & --container-image-name
        args.extend([
            "--container-image-format=ext3",
            "--container-image-name=foo-bar",
        ])
        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        regexs[
            -3] = r"^== Singularity definition file created at %s/containers/Singularity\.foo-bar" % self.test_prefix
        regexs[
            -2] = r"^== Running 'sudo\s*\S*/singularity build --writable /.* /.*', you may need to enter .*"
        regexs[
            -1] = r"^== Singularity image created at %s/containers/foo-bar\.img$" % self.test_prefix
        self.check_regexs(regexs, stdout)

        cont_img = os.path.join(containerpath, 'foo-bar.img')
        self.assertTrue(os.path.exists(cont_img))

        remove_file(os.path.join(containerpath, 'Singularity.foo-bar'))

        # test again with container image already existing

        error_pattern = "Container image already exists at %s, not overwriting it without --force" % cont_img
        self.mock_stdout(True)
        self.assertErrorRegex(EasyBuildError,
                              error_pattern,
                              self.run_main,
                              args,
                              raise_error=True)
        self.mock_stdout(False)

        args.append('--force')
        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        regexs.extend([
            "WARNING: overwriting existing container image at %s due to --force"
            % cont_img,
        ])
        self.check_regexs(regexs, stdout)
        self.assertTrue(os.path.exists(cont_img))

        # also check behaviour under --extended-dry-run
        args.append('--extended-dry-run')
        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        self.check_regexs(regexs, stdout)

        # test use of --container-tmpdir
        args.append('--container-tmpdir=%s' % self.test_prefix)
        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        regexs[
            -3] = r"^== Running 'sudo\s*SINGULARITY_TMPDIR=%s \S*/singularity build .*" % self.test_prefix
        self.check_regexs(regexs, stdout)
    def test_step(self):
        """Run WIEN2k test benchmarks. """
        def run_wien2k_test(cmd_arg):
            """Run an x_lapw lapw1 test command with the given argument, and check for success."""

            cmd = "x_lapw lapw1 %s" % cmd_arg
            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            # a successful LAPW1 run reports "LAPW1 END" in its output
            re_success = re.compile(r"LAPW1\s+END")
            if not re_success.search(out):
                raise EasyBuildError(
                    "Test '%s' in %s failed (pattern '%s' not found)?", cmd,
                    os.getcwd(), re_success.pattern)
            else:
                self.log.info("Test '%s' seems to have run successfully: %s" %
                              (cmd, out))

        if self.cfg['runtest']:
            # benchmarks require test data to be downloaded, so URLs must be provided
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            # prepend $PATH with install directory, define $SCRATCH which is used by the tests
            env.setvar('PATH', "%s:%s" % (self.installdir, os.environ['PATH']))
            try:
                # remember current dir so we can return to it after the tests
                cwd = os.getcwd()

                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                os.chdir(tmpdir)
                self.log.info("Running tests in %s" % tmpdir)

                scratch = os.path.join(tmpdir, 'scratch')
                mkdir(scratch)
                env.setvar('SCRATCH', scratch)

                # download data
                testdata_paths = {}
                for testdata in self.cfg['testdata']:
                    td_path = self.obtain_file(testdata)
                    if not td_path:
                        raise EasyBuildError(
                            "Downloading file from %s failed?", testdata)
                    testdata_paths.update(
                        {os.path.basename(testdata): td_path})

                self.log.debug('testdata_paths: %s' % testdata_paths)

                # unpack serial benchmark
                serial_test_name = "test_case"
                srcdir = extract_file(testdata_paths['%s.tar.gz' %
                                                     serial_test_name],
                                      tmpdir,
                                      change_into_dir=False)
                change_dir(srcdir)

                # run serial benchmark
                os.chdir(os.path.join(tmpdir, serial_test_name))
                run_wien2k_test("-c")

                # unpack parallel benchmark (in serial benchmark dir)
                parallel_test_name = "mpi-benchmark"
                srcdir = extract_file(testdata_paths['%s.tar.gz' %
                                                     parallel_test_name],
                                      tmpdir,
                                      change_into_dir=False)
                change_dir(srcdir)

                # run parallel benchmark (from the serial test dir, where the parallel data was unpacked)
                os.chdir(os.path.join(tmpdir, serial_test_name))
                run_wien2k_test("-p")

                # return to original directory and clean up the temporary test dir
                os.chdir(cwd)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError(
                    "Failed to run WIEN2k benchmark tests: %s", err)

            self.log.debug("Current dir: %s" % os.getcwd())
Esempio n. 39
0
    def configure_step(self, srcdir=None, builddir=None):
        """
        Configure build using cmake.

        :param srcdir: source directory to pass to cmake (default: derived from 'srcdir' config or start dir)
        :param builddir: directory to configure in (default: separate 'easybuild_obj' dir, if enabled)
        :return: output of the configure command
        """

        setup_cmake_env(self.toolchain)

        if builddir is None and self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            # For separate_build_dir we want a clean folder. So remove if it exists
            # This can happen when multiple iterations are done (e.g. shared, static, ...)
            if os.path.exists(builddir):
                self.log.warning('Build directory %s already exists (from previous iterations?). Removing...',
                                 builddir)
                remove_dir(builddir)

        if builddir:
            mkdir(builddir, parents=True)
            change_dir(builddir)
            default_srcdir = self.cfg['start_dir']
        else:
            # configuring in-place, so source dir is the current dir
            default_srcdir = '.'

        if srcdir is None:
            if self.cfg.get('srcdir', None) is not None:
                # Note that the join returns srcdir if it is absolute
                srcdir = os.path.join(default_srcdir, self.cfg['srcdir'])
            else:
                srcdir = default_srcdir

        options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir]

        # str.startswith accepts a tuple of prefixes, no need for two separate checks
        if self.installdir.startswith(('/opt', '/usr')):
            # https://cmake.org/cmake/help/latest/module/GNUInstallDirs.html
            localstatedir = os.path.join(self.installdir, 'var')
            runstatedir = os.path.join(localstatedir, 'run')
            sysconfdir = os.path.join(self.installdir, 'etc')
            options.append("-DCMAKE_INSTALL_LOCALSTATEDIR=%s" % localstatedir)
            options.append("-DCMAKE_INSTALL_RUNSTATEDIR=%s" % runstatedir)
            options.append("-DCMAKE_INSTALL_SYSCONFDIR=%s" % sysconfdir)

        if '-DCMAKE_BUILD_TYPE=' in self.cfg['configopts']:
            if self.cfg.get('build_type') is not None:
                self.log.warning('CMAKE_BUILD_TYPE is set in configopts. Ignoring build_type')
        else:
            options.append('-DCMAKE_BUILD_TYPE=%s' % self.build_type)

        # Add -fPIC flag if necessary
        if self.toolchain.options['pic']:
            options.append('-DCMAKE_POSITION_INDEPENDENT_CODE=ON')

        if self.cfg['generator']:
            options.append('-G "%s"' % self.cfg['generator'])

        # sjb: comment this section out as it involves bringing in a framework change as well
        # pass --sysroot value down to CMake,
        # and enable using absolute paths to compiler commands to avoid
        # that CMake picks up compiler from sysroot rather than toolchain compiler...
        # sysroot = build_option('sysroot')
        # if sysroot:
        #     options.append('-DCMAKE_SYSROOT=%s' % sysroot)
        #     self.log.info("Using absolute path to compiler commands because of alterate sysroot %s", sysroot)
        #     self.cfg['abs_path_compilers'] = True

        # Set flag for shared libs if requested
        # Not adding one allows the project to choose a default
        build_shared_libs = self.cfg.get('build_shared_libs')
        if build_shared_libs is not None:
            # Contrary to other options build_shared_libs takes precedence over configopts which may be unexpected.
            # This is to allow self.lib_ext to be determined correctly.
            # Usually you want to remove -DBUILD_SHARED_LIBS from configopts and set build_shared_libs to True or False
            # If you need it in configopts don't set build_shared_libs (or explicitely set it to `None` (Default))
            if '-DBUILD_SHARED_LIBS=' in self.cfg['configopts']:
                print_warning('Ignoring BUILD_SHARED_LIBS is set in configopts because build_shared_libs is set')
            self.cfg.update('configopts', '-DBUILD_SHARED_LIBS=%s' % ('ON' if build_shared_libs else 'OFF'))

        # map relevant compiler/flags environment variables onto CMake options
        env_to_options = {
            'CC': 'CMAKE_C_COMPILER',
            'CFLAGS': 'CMAKE_C_FLAGS',
            'CXX': 'CMAKE_CXX_COMPILER',
            'CXXFLAGS': 'CMAKE_CXX_FLAGS',
            'F90': 'CMAKE_Fortran_COMPILER',
            'FFLAGS': 'CMAKE_Fortran_FLAGS',
        }
        for env_name, option in env_to_options.items():
            value = os.getenv(env_name)
            if value is not None:
                if option.endswith('_COMPILER') and self.cfg.get('abs_path_compilers', False):
                    value = which(value)
                    self.log.info("Using absolute path to compiler command: %s", value)
                options.append("-D%s='%s'" % (option, value))

        if build_option('rpath'):
            # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install...
            # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416
            options.append('-DCMAKE_SKIP_RPATH=ON')

        # show what CMake is doing by default
        options.append('-DCMAKE_VERBOSE_MAKEFILE=ON')

        if not self.cfg.get('allow_system_boost', False):
            # don't pick up on system Boost if Boost is included as dependency
            # - specify Boost location via -DBOOST_ROOT
            # - instruct CMake to not search for Boost headers/libraries in other places
            # - disable search for Boost CMake package configuration file
            boost_root = get_software_root('Boost')
            if boost_root:
                options.extend([
                    '-DBOOST_ROOT=%s' % boost_root,
                    '-DBoost_NO_SYSTEM_PATHS=ON',
                    '-DBoost_NO_BOOST_CMAKE=ON',
                ])

        options_string = ' '.join(options)

        # look up the configure command only once
        configure_cmd = self.cfg.get('configure_cmd')
        if configure_cmd == DEFAULT_CONFIGURE_CMD:
            command = ' '.join([
                self.cfg['preconfigopts'],
                DEFAULT_CONFIGURE_CMD,
                options_string,
                self.cfg['configopts'],
                srcdir])
        else:
            # custom configure command: generated CMake options are not passed
            command = ' '.join([
                self.cfg['preconfigopts'],
                configure_cmd,
                self.cfg['configopts']])

        (out, _) = run_cmd(command, log_all=True, simple=False)

        return out
    def test_modules_tool_stateless(self):
        """Check whether ModulesTool instance is stateless between runs."""
        test_modules_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'modules')

        # copy test Core/Compiler modules, we need to rewrite the 'module use' statement in the one we're going to load
        shutil.copytree(os.path.join(test_modules_path, 'Core'),
                        os.path.join(self.test_prefix, 'Core'))
        shutil.copytree(os.path.join(test_modules_path, 'Compiler'),
                        os.path.join(self.test_prefix, 'Compiler'))

        modtxt = read_file(
            os.path.join(self.test_prefix, 'Core', 'GCC', '4.7.2'))
        modpath_extension = os.path.join(self.test_prefix, 'Compiler', 'GCC',
                                         '4.7.2')
        # BUG FIX: re.M must be passed via 'flags'; as 4th positional argument it
        # would be interpreted as 'count' (re.M == 8), limiting the number of replacements
        modtxt = re.sub('module use .*', 'module use %s' % modpath_extension,
                        modtxt, flags=re.M)
        write_file(os.path.join(self.test_prefix, 'Core', 'GCC', '4.7.2'),
                   modtxt)

        modtxt = read_file(
            os.path.join(self.test_prefix, 'Compiler', 'GCC', '4.7.2',
                         'OpenMPI', '1.6.4'))
        modpath_extension = os.path.join(self.test_prefix, 'MPI', 'GCC',
                                         '4.7.2', 'OpenMPI', '1.6.4')
        mkdir(modpath_extension, parents=True)
        modtxt = re.sub('module use .*', 'module use %s' % modpath_extension,
                        modtxt, flags=re.M)
        write_file(
            os.path.join(self.test_prefix, 'Compiler', 'GCC', '4.7.2',
                         'OpenMPI', '1.6.4'), modtxt)

        # force reset of any singletons by reinitiating config
        init_config()

        # make sure $LMOD_DEFAULT_MODULEPATH is unset, since Lmod picks it up and tweaks $MODULEPATH to match it
        if 'LMOD_DEFAULT_MODULEPATH' in os.environ:
            del os.environ['LMOD_DEFAULT_MODULEPATH']

        self.reset_modulepath([os.path.join(self.test_prefix, 'Core')])

        if isinstance(self.modtool, Lmod):
            # GCC/4.6.3 is nowhere to be found (in $MODULEPATH)
            load_err_msg = r"The[\s\n]*following[\s\n]*module\(s\)[\s\n]*are[\s\n]*unknown"
        else:
            load_err_msg = "Unable to locate a modulefile"

        # GCC/4.6.3 is *not* an available Core module
        self.assertErrorRegex(EasyBuildError, load_err_msg, self.modtool.load,
                              ['GCC/4.6.3'])

        # GCC/4.7.2 is one of the available Core modules
        self.modtool.load(['GCC/4.7.2'])

        # OpenMPI/1.6.4 becomes available after loading GCC/4.7.2 module
        self.modtool.load(['OpenMPI/1.6.4'])
        self.modtool.purge()

        if 'LMOD_DEFAULT_MODULEPATH' in os.environ:
            del os.environ['LMOD_DEFAULT_MODULEPATH']

        # reset $MODULEPATH, obtain new ModulesTool instance,
        # which should not remember anything w.r.t. previous $MODULEPATH value
        os.environ['MODULEPATH'] = test_modules_path
        self.modtool = modules_tool()

        # GCC/4.6.3 is available
        self.modtool.load(['GCC/4.6.3'])
        self.modtool.purge()

        # GCC/4.7.2 is available (note: also as non-Core module outside of hierarchy)
        self.modtool.load(['GCC/4.7.2'])

        # OpenMPI/1.6.4 is *not* available with current $MODULEPATH (loaded GCC/4.7.2 was not a hierarchical module)
        if isinstance(self.modtool, Lmod):
            # OpenMPI/1.6.4 exists, but is not available for load;
            # exact error message depends on Lmod version
            # (raw strings; char class fixed from garbled '[\s\sn]' to '[\s\n]',
            # consistent with the pattern used earlier in this test)
            load_err_msg = '|'.join([
                r'These[\s\n]*module\(s\)[\s\n]*exist[\s\n]*but[\s\n]*cannot[\s\n]*be',
                r'The[\s\n]*following[\s\n]*module\(s\)[\s\n]*are[\s\n]*unknown',
            ])
        else:
            load_err_msg = "Unable to locate a modulefile"

        self.assertErrorRegex(EasyBuildError, load_err_msg, self.modtool.load,
                              ['OpenMPI/1.6.4'])
Esempio n. 41
0
    def install_step(self):
        """
        Install by copying files over to the right places.

        Also create symlinks where expected by other software (Lib directory).

        For v4 and newer, a regular 'make install' is run first and only the
        METIS library/header bundled in the build tree are copied in addition;
        for older versions, libraries and headers are copied manually.
        """
        includedir = os.path.join(self.installdir, 'include')
        libdir = os.path.join(self.installdir, 'lib')

        if LooseVersion(self.version) >= LooseVersion("4"):
            # includedir etc changed in v4, use a normal make install
            cmd = "make install %s" % self.cfg['installopts']
            try:
                # run 'make install' from the dedicated build dir, then return
                # to the start dir for the extra copies below
                os.chdir(self.parmetis_builddir)
                run_cmd(cmd, log_all=True, simple=True)
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Running '%s' in %s failed: %s", cmd,
                                     self.parmetis_builddir, err)

            # libraries
            # 'make install' only installs ParMETIS itself; the bundled METIS
            # static library is copied from the build tree in addition
            try:
                src = os.path.join(self.cfg['start_dir'], 'build', 'libmetis',
                                   'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError as err:
                raise EasyBuildError(
                    "Copying files to installation dir failed: %s", err)

            # include files
            # also copy the bundled METIS header next to the ParMETIS ones
            try:
                src = os.path.join(self.cfg['start_dir'], 'build', 'metis',
                                   'include', 'metis.h')
                dst = os.path.join(includedir, 'metis.h')
                shutil.copy2(src, dst)
            except OSError as err:
                raise EasyBuildError(
                    "Copying files to installation dir failed: %s", err)

        else:
            # pre-v4: no 'make install' target, copy everything manually
            mkdir(libdir)
            mkdir(includedir)

            # libraries
            try:
                for fil in ['libmetis.a', 'libparmetis.a']:
                    src = os.path.join(self.cfg['start_dir'], fil)
                    dst = os.path.join(libdir, fil)
                    shutil.copy2(src, dst)
            except OSError as err:
                raise EasyBuildError(
                    "Copying files to installation dir failed: %s", err)

            # include files
            try:
                src = os.path.join(self.cfg['start_dir'], 'parmetis.h')
                dst = os.path.join(includedir, 'parmetis.h')
                shutil.copy2(src, dst)
                # some applications (SuiteSparse) can only use METIS (not ParMETIS), but header files are the same
                dst = os.path.join(includedir, 'metis.h')
                shutil.copy2(src, dst)
            except OSError as err:
                raise EasyBuildError(
                    "Copying files to installation dir failed: %s", err)

        # other applications depending on ParMETIS (SuiteSparse for one) look for both ParMETIS libraries
        # and header files in the Lib directory (capital L). The following symlink are hence created.
        try:
            llibdir = os.path.join(self.installdir, 'Lib')
            os.symlink(libdir, llibdir)
            # headers are symlinked into lib/ (reachable as Lib/ via the
            # directory symlink above), which is where SuiteSparse expects them
            for f in ['metis.h', 'parmetis.h']:
                os.symlink(os.path.join(includedir, f),
                           os.path.join(libdir, f))
        except OSError as err:
            raise EasyBuildError(
                "Something went wrong during symlink creation: %s", err)
Esempio n. 42
0
                src = os.path.join(self.cfg['start_dir'], 'build' ,'libmetis' ,'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError, err:
                raise EasyBuildError("Copying files to installation dir failed: %s", err)

            # include files
            try:
                src = os.path.join(self.cfg['start_dir'], 'build', 'metis', 'include', 'metis.h')
                dst = os.path.join(includedir, 'metis.h')
                shutil.copy2(src, dst)
            except OSError, err:
                raise EasyBuildError("Copying files to installation dir failed: %s", err)

        else:
            mkdir(libdir)
            mkdir(includedir)

            # libraries
            try:
                for fil in ['libmetis.a', 'libparmetis.a']:
                    src = os.path.join(self.cfg['start_dir'], fil)
                    dst = os.path.join(libdir, fil)
                    shutil.copy2(src, dst)
            except OSError, err:
                raise EasyBuildError("Copying files to installation dir failed: %s", err)

            # include files
            try:
                src = os.path.join(self.cfg['start_dir'], 'parmetis.h')
                dst = os.path.join(includedir, 'parmetis.h')
Esempio n. 43
0
    def build_step(self):
        """
        Custom build procedure for TensorFlow, using Bazel.

        Patches hardcoded tool/compiler locations in the generated CROSSTOOL
        files, composes and runs the 'bazel build' command (taking CUDA and
        mkl-dnn dependencies into account), and finally runs the generated
        'build_pip_package' script to produce the .whl package.
        """

        # pre-create target installation directory
        mkdir(os.path.join(self.installdir, self.pylibdir), parents=True)

        # binutils is required: its bin/ directory replaces /usr/bin in the patched CROSSTOOL files
        binutils_root = get_software_root('binutils')
        if binutils_root:
            binutils_bin = os.path.join(binutils_root, 'bin')
        else:
            raise EasyBuildError("Failed to determine installation prefix for binutils")

        # prefer GCCcore over the full GCC module for locating compiler files
        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        if gcc_root:
            gcc_lib64 = os.path.join(gcc_root, 'lib64')
            gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

            # figure out location of GCC include files
            res = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if res and len(res) == 1:
                gcc_lib_inc = res[0]
            else:
                raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", res)

            # make sure include-fixed directory is where we expect it to be
            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
            if not os.path.exists(gcc_lib_inc_fixed):
                raise EasyBuildError("Derived directory %s does not exist", gcc_lib_inc_fixed)

            # also check on location of include/c++/<gcc version> directory
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)
        else:
            raise EasyBuildError("Failed to determine installation prefix for GCC")

        # include/library paths that must be visible to the Bazel C++ toolchain
        inc_paths = [gcc_lib_inc, gcc_lib_inc_fixed, gcc_cplusplus_inc]
        lib_paths = [gcc_lib64]

        cuda_root = get_software_root('CUDA')
        if cuda_root:
            inc_paths.append(os.path.join(cuda_root, 'include'))
            lib_paths.append(os.path.join(cuda_root, 'lib64'))

        # fix hardcoded locations of compilers & tools
        # include-dir entries are injected both with and without resolved symlinks,
        # since either form may be required to match paths used during the build
        cxx_inc_dir_lines = '\n'.join(r'cxx_builtin_include_directory: "%s"' % resolve_path(p) for p in inc_paths)
        cxx_inc_dir_lines_no_resolv_path = '\n'.join(r'cxx_builtin_include_directory: "%s"' % p for p in inc_paths)
        regex_subs = [
            (r'-B/usr/bin/', '-B%s/ %s' % (binutils_bin, ' '.join('-L%s/' % p for p in lib_paths))),
            # drop existing cxx_builtin_include_directory entries, then re-add ours
            # right after the 'toolchain {' opening
            (r'(cxx_builtin_include_directory:).*', ''),
            (r'^toolchain {', 'toolchain {\n' + cxx_inc_dir_lines + '\n' + cxx_inc_dir_lines_no_resolv_path),
        ]
        # rewrite hardcoded /usr/bin/<tool> paths to the tools found in $PATH
        for tool in ['ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy', 'objdump', 'strip']:
            path = which(tool)
            if path:
                regex_subs.append((os.path.join('/usr', 'bin', tool), path))
            else:
                raise EasyBuildError("Failed to determine path to '%s'", tool)

        # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurences of -fPIE/-pie if -fPIC is used
        if self.toolchain.options.get('pic', None):
            regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

        # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
        for path, dirnames, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                if filename.startswith('CROSSTOOL'):
                    full_path = os.path.join(path, filename)
                    self.log.info("Patching %s", full_path)
                    apply_regex_substitutions(full_path, regex_subs)

        # dedicated temporary dirs for Bazel's output base and user root
        tmpdir = tempfile.mkdtemp(suffix='-bazel-build')
        user_root_tmpdir = tempfile.mkdtemp(suffix='-user_root')

        # compose "bazel build" command with all its options...
        cmd = [self.cfg['prebuildopts'], 'bazel', '--output_base=%s' % tmpdir,
               '--install_base=%s' % os.path.join(tmpdir, 'inst_base'),
               '--output_user_root=%s' % user_root_tmpdir, 'build']

        # build with optimization enabled
        # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode
        cmd.append('--compilation_mode=opt')

        # select 'opt' config section (this is *not* the same as --compilation_mode=opt!)
        # https://docs.bazel.build/versions/master/user-manual.html#flag--config
        cmd.append('--config=opt')

        # make Bazel print full command line + make it verbose on failures
        # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands
        # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures
        cmd.extend(['--subcommands', '--verbose_failures'])

        # limit the number of parallel jobs running simultaneously (useful on KNL)...
        cmd.append('--jobs=%s' % self.cfg['parallel'])

        if self.toolchain.options.get('pic', None):
            cmd.append('--copt="-fPIC"')

        # include install location of Python packages in $PYTHONPATH,
        # and specify that value of $PYTHONPATH should be passed down into Bazel build environment;
        # this is required to make sure that Python packages included as extensions are found at build time;
        # see also https://github.com/tensorflow/tensorflow/issues/22395
        pythonpath = os.getenv('PYTHONPATH', '')
        env.setvar('PYTHONPATH', '%s:%s' % (os.path.join(self.installdir, self.pylibdir), pythonpath))

        cmd.append('--action_env=PYTHONPATH')

        # use same configuration for both host and target programs, which can speed up the build
        # only done when optarch is enabled, since this implicitely assumes that host and target platform are the same
        # see https://docs.bazel.build/versions/master/guide.html#configurations
        if self.toolchain.options.get('optarch'):
            cmd.append('--distinct_host_configuration=false')

        cmd.append(self.cfg['buildopts'])

        if cuda_root:
            cmd.append('--config=cuda')

        # if mkl-dnn is listed as a dependency it is used. Otherwise downloaded if with_mkl_dnn is true
        mkl_root = get_software_root('mkl-dnn')
        if mkl_root:
            cmd.extend(['--config=mkl'])
            # exports are prepended so they precede 'bazel' in the final shell command
            cmd.insert(0, "export TF_MKL_DOWNLOAD=0 &&")
            cmd.insert(0, "export TF_MKL_ROOT=%s &&" % mkl_root)
        elif self.cfg['with_mkl_dnn']:
            # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn)
            cmd.extend(['--config=mkl'])
            cmd.insert(0, "export TF_MKL_DOWNLOAD=1 && ")

        # specify target of the build command as last argument
        cmd.append('//tensorflow/tools/pip_package:build_pip_package')

        run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True)

        # run generated 'build_pip_package' script to build the .whl
        cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir
        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Esempio n. 44
0
    def configure_step(self):
        """Configure the stage 1 Clang build with CMake."""

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            # bootstrapping requires object dirs for the later stages as well
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir,
                                                    'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir,
                                                    'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            # the sanitizer tests need unlimited virtual memory but a bounded
            # stack size; if either condition does not hold, disable them
            # (unless --strict=error is in effect)
            san_disabled = False

            (vmemlim, _) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                san_disabled = True
                self.log.warn(
                    "There is a virtual memory limit set of %s KB. The tests of the "
                    "sanitizers will be disabled as they need unlimited virtual "
                    "memory unless --strict=error is used." % vmemlim.strip())

            (stacklim, _) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                san_disabled = True
                self.log.warn(
                    "The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                    "to fail. The sanitizers tests will be disabled unless --strict=error is used."
                )

            skip_requested = self.cfg['skip_sanitizer_tests']
            if (san_disabled or skip_requested) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # create the stage 1 build directory and move into it
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # Clang cannot locate the GCC installation on its own, since GCC and
        # Clang live under different prefixes; point it there explicitly.
        # Prefer GCCcore (GCC on top of GCCcore is merely a wrapper around
        # GCCcore and binutils), fall back to GCC.
        gcc_prefix = get_software_root('GCCcore')
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts',
                        "-DGCC_INSTALL_PREFIX='%s' " % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        self.cfg['configopts'] += "-DCMAKE_BUILD_TYPE=Release "

        assertions_flag = "ON" if self.cfg['assertions'] else "OFF"
        self.cfg['configopts'] += "-DLLVM_ENABLE_ASSERTIONS=%s " % assertions_flag

        targets = ';'.join(self.cfg['build_targets'])
        self.cfg['configopts'] += '-DLLVM_TARGETS_TO_BUILD="%s" ' % targets

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
    def install_step(self):
        """
        Symlink target OpenSSL installation.

        If all required components of the system OpenSSL (binary, engines,
        headers, libraries) were located, populate the install dir with
        symlinks to them; otherwise fall back to a regular OpenSSL install.
        """

        # NOTE(review): self.system_ssl and self.target_ssl_libs are presumably
        # populated in an earlier step of this easyblock — confirm
        if all(self.system_ssl[key]
               for key in ('bin', 'engines', 'include', 'lib')):
            # note: symlink to individual files, not directories,
            # since directory symlinks get resolved easily...

            # link OpenSSL libraries in system
            lib64_dir = os.path.join(self.installdir, 'lib64')
            lib64_engines_dir = os.path.join(
                lib64_dir, os.path.basename(self.system_ssl['engines']))
            mkdir(lib64_engines_dir, parents=True)

            # link existing known libraries
            ssl_syslibdir = os.path.dirname(self.system_ssl['lib'])
            lib_files = [
                os.path.join(ssl_syslibdir, x) for x in self.target_ssl_libs
            ]
            for libso in lib_files:
                symlink(libso, os.path.join(lib64_dir,
                                            os.path.basename(libso)))

            # link engines library files
            engine_lib_pattern = [
                os.path.join(self.system_ssl['engines'], '*')
            ]
            for engine_lib in expand_glob_paths(engine_lib_pattern):
                symlink(
                    engine_lib,
                    os.path.join(lib64_engines_dir,
                                 os.path.basename(engine_lib)))

            # relative symlink for unversioned libraries
            # (done from within lib64/ so the link targets stay relative)
            cwd = change_dir(lib64_dir)
            for libso in self.target_ssl_libs:
                unversioned_lib = '%s.%s' % (libso.split('.')[0],
                                             get_shared_lib_ext())
                symlink(libso, unversioned_lib, use_abspath_source=False)
            change_dir(cwd)

            # link OpenSSL headers in system
            include_dir = os.path.join(self.installdir, 'include',
                                       self.name.lower())
            mkdir(include_dir, parents=True)
            include_pattern = [os.path.join(self.system_ssl['include'], '*')]
            for header_file in expand_glob_paths(include_pattern):
                symlink(
                    header_file,
                    os.path.join(include_dir, os.path.basename(header_file)))

            # link OpenSSL binary in system
            bin_dir = os.path.join(self.installdir, 'bin')
            mkdir(bin_dir)
            symlink(self.system_ssl['bin'],
                    os.path.join(bin_dir, self.name.lower()))

        else:
            # install OpenSSL component
            print_warning(
                "Not all OpenSSL components found, falling back to OpenSSL in EasyBuild!"
            )
            super(EB_OpenSSL_wrapper, self).install_step()
    def configure_step(self):
        """
        Run CMake for stage 1 Clang.

        Besides pointing Clang at the GCC installation, this configures RTTI,
        the default OpenMP runtime, assertions, Polly, Z3 support for the
        static analyzer, build targets, hwloc-based affinity, and OpenMP
        offload support for NVPTX (CUDA) and AMDGPU targets.
        """

        if all(dep['name'] != 'ncurses' for dep in self.cfg['dependencies']):
            print_warning('Clang requires ncurses to run, did you forgot to add it to dependencies?')

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            # bootstrap builds need object dirs for the later stages too
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir, 'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir, 'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            disable_san_tests = False
            # all sanitizer tests will fail when there's a limit on the vmem
            # this is ugly but I haven't found a cleaner way so far
            (vmemlim, ec) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn("There is a virtual memory limit set of %s KB. The tests of the "
                              "sanitizers will be disabled as they need unlimited virtual "
                              "memory unless --strict=error is used." % vmemlim.strip())

            # the same goes for unlimited stacksize
            (stacklim, ec) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn("The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                              "to fail. The sanitizers tests will be disabled unless --strict=error is used.")

            if (disable_san_tests or self.cfg['skip_sanitizer_tests']) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # First try with GCCcore, as GCC built on top of GCCcore is just a wrapper for GCCcore and binutils,
        # instead of a full-fledge compiler
        gcc_prefix = get_software_root('GCCcore')

        # If that doesn't work, try with GCC
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')

        # If that doesn't work either, print error and exit
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts', "-DGCC_INSTALL_PREFIX='%s'" % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        # Configure some default options
        if self.cfg["enable_rtti"]:
            self.cfg.update('configopts', '-DLLVM_REQUIRES_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_EH=ON')
        if self.cfg["default_openmp_runtime"]:
            self.cfg.update(
                'configopts',
                '-DCLANG_DEFAULT_OPENMP_RUNTIME=%s' % self.cfg["default_openmp_runtime"]
            )

        if self.cfg['assertions']:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=ON")
        else:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=OFF")

        if self.cfg["usepolly"]:
            self.cfg.update('configopts', "-DLINK_POLLY_INTO_TOOLS=ON")

        # If Z3 is included as a dep, enable support in static analyzer (if enabled)
        if self.cfg["static_analyzer"] and LooseVersion(self.version) >= LooseVersion('9.0.0'):
            z3_root = get_software_root("Z3")
            if z3_root:
                self.cfg.update('configopts', "-DLLVM_ENABLE_Z3_SOLVER=ON")
                self.cfg.update('configopts', "-DLLVM_Z3_INSTALL_DIR=%s" % z3_root)

        build_targets = self.cfg['build_targets']

        if self.cfg["usepolly"] and "NVPTX" in build_targets:
            self.cfg.update('configopts', "-DPOLLY_ENABLE_GPGPU_CODEGEN=ON")

        self.cfg.update('configopts', '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets))

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        # If hwloc is included as a dep, use it in OpenMP runtime for affinity
        hwloc_root = get_software_root('hwloc')
        if hwloc_root:
            self.cfg.update('configopts', '-DLIBOMP_USE_HWLOC=ON')
            self.cfg.update('configopts', '-DLIBOMP_HWLOC_INSTALL_DIR=%s' % hwloc_root)

        # If 'NVPTX' is in the build targets we assume the user would like OpenMP offload support as well
        if 'NVPTX' in build_targets:
            # list of CUDA compute capabilities to use can be specifed in two ways (where (2) overrules (1)):
            # (1) in the easyconfig file, via the custom cuda_compute_capabilities;
            # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option;
            ec_cuda_cc = self.cfg['cuda_compute_capabilities']
            cfg_cuda_cc = build_option('cuda_compute_capabilities')
            cuda_cc = cfg_cuda_cc or ec_cuda_cc or []
            if not cuda_cc:
                raise EasyBuildError("Can't build Clang with CUDA support "
                                     "without specifying 'cuda-compute-capabilities'")
            default_cc = self.cfg['default_cuda_capability'] or min(cuda_cc)
            if not self.cfg['default_cuda_capability']:
                print_warning("No default CUDA capability defined! "
                              "Using '%s' taken as minimum from 'cuda_compute_capabilities'" % default_cc)
            # CMake expects e.g. 'sm_70' / '70', not '7.0', so strip the dots
            cuda_cc = [cc.replace('.', '') for cc in cuda_cc]
            default_cc = default_cc.replace('.', '')
            self.cfg.update('configopts', '-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_%s' % default_cc)
            self.cfg.update('configopts', '-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES=%s' % ','.join(cuda_cc))
        # If we don't want to build with CUDA (not in dependencies) trick CMakes FindCUDA module into not finding it by
        # using the environment variable which is used as-is and later checked for a falsy value when determining
        # whether CUDA was found
        if not get_software_root('CUDA'):
            setvar('CUDA_NVCC_EXECUTABLE', 'IGNORE')
        # If 'AMDGPU' is in the build targets we assume the user would like OpenMP offload support for AMD
        if 'AMDGPU' in build_targets:
            if not get_software_root('ROCR-Runtime'):
                raise EasyBuildError("Can't build Clang with AMDGPU support "
                                     "without dependency 'ROCR-Runtime'")
            ec_amdgfx = self.cfg['amd_gfx_list']
            if not ec_amdgfx:
                raise EasyBuildError("Can't build Clang with AMDGPU support "
                                     "without specifying 'amd_gfx_list'")
            self.cfg.update('configopts', '-DLIBOMPTARGET_AMDGCN_GFXLIST=%s' % ' '.join(ec_amdgfx))

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
Esempio n. 47
0
    def configure_step(self):
        """
        Custom configure procedure for Trilinos, using CMake.

        Composes the CMake configure options: verbosity, compiler flags,
        OpenMP/MPI support, shared vs. static libraries, build type, and the
        third-party libraries (TPLs) available as dependencies (BLAS/LAPACK,
        MKL, SuiteSparse/UMFPACK, BLACS, ScaLAPACK, PETSc, ...).
        Configuration is done via a freshly created short-path build
        directory, to avoid problems with overly long paths.
        """

        # enable verbose output if desired
        if self.cfg['verbose']:
            for x in ["CONFIGURE", "MAKEFILE"]:
                self.cfg.update('configopts',
                                "-DTrilinos_VERBOSE_%s:BOOL=ON" % x)

        # compiler flags, seeded from the toolchain environment
        cflags = [os.getenv('CFLAGS')]
        cxxflags = [os.getenv('CXXFLAGS')]
        fflags = [os.getenv('FFLAGS')]

        # these MPI implementations need -DMPICH_IGNORE_CXX_SEEK to avoid
        # conflicts between the MPI C++ bindings and stdio's SEEK_* macros
        ignore_cxx_seek_mpis = [
            toolchain.INTELMPI, toolchain.MPICH, toolchain.MPICH2,
            toolchain.MVAPICH2
        ]  #@UndefinedVariable
        ignore_cxx_seek_flag = "-DMPICH_IGNORE_CXX_SEEK"
        if self.toolchain.mpi_family() in ignore_cxx_seek_mpis:
            cflags.append(ignore_cxx_seek_flag)
            cxxflags.append(ignore_cxx_seek_flag)
            fflags.append(ignore_cxx_seek_flag)

        self.cfg.update('configopts',
                        '-DCMAKE_C_FLAGS="%s"' % ' '.join(cflags))
        self.cfg.update('configopts',
                        '-DCMAKE_CXX_FLAGS="%s"' % ' '.join(cxxflags))
        self.cfg.update('configopts',
                        '-DCMAKE_Fortran_FLAGS="%s"' % ' '.join(fflags))

        # OpenMP
        if self.cfg['openmp']:
            self.cfg.update('configopts', "-DTrilinos_ENABLE_OpenMP:BOOL=ON")

        # MPI
        if self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', "-DTPL_ENABLE_MPI:BOOL=ON")

        # shared libraries
        if self.cfg['shared_libs']:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=ON")
        else:
            self.cfg.update('configopts', "-DBUILD_SHARED_LIBS:BOOL=OFF")

        # release or debug build, depending on toolchain options
        if self.toolchain.options['debug']:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=DEBUG")
        else:
            self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE:STRING=RELEASE")

        # enable full testing
        self.cfg.update('configopts', "-DTrilinos_ENABLE_TESTS:BOOL=ON")
        self.cfg.update('configopts',
                        "-DTrilinos_ENABLE_ALL_FORWARD_DEP_PACKAGES:BOOL=ON")

        # strips 'lib' prefix and '.a' suffix from static library filenames
        lib_re = re.compile("^lib(.*).a$")

        # BLAS, LAPACK
        for dep in ["BLAS", "LAPACK"]:
            self.cfg.update('configopts', '-DTPL_ENABLE_%s:BOOL=ON' % dep)
            libdirs = os.getenv('%s_LIB_DIR' % dep)
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                libdirs += ";%s/lib64" % get_software_root('GCC')
            self.cfg.update('configopts',
                            '-D%s_LIBRARY_DIRS="%s"' % (dep, libdirs))
            libs = os.getenv('%s_MT_STATIC_LIBS' % dep).split(',')
            lib_names = ';'.join([lib_re.search(l).group(1) for l in libs])
            if self.toolchain.comp_family() == toolchain.GCC:  #@UndefinedVariable
                # explicitely specify static lib!
                lib_names += ";libgfortran.a"
            self.cfg.update('configopts',
                            '-D%s_LIBRARY_NAMES="%s"' % (dep, lib_names))

        # MKL
        if get_software_root('imkl') and LooseVersion(
                self.version) >= LooseVersion('12.12'):
            self.cfg.update('configopts', "-DTPL_ENABLE_MKL:BOOL=ON")
            self.cfg.update(
                'configopts', '-DMKL_LIBRARY_DIRS:PATH="%s/lib/intel64"' %
                os.getenv('MKLROOT'))
            self.cfg.update(
                'configopts',
                '-DMKL_INCLUDE_DIRS:PATH="%s/include"' % os.getenv('MKLROOT'))

        # UMFPACK is part of SuiteSparse
        suitesparse = get_software_root('SuiteSparse')
        if suitesparse:
            self.cfg.update('configopts', "-DTPL_ENABLE_UMFPACK:BOOL=ON")
            incdirs, libdirs, libnames = [], [], []
            for lib in [
                    "UMFPACK", "CHOLMOD", "COLAMD", "AMD", "CCOLAMD", "CAMD"
            ]:
                incdirs.append(os.path.join(suitesparse, lib, "Include"))
                libdirs.append(os.path.join(suitesparse, lib, "Lib"))
                libnames.append(lib.lower())

            # add SuiteSparse config lib, it is in recent versions of suitesparse
            libdirs.append(os.path.join(suitesparse, 'SuiteSparse_config'))
            libnames.append('suitesparseconfig')
            # because of "SuiteSparse_config.c:function SuiteSparse_tic: error: undefined reference to 'clock_gettime'"
            libnames.append('rt')

            # required to resolve METIS symbols in SuiteSparse's libcholmod.a
            # doesn't need to be full location, probably because it can be found via $LIBRARY_PATH
            # not easy to know whether it should come from METIS or ParMETIS...
            # see https://answers.launchpad.net/dorsal/+question/223167
            libnames.append('libmetis.a')

            self.cfg.update(
                'configopts',
                '-DUMFPACK_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            self.cfg.update(
                'configopts',
                '-DUMFPACK_LIBRARY_DIRS:PATH="%s"' % ';'.join(libdirs))
            self.cfg.update(
                'configopts',
                '-DUMFPACK_LIBRARY_NAMES:STRING="%s"' % ';'.join(libnames))

        # BLACS
        if get_software_root('BLACS'):
            self.cfg.update('configopts', "-DTPL_ENABLE_BLACS:BOOL=ON")
            self.cfg.update(
                'configopts',
                '-DBLACS_INCLUDE_DIRS:PATH="%s"' % os.getenv('BLACS_INC_DIR'))
            self.cfg.update(
                'configopts',
                '-DBLACS_LIBRARY_DIRS:PATH="%s"' % os.getenv('BLACS_LIB_DIR'))
            blacs_lib_names = os.getenv('BLACS_STATIC_LIBS').split(',')
            blacs_lib_names = [
                lib_re.search(x).group(1) for x in blacs_lib_names
            ]
            self.cfg.update(
                'configopts', '-DBLACS_LIBRARY_NAMES:STRING="%s"' %
                (';'.join(blacs_lib_names)))

        # ScaLAPACK
        if get_software_root('ScaLAPACK'):
            self.cfg.update('configopts', "-DTPL_ENABLE_SCALAPACK:BOOL=ON")
            self.cfg.update(
                'configopts', '-DSCALAPACK_INCLUDE_DIRS:PATH="%s"' %
                os.getenv('SCALAPACK_INC_DIR'))
            self.cfg.update(
                'configopts', '-DSCALAPACK_LIBRARY_DIRS:PATH="%s;%s"' %
                (os.getenv('SCALAPACK_LIB_DIR'), os.getenv('BLACS_LIB_DIR')))
        # PETSc
        petsc = get_software_root('PETSc')
        if petsc:
            self.cfg.update('configopts', "-DTPL_ENABLE_PETSC:BOOL=ON")
            incdirs = [os.path.join(petsc, "include")]
            self.cfg.update(
                'configopts',
                '-DPETSC_INCLUDE_DIRS:PATH="%s"' % ';'.join(incdirs))
            # NOTE(review): this assumes SuiteSparse, FFTW and ParMETIS are
            # also available whenever PETSc is; os.path.join would fail on
            # None otherwise — verify against the easyconfigs using this block
            petsc_libdirs = [
                os.path.join(petsc, "lib"),
                os.path.join(suitesparse, "UMFPACK", "Lib"),
                os.path.join(suitesparse, "CHOLMOD", "Lib"),
                os.path.join(suitesparse, "COLAMD", "Lib"),
                os.path.join(suitesparse, "AMD", "Lib"),
                os.getenv('FFTW_LIB_DIR'),
                os.path.join(get_software_root('ParMETIS'), "Lib")
            ]
            self.cfg.update(
                'configopts',
                '-DPETSC_LIBRARY_DIRS:PATH="%s"' % ';'.join(petsc_libdirs))
            petsc_libnames = [
                "petsc", "umfpack", "cholmod", "colamd", "amd", "parmetis",
                "metis"
            ]
            petsc_libnames += [
                lib_re.search(x).group(1)
                for x in os.getenv('FFTW_STATIC_LIBS').split(',')
            ]
            self.cfg.update(
                'configopts',
                '-DPETSC_LIBRARY_NAMES:STRING="%s"' % ';'.join(petsc_libnames))

        # other Third-Party Libraries (TPLs)
        deps = self.cfg.dependencies()
        builddeps = self.cfg.builddependencies() + ["SuiteSparse"]
        deps = [dep['name'] for dep in deps if not dep['name'] in builddeps]
        for dep in deps:
            deproot = get_software_root(dep)
            if deproot:
                # map EasyBuild software names to Trilinos TPL names
                depmap = {
                    'SCOTCH': 'Scotch',
                }
                dep = depmap.get(dep, dep)
                self.cfg.update('configopts', "-DTPL_ENABLE_%s:BOOL=ON" % dep)
                incdir = os.path.join(deproot, "include")
                self.cfg.update('configopts',
                                '-D%s_INCLUDE_DIRS:PATH="%s"' % (dep, incdir))
                libdir = os.path.join(deproot, "lib")
                self.cfg.update('configopts',
                                '-D%s_LIBRARY_DIRS:PATH="%s"' % (dep, libdir))

        # extensions_step
        if self.cfg['all_exts']:
            self.cfg.update('configopts',
                            "-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON")

        else:
            for ext in self.cfg['exts_list']:
                self.cfg.update('configopts', "-DTrilinos_ENABLE_%s=ON" % ext)

        # packages to skip
        skip_exts = self.cfg['skip_exts']
        if skip_exts:
            for ext in skip_exts:
                self.cfg.update('configopts',
                                "-DTrilinos_ENABLE_%s:BOOL=OFF" % ext)

        # building in source dir not supported
        # + if the build directory is a long path, problems like "Argument list too long" may occur
        # cfr. https://github.com/trilinos/Trilinos/issues/2434
        # so, try to create build directory with shorter path length to build in
        # note: use string.ascii_letters, since string.letters was removed in Python 3
        salt = ''.join(random.choice(string.ascii_letters) for _ in range(5))
        self.short_start_dir = os.path.join(build_path(),
                                            self.name + '-' + salt)
        if os.path.exists(self.short_start_dir):
            raise EasyBuildError(
                "Short start directory %s for Trilinos already exists?!",
                self.short_start_dir)

        self.log.info("Length of path to original start directory: %s",
                      len(self.start_dir))
        self.log.info("Short start directory: %s (length: %d)",
                      self.short_start_dir, len(self.short_start_dir))

        # short path to sources (symlink) and to a build dir (symlinked obj dir)
        mkdir(self.short_start_dir)
        short_src_dir = os.path.join(self.short_start_dir, 'src')
        symlink(self.start_dir, short_src_dir)
        short_build_dir = os.path.join(self.short_start_dir, 'obj')
        obj_dir = os.path.join(self.builddir, 'obj')
        mkdir(obj_dir)
        symlink(obj_dir, short_build_dir)

        # configure using cmake
        super(EB_Trilinos, self).configure_step(srcdir=short_src_dir,
                                                builddir=short_build_dir)
Esempio n. 48
0
class EB_NWChem(ConfigureMake):
    """Support for building/installing NWChem."""
    def __init__(self, *args, **kwargs):
        """Initialisation of custom class variables for NWChem."""
        super(EB_NWChem, self).__init__(*args, **kwargs)

        self.test_cases_dir = None

        # NWChem hardcodes the location of its runtime config file to $HOME/.nwchemrc,
        # there's no way to make it use a config file at another path,
        # so we keep track of that path and later symlink it to a local copy
        self.home_nwchemrc = os.path.join(os.getenv('HOME'), '.nwchemrc')

        # pick a temporary location that is common across multiple nodes in a cluster
        # for the local copy; tempfile.gettempdir() is unsuitable since it follows
        # $TMPDIR, which is typically set to a unique directory in jobs;
        # default to /tmp, customisable via the $EB_NWCHEM_TMPDIR environment variable
        shared_tmpdir = os.getenv('EB_NWCHEM_TMPDIR', '/tmp')

        # local NWChem .nwchemrc config file, to which the symlink will point;
        # using a shared target allows multiple parallel builds (on different
        # nodes) to use the same symlink
        self.local_nwchemrc = os.path.join(shared_tmpdir, os.getenv('USER'),
                                           'easybuild_nwchem', '.nwchemrc')

    @staticmethod
    def extra_options():
        """Custom easyconfig parameters for NWChem."""
        # NOTE: on LINUX64 with Infiniband, valid choices for 'armci_network'
        # include OPENIB, MPI-MT, MPI-SPAWN and MELLANOX
        return ConfigureMake.extra_options({
            'target': ["LINUX64", "Target platform", CUSTOM],
            'armci_network': ["OPENIB", "Network protocol to use", CUSTOM],
            'msg_comms': ["MPI", "Type of message communication", CUSTOM],
            'modules': ["all", "NWChem modules to build", CUSTOM],
            'lib_defines': ["", "Additional defines for C preprocessor", CUSTOM],
            'tests': [True, "Run example test cases", CUSTOM],
            # lots of tests are known to fail, so tolerate a certain fail ratio
            'max_fail_ratio': [0.5, "Maximum test case fail ratio", CUSTOM],
        })

    def setvar_env_makeopt(self, name, value):
        """Set a variable both in the environment and as an option passed to make.

        :param name: name of the variable to set
        :param value: value to assign to it (quoted in the make option)
        """
        env.setvar(name, value)
        self.cfg.update('buildopts', "%s='%s'" % (name, value))

    def configure_step(self):
        """Custom configuration procedure for NWChem.

        Validates the $HOME/.nwchemrc symlink (NWChem hardcodes that path),
        then symlinks the start directory to a shorter path, since building
        NWChem in a long path name is known to cause problems.

        :raise EasyBuildError: if $HOME/.nwchemrc exists but is not the
                               expected symlink, or if setting up the short
                               build path fails
        """
        # check whether a (valid) symlink to a .nwchemrc config file exists (via a dummy file if necessary)
        # fail early if the link is not what we expect, since running the test cases will likely fail in this case
        try:
            if os.path.exists(self.home_nwchemrc) or os.path.islink(
                    self.home_nwchemrc):
                # create a dummy file to check symlink
                if not os.path.exists(self.local_nwchemrc):
                    write_file(self.local_nwchemrc, 'dummy')

                self.log.debug(
                    "Contents of %s: %s", os.path.dirname(self.local_nwchemrc),
                    os.listdir(os.path.dirname(self.local_nwchemrc)))

                if os.path.islink(self.home_nwchemrc):
                    home_nwchemrc_target = os.readlink(self.home_nwchemrc)
                    if home_nwchemrc_target != self.local_nwchemrc:
                        raise EasyBuildError(
                            "Found %s, but it's not a symlink to %s. "
                            "Please (re)move %s while installing NWChem; it can be restored later",
                            self.home_nwchemrc, self.local_nwchemrc,
                            self.home_nwchemrc)
                # ok to remove, we'll recreate it anyway
                remove_file(self.local_nwchemrc)
        # Python 3 compatible 'as err' syntax, consistent with the rest of the file
        except (IOError, OSError) as err:
            raise EasyBuildError("Failed to validate %s symlink: %s",
                                 self.home_nwchemrc, err)

        # building NWChem in a long path name is an issue, so let's try to make sure we have a short one
        try:
            # NWChem insists that version is in name of build dir
            tmpdir = tempfile.mkdtemp(suffix='-%s-%s' %
                                      (self.name, self.version))
            # remove created directory, since we're not going to use it as is
            os.rmdir(tmpdir)
            # avoid having '['/']' characters in build dir name, NWChem doesn't like that
            start_dir = tmpdir.replace('[', '_').replace(']', '_')
            mkdir(os.path.dirname(start_dir), parents=True)
            symlink(self.cfg['start_dir'], start_dir)
            change_dir(start_dir)
            self.cfg['start_dir'] = start_dir
        except OSError as err:
            raise EasyBuildError(
                "Failed to symlink build dir to a shorter path name: %s", err)
# Esempio n. 49 (scraper artifact marking a file boundary; kept as comment)
    def install_step_classic(self,
                             silent_cfg_names_map=None,
                             silent_cfg_extras=None):
        """Actual installation for versions prior to 2021.x

        - create silent cfg file
        - set environment parameters
        - execute command

        :param silent_cfg_names_map: optional overrides for the key names used
                                     in silent.cfg (activation/license/install keys)
        :param silent_cfg_extras: optional dict of extra key=value entries to
                                  append to silent.cfg
        :return: result of running the install command
        :raise EasyBuildError: on unknown activation type, empty component list,
                               or non-dict silent_cfg_extras
        """
        if silent_cfg_names_map is None:
            silent_cfg_names_map = {}

        # compose license-related portion of silent.cfg, if a license is needed at all
        if self.serial_number or self.requires_runtime_license:
            lic_entry = ""
            if self.serial_number:
                # serial-number activation takes precedence over any license file/server
                lic_entry = "%(license_serial_number)s=%(serial_number)s"
                self.cfg['license_activation'] = ACTIVATION_SERIAL
            else:
                # license file entry is only applicable with license file or server type of activation
                # also check whether specified activation type makes sense
                lic_file_server_activations = [
                    ACTIVATION_EXIST_LIC, ACTIVATION_LIC_FILE,
                    ACTIVATION_LIC_SERVER
                ]
                other_activations = [
                    act for act in ACTIVATION_TYPES
                    if act not in lic_file_server_activations
                ]
                if self.cfg[
                        'license_activation'] in lic_file_server_activations:
                    lic_entry = "%(license_file_name)s=%(license_file)s"
                elif not self.cfg['license_activation'] in other_activations:
                    raise EasyBuildError(
                        "Unknown type of activation specified: %s (known :%s)",
                        self.cfg['license_activation'], ACTIVATION_TYPES)
            # fill in the template keys, honouring any overrides from silent_cfg_names_map
            silent = '\n'.join([
                "%(activation_name)s=%(activation)s",
                lic_entry,
                ""  # Add a newline at the end, so we can easily append if needed
            ]) % {
                'activation_name':
                silent_cfg_names_map.get('activation_name', ACTIVATION_NAME),
                'activation':
                self.cfg['license_activation'],
                'license_file_name':
                silent_cfg_names_map.get('license_file_name',
                                         LICENSE_FILE_NAME),
                'license_file':
                self.license_file,
                'license_serial_number':
                silent_cfg_names_map.get('license_serial_number',
                                         LICENSE_SERIAL_NUMBER),
                'serial_number':
                self.serial_number,
            }
        else:
            self.log.debug(
                "No license required, so not including license specifications in silent.cfg"
            )
            silent = ''

        # common (non-license) portion of silent.cfg: install dir, EULA, install mode
        silent += '\n'.join([
            "%(install_dir_name)s=%(install_dir)s",
            "ACCEPT_EULA=accept",
            "%(install_mode_name)s=%(install_mode)s",
            "CONTINUE_WITH_OPTIONAL_ERROR=yes",
            ""  # Add a newline at the end, so we can easily append if needed
        ]) % {
            'install_dir_name':
            silent_cfg_names_map.get('install_dir_name', INSTALL_DIR_NAME),
            'install_dir':
            silent_cfg_names_map.get('install_dir', self.installdir),
            'install_mode':
            silent_cfg_names_map.get('install_mode', INSTALL_MODE_2015),
            'install_mode_name':
            silent_cfg_names_map.get('install_mode_name',
                                     INSTALL_MODE_NAME_2015),
        }

        # optional COMPONENTS entry; quoting rules differ per installer version
        if self.install_components is not None:
            if len(
                    self.install_components
            ) == 1 and self.install_components[0] in [COMP_ALL, COMP_DEFAULTS]:
                # no quotes should be used for ALL or DEFAULTS
                silent += 'COMPONENTS=%s\n' % self.install_components[0]
            elif self.install_components:
                # a list of components is specified (needs quotes)
                components = ';'.join(self.install_components)
                if LooseVersion(self.version) >= LooseVersion('2017'):
                    # for versions 2017.x and newer, double quotes should not be there...
                    silent += 'COMPONENTS=%s\n' % components
                else:
                    silent += 'COMPONENTS="%s"\n' % components
            else:
                raise EasyBuildError(
                    "Empty list of matching components obtained via %s",
                    self.cfg['components'])

        # caller-supplied extra entries are appended verbatim as key=value lines
        if silent_cfg_extras is not None:
            if isinstance(silent_cfg_extras, dict):
                silent += '\n'.join("%s=%s" % (key, value)
                                    for (key,
                                         value) in silent_cfg_extras.items())
            else:
                raise EasyBuildError("silent_cfg_extras needs to be a dict")

        # we should be already in the correct directory
        silentcfg = os.path.join(os.getcwd(), 'silent.cfg')
        write_file(silentcfg, silent)
        self.log.debug("Contents of %s:\n%s", silentcfg, silent)

        # workaround for mktmp: create tmp dir and use it
        tmpdir = os.path.join(self.cfg['start_dir'], 'mytmpdir')
        mkdir(tmpdir, parents=True)

        tmppathopt = ''
        if self.cfg['usetmppath']:
            env.setvar('TMP_PATH', tmpdir)
            tmppathopt = "-t %s" % tmpdir

        # set some extra env variables
        env.setvar('LOCAL_INSTALL_VERBOSE', '1')
        env.setvar('VERBOSE_MODE', '1')

        env.setvar('INSTALL_PATH', self.installdir)

        # perform installation
        cmd = ' '.join([
            self.cfg['preinstallopts'],
            './install.sh',
            tmppathopt,
            '-s ' + silentcfg,
            self.cfg['installopts'],
        ])

        return run_cmd(cmd, log_all=True, simple=True, log_output=True)
# Esempio n. 50 (scraper artifact marking a file boundary; kept as comment)
    def configure_step(self):
        """Run CMake for stage 1 Clang.

        Prepares the stage 1 (and, when bootstrapping, stage 2/3) build
        directories, optionally disables sanitizer tests, locates the GCC
        installation prefix, composes all CMake options (RTTI, OpenMP runtime,
        assertions, Polly, build targets), then defers to the parent CMake
        configure step.

        :raise EasyBuildError: if no GCC/GCCcore dependency is found, if an
                               unknown/unsupported build target is requested,
                               or if no default targets exist for this CPU
        """
        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            # stage 2/3 directories are only needed for a bootstrap (self-hosted) build
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir,
                                                    'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir,
                                                    'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            disable_san_tests = False
            # all sanitizer tests will fail when there's a limit on the vmem
            # this is ugly but I haven't found a cleaner way so far
            (vmemlim, ec) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "There is a virtual memory limit set of %s KB. The tests of the "
                    "sanitizers will be disabled as they need unlimited virtual "
                    "memory unless --strict=error is used." % vmemlim.strip())

            # the same goes for unlimited stacksize
            (stacklim, ec) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                    "to fail. The sanitizers tests will be disabled unless --strict=error is used."
                )

            # only actually disable when not running in strict-error mode
            if (disable_san_tests or self.cfg['skip_sanitizer_tests']
                ) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # First try with GCCcore, as GCC built on top of GCCcore is just a wrapper for GCCcore and binutils,
        # instead of a full-fledge compiler
        gcc_prefix = get_software_root('GCCcore')

        # If that doesn't work, try with GCC
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')

        # If that doesn't work either, print error and exit
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts', "-DGCC_INSTALL_PREFIX='%s'" % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        # Configure some default options
        if self.cfg["enable_rtti"]:
            self.cfg.update('configopts', '-DLLVM_REQUIRES_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_EH=ON')
        if self.cfg["default_openmp_runtime"]:
            self.cfg.update(
                'configopts', '-DCLANG_DEFAULT_OPENMP_RUNTIME=%s' %
                self.cfg["default_openmp_runtime"])

        if self.cfg['assertions']:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=ON")
        else:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=OFF")

        if self.cfg["usepolly"]:
            self.cfg.update('configopts', "-DLINK_POLLY_INTO_TOOLS=ON")

        build_targets = self.cfg['build_targets']
        if build_targets is None:
            # derive default targets from the CPU architecture we're building on
            arch = get_cpu_architecture()
            default_targets = DEFAULT_TARGETS_MAP.get(arch, None)
            if default_targets:
                # If CUDA is included as a dep, add NVPTX as a target (could also support AMDGPU if we knew how)
                if get_software_root("CUDA"):
                    default_targets += ["NVPTX"]
                self.cfg['build_targets'] = build_targets = default_targets
                self.log.debug(
                    "Using %s as default build targets for CPU/GPU architecture %s.",
                    default_targets, arch)
            else:
                raise EasyBuildError(
                    "No default build targets defined for CPU architecture %s.",
                    arch)

        # reject any requested targets that Clang doesn't know about
        unknown_targets = [
            target for target in build_targets if target not in CLANG_TARGETS
        ]

        if unknown_targets:
            raise EasyBuildError(
                "Some of the chosen build targets (%s) are not in %s.",
                ', '.join(unknown_targets), ', '.join(CLANG_TARGETS))

        # some targets are only valid for specific Clang version ranges
        if LooseVersion(self.version) < LooseVersion(
                '3.4') and "R600" in build_targets:
            raise EasyBuildError(
                "Build target R600 not supported in < Clang-3.4")

        if LooseVersion(self.version) > LooseVersion(
                '3.3') and "MBlaze" in build_targets:
            raise EasyBuildError(
                "Build target MBlaze is not supported anymore in > Clang-3.3")

        if self.cfg["usepolly"] and "NVPTX" in build_targets:
            self.cfg.update('configopts', "-DPOLLY_ENABLE_GPGPU_CODEGEN=ON")

        self.cfg.update(
            'configopts',
            '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets))

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        # If we don't want to build with CUDA (not in dependencies) trick CMakes FindCUDA module into
        # not finding it by using the environment variable which is used as-is and later checked
        # for a falsy value when determining wether CUDA was found
        if not get_software_root('CUDA'):
            setvar('CUDA_NVCC_EXECUTABLE', 'IGNORE')

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
# Esempio n. 51 (scraper artifact marking a file boundary; kept as comment)
    def test_step(self):
        """Run available numpy unit tests, and more.

        On top of the regular test suite, installs numpy into a temporary
        location and times a 1000x1000 numpy.dot matrix product to validate
        that a decent BLAS library is being used.

        :raise EasyBuildError: if changing directories fails, if the timing
                               can not be determined, or if it exceeds the
                               'blas_test_time_limit' threshold
        """
        super(EB_numpy, self).test_step()

        # temporarily install numpy, it doesn't allow to be used straight from the source dir
        tmpdir = tempfile.mkdtemp()
        abs_pylibdirs = [
            os.path.join(tmpdir, pylibdir) for pylibdir in self.all_pylibdirs
        ]
        for pylibdir in abs_pylibdirs:
            mkdir(pylibdir, parents=True)
        pythonpath = "export PYTHONPATH=%s &&" % os.pathsep.join(
            abs_pylibdirs + ['$PYTHONPATH'])
        cmd = self.compose_install_command(tmpdir, extrapath=pythonpath)
        run_cmd(cmd, log_all=True, simple=True, verbose=False)

        try:
            pwd = os.getcwd()
            os.chdir(tmpdir)
        except OSError as err:
            # fixed typo in error message ('Faild' -> 'Failed')
            raise EasyBuildError("Failed to change to %s: %s", tmpdir, err)

        # evaluate performance of numpy.dot (3 runs, 3 loops each)
        size = 1000
        cmd = ' '.join([
            pythonpath,
            '%s -m timeit -n 3 -r 3' % self.python_cmd,
            '-s "import numpy; x = numpy.random.random((%(size)d, %(size)d))"'
            % {
                'size': size
            },
            '"numpy.dot(x, x.T)"',
        ])
        (out, ec) = run_cmd(cmd, simple=False)
        self.log.debug("Test output: %s" % out)

        # fetch result; timeit reports either msec or sec per loop
        # (raw strings avoid invalid '\d' escape sequences in the patterns)
        time_msec = None
        msec_re = re.compile(
            r"\d+ loops, best of \d+: (?P<time>[0-9.]+) msec per loop")
        res = msec_re.search(out)
        if res:
            time_msec = float(res.group('time'))
        else:
            sec_re = re.compile(
                r"\d+ loops, best of \d+: (?P<time>[0-9.]+) sec per loop")
            res = sec_re.search(out)
            if res:
                time_msec = 1000 * float(res.group('time'))
            elif self.dry_run:
                # use fake value during dry run
                time_msec = 123
                self.log.warning(
                    "Using fake value for time required for %dx%d matrix dot product under dry run: %s",
                    size, size, time_msec)
            else:
                raise EasyBuildError(
                    "Failed to determine time for numpy.dot test run.")

        # make sure we observe decent performance
        if time_msec < self.cfg['blas_test_time_limit']:
            self.log.info(
                "Time for %dx%d matrix dot product: %d msec < %d msec => OK",
                size, size, time_msec, self.cfg['blas_test_time_limit'])
        else:
            raise EasyBuildError(
                "Time for %dx%d matrix dot product: %d msec >= %d msec => ERROR",
                size, size, time_msec, self.cfg['blas_test_time_limit'])

        # restore original working directory and clean up the temporary install
        try:
            os.chdir(pwd)
            rmtree2(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to change back to %s: %s", pwd, err)
    def configure_step(self):
        """
        Configure Geant4 build, either via CMake for versions more recent than 9.5,
        or using an interactive configuration procedure otherwise.
        """

        # Geant4 switched to a cmake build system in 9.5
        if LooseVersion(self.version) >= LooseVersion("9.5"):
            mkdir('configdir')
            os.chdir('configdir')
            super(EB_Geant4, self).configure_step(srcdir="..")

        else:
            pwd = self.cfg['start_dir']
            dst = self.installdir
            clhepdir = get_software_root('CLHEP')
            cmd = "%s/Configure -E -build" % pwd

            self.qanda = {# questions and answers for version 9.1.p03
                          "There exists a config.sh file. Shall I use it to set the defaults? [y]": "n",
                          "Would you like to see the instructions? [n]": "",
                          "[Type carriage return to continue]": "",
                          "Definition of G4SYSTEM variable is Linux-g++. That stands for: 1) OS : Linux" \
                            "2) Compiler : g++ To modify default settings, select number above (e.g. 2) " \
                            "[Press [Enter] for default settings]": "2",
                          "Which C++ compiler? [g++]": "$(GPP)",
                          "Confirm your selection or set recommended 'g++'! [*]": "",
                          "Definition of G4SYSTEM variable is Linux-icc. That stands for: 1) OS : Linux 2)" \
                            "Compiler : icc To modify default settings, select number above (e.g. 2) " \
                            "[Press [Enter] for default settings]": "",
                          "Do you expect to run these scripts and binaries on multiple machines? [n]": "y",
                          "Where is Geant4 source installed? [%s]" % pwd: "",
                          "Specify the path where Geant4 libraries and source files should be installed." \
                            " [%s]" % pwd: dst,
                          "Do you want to copy all Geant4 headers in one directory? [n]": "y",
                          "Please, specify default directory where ALL the Geant4 data is installed:" \
                            "G4LEVELGAMMADATA: %(pwd)s/data/PhotonEvaporation2.0 G4RADIOACTIVEDATA: " \
                            "%(pwd)s/data/RadioactiveDecay3.2 G4LEDATA: %(pwd)s/data/G4EMLOW5.1 G4NEUTRONHPDATA:    " \
                            "%(pwd)s/data/G4NDL3.12 G4ABLADATA: %(pwd)s/data/G4ABLA3.0 You will be asked about " \
                            "customizing these next. [%(pwd)s/data]" % {'pwd': pwd}: "%s/data" % dst,
                          "Directory %s/data doesn't exist. Use that name anyway? [n]" % dst: "y",
                          "Please, specify default directory where the Geant4 data is installed: " \
                            "1) G4LEVELGAMMADATA: %(dst)s/data/PhotonEvaporation2.0 2) G4RADIOACTIVEDATA: " \
                            "%(dst)s/data/RadioactiveDecay3.2 3) G4LEDATA: %(dst)s/data/G4EMLOW5.1 4) G4NEUTRONHPDATA: " \
                            "%(dst)s/data/G4NDL3.12 5) G4ABLADATA: %(dst)s/data/G4ABLA3.0 To modify default settings, " \
                            "select number above (e.g. 2) [Press [Enter] for default settings]" % {'dst': dst}: "",
                          "Please, specify where CLHEP is installed: CLHEP_BASE_DIR: ": clhepdir,
                          "Please, specify where CLHEP is installed: CLHEP_BASE_DIR: [%s]" % clhepdir: "",
                          "You can customize paths and library name of you CLHEP installation: 1) CLHEP_INCLUDE_DIR: " \
                            "%(clhepdir)s/include 2) CLHEP_LIB_DIR: %(clhepdir)s/lib 3) CLHEP_LIB: CLHEP To modify " \
                            "default settings, select number above (e.g. 2) [Press [Enter] for default settings]" % 
                            {'clhepdir': clhepdir}: "",
                          "By default 'static' (.a) libraries are built. Do you want to build 'shared' (.so) " \
                            "libraries? [n]": "y",
                          "You selected to build 'shared' (.so) libraries. Do you want to build 'static' (.a) " \
                            "libraries too? [n]": "y",
                          "Do you want to build 'global' compound libraries? [n]": "",
                          "Do you want to compile libraries in DEBUG mode (-g)? [n]": "",
                          "G4UI_NONE If this variable is set, no UI sessions nor any UI libraries are built. " \
                            "This can be useful when running a pure batch job or in a user framework having its own " \
                            "UI system. Do you want to set this variable ? [n]": "",
                          "G4UI_BUILD_XAW_SESSION G4UI_USE_XAW Specifies to include and use the XAW interfaces in " \
                            "the application to be built. The XAW (X11 Athena Widget set) extensions are required to " \
                            "activate and build this driver. [n]": "",
                          "G4UI_BUILD_XM_SESSION G4UI_USE_XM Specifies to include and use the XM Motif based user " \
                            "interfaces. The XM Motif extensions are required to activate and build this driver. [n]": "",
                          "G4VIS_NONE If this variable is set, no visualization drivers will be built or used. Do " \
                            "you want to set this variable ? [n]": "n",
                          "G4VIS_BUILD_OPENGLX_DRIVER G4VIS_USE_OPENGLX It is an interface to the de facto standard " \
                            "3D graphics library, OpenGL. It is well suited for real-time fast visualization and " \
                            "prototyping. The X11 version of the OpenGL libraries is required. [n]": "",
                          "G4VIS_BUILD_OPENGLXM_DRIVER G4VIS_USE_OPENGLXM It is an interface to the de facto " \
                            "standard 3D graphics library, OpenGL. It is well suited for real-time fast visualization " \
                            "and prototyping. The X11 version of the OpenGL libraries and the Motif Xm extension is " \
                            "required. [n]": "",
                          "G4VIS_BUILD_DAWN_DRIVER G4VIS_USE_DAWN DAWN drivers are interfaces to the Fukui Renderer " \
                            "DAWN. DAWN is a vectorized 3D PostScript processor suited to prepare technical high " \
                            "quality outputs for presentation and/or documentation. [n]": "",
                          "G4VIS_BUILD_OIX_DRIVER G4VIS_USE_OIX The OpenInventor driver is based on OpenInventor tech" \
                            "nology for scientific visualization. The X11 version of OpenInventor is required. [n]": "",
                          "G4VIS_BUILD_RAYTRACERX_DRIVER G4VIS_USE_RAYTRACERX Allows for interactive ray-tracing " \
                            "graphics through X11. The X11 package is required. [n]": "",
                          "G4VIS_BUILD_VRML_DRIVER G4VIS_USE_VRML These driver generate VRML files, which describe " \
                            "3D scenes to be visualized with a proper VRML viewer. [n]": "",
                          "G4LIB_BUILD_GDML Setting this variable will enable building of the GDML plugin module " \
                            "embedded in Geant4 for detector description persistency. It requires your system to have " \
                            "the XercesC library and headers installed. Do you want to set this variable? [n]": "",
                          "G4LIB_BUILD_G3TOG4 The utility module 'g3tog4' will be built by setting this variable. " \
                            "NOTE: it requires a valid FORTRAN compiler to be installed on your system and the " \
                            "'cernlib' command in the path, in order to build the ancillary tools! Do you want to " \
                            "build 'g3tog4' ? [n]": "",
                          "G4LIB_BUILD_ZLIB Do you want to activate compression for output files generated by the " \
                            "HepRep visualization driver? [n]": "y",
                          "G4ANALYSIS_USE Activates the configuration setup for allowing plugins to analysis tools " \
                            "based on AIDA (Astract Interfaces for Data Analysis). In order to use AIDA features and " \
                            "compliant analysis tools, the proper environment for these tools will have to be set " \
                            "(see documentation for the specific analysis tools). [n]": "",
                          "Press [Enter] to start installation or use a shell escape to edit config.sh: ": "",
                          # extra questions and answers for version 9.2.p03
                          "Directory %s doesn't exist. Use that name anyway? [n]" % dst: "y",
                          "Specify the path where the Geant4 data libraries PhotonEvaporation%s " \
                            "RadioactiveDecay%s G4EMLOW%s G4NDL%s G4ABLA%s are " \
                            "installed. For now, a flat directory structure is assumed, and this can be customized " \
                            "at the next step if needed. [%s/data]" % (self.cfg['PhotonEvaporationVersion'],
                                                                       self.cfg['G4RadioactiveDecayVersion'],
                                                                       self.cfg['G4EMLOWVersion'],
                                                                       self.cfg['G4NDLVersion'],
                                                                       self.cfg['G4ABLAVersion'],
                                                                       pwd
                                                                      ): "%s/data" % dst,
                          "Please enter 1) Another path to search in 2) 'f' to force the use of the path " \
                            "you entered previously (the data libraries are not needed to build Geant4, but " \
                            "are needed to run applications later). 3) 'c' to customize the data paths, e.g. " \
                            "if you have the data libraries installed in different locations. [f]": "",
                          "G4UI_BUILD_QT_SESSION G4UI_USE_QT Setting these variables will enable the building " \
                            "of the G4 Qt based user interface module and the use of this module in your " \
                            "applications respectively. The Qt3 or Qt4 headers, libraries and moc application are " \
                            "required to enable the building of this module. Do you want to enable build and use of " \
                            "this module? [n]": "",
                          # extra questions and answers for version 9.4.po1
                          "What is the path to the Geant4 source tree? [%s]" % pwd: "",
                          "Where should Geant4 be installed? [%s]" % pwd: dst,
                          "Do you want to install all Geant4 headers in one directory? [n]": "y",
                          "Do you want to build shared libraries? [y]": "",
                          "Do you want to build static libraries too? [n]": "",
                          "Do you want to build global libraries? [y]": "",
                          "Do you want to build granular libraries as well? [n]": "",
                          "Do you want to build libraries with debugging information? [n]": "",
                          "Specify the path where the Geant4 data libraries are installed: [%s/data]" % pwd: "%s/data" % dst,
                          "How many parallel jobs should make launch? [1]": "%s" % self.cfg['parallel'],
                          "Please enter 1) Another path to search in 2) 'f' to force the use of the path you entered " \
                            "previously (the data libraries are NOT needed to build Geant4, but are needed to run " \
                            "applications later). 3) 'c' to customize the data paths, e.g. if you have the data " \
                            "libraries installed in different locations. [f]": "",
                          "Enable building of User Interface (UI) modules? [y]": "",
                          "Enable building of the XAW (X11 Athena Widget set) UI module? [n]": "",
                          "Enable building of the X11-Motif (Xm) UI module? [n]": "",
                          "Enable building of the Qt UI module? [n]": "",
                          "Enable building of visualization drivers? [y]": "n",
                          "Enable the Geometry Description Markup Language (GDML) module? [n]": "",
                          "Enable build of the g3tog4 utility module? [n]": "",
                          "Enable internal zlib compression for HepRep visualization? [n] ": "",
                         }

            self.noqanda = [r"Compiling\s+.*?\s+\.\.\.",
                            r"Making\s+dependency\s+for\s+file\s+.*?\s+\.\.\.",
                            r"Making\s+libname\.map\s+starter\s+file\s+\.\.\.",
                            r"Making\s+libname\.map\s+\.\.\.",
                            r"Reading\s+library\s+get_name\s+map\s+file\s*\.\.\.",
                            r"Reading\s+dependency\s+files\s*\.\.\.",
                            r"Creating\s+shared\s+library\s+.*?\s+\.\.\."
                           ]

            run_cmd_qa(cmd, self.qanda, self.noqanda, log_all=True, simple=True)

            # determining self.g4system
            try:
                scriptdirbase = os.path.join(pwd, '.config', 'bin')
                filelist = os.listdir(scriptdirbase)
            except OSError, err:
                self.log.error("Failed to determine self.g4system: %s" % err)
    
            if len(filelist) != 1:
                self.log.error("Exactly one directory is expected in %s; found back: %s" % (scriptdirbase, filelist))
            else:
                self.g4system = filelist[0]
    
            self.scriptdir = os.path.join(scriptdirbase, self.g4system)
            if not os.path.isdir(self.scriptdir):
                self.log.error("Something went wrong. Dir: %s doesn't exist." % self.scriptdir)
            self.log.info("The directory containing several important scripts to be copied was found: %s" % self.scriptdir)

            # copying config.sh to pwd
            try:
                self.log.info("copying config.sh to %s" % pwd)
                shutil.copy2(os.path.join(self.scriptdir, 'config.sh'), pwd)
            except IOError, err:
                self.log.error("Failed to copy config.sh to %s" % pwd)
Esempio n. 53
0
    def configure_step(self):
        """Custom configuration procedure for Molpro: use 'configure -batch'."""

        if not os.path.isfile(self.license_token):
            license_file = self.cfg['license_file']
            if license_file is not None and os.path.isfile(license_file):
                # symlink the specified license file into place as $HOME/.molpro/token;
                # other approaches (like defining $MOLPRO_KEY) don't seem to work
                self.cleanup_token_symlink = True
                mkdir(os.path.dirname(self.license_token))
                symlink(license_file, self.license_token)
                self.log.debug("Symlinked %s to %s", license_file, self.license_token)
            else:
                self.log.warning("No licence token found at either %s or via 'license_file'", self.license_token)

        # the rest of the configuration only applies when building from source
        if self.cfg['precompiled_binaries']:
            return

        # installation prefix
        self.cfg.update('configopts', "-prefix %s" % self.installdir)

        # compilers & MPI: use sequential compiler wrappers when MPI support is enabled
        if self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', "-%s -%s" % (os.environ['CC_SEQ'], os.environ['F90_SEQ']))
            if 'MPI_INC_DIR' not in os.environ:
                raise EasyBuildError("$MPI_INC_DIR not defined")
            self.cfg.update('configopts', "-mpp -mppbase %s" % os.environ['MPI_INC_DIR'])
        else:
            self.cfg.update('configopts', "-%s -%s" % (os.environ['CC'], os.environ['F90']))

        # BLAS/LAPACK library locations (both are required)
        for libname in ['blas', 'lapack']:
            envvar = '%s_LIB_DIR' % libname.upper()
            if envvar in os.environ:
                self.cfg.update('configopts', "-%s -%spath %s" % (libname, libname, os.environ[envvar]))
            else:
                raise EasyBuildError("$%s not defined" % envvar)

        # integer size: 4-byte integers for 32-bit builds, 8-byte integers otherwise
        int_size_opt = '-i4' if self.toolchain.options.get('32bit', None) else '-i8'
        self.cfg.update('configopts', int_size_opt)

        run_cmd("./configure -batch %s" % self.cfg['configopts'])

        cfgfile = os.path.join(self.cfg['start_dir'], 'CONFIG')
        cfgtxt = read_file(cfgfile)

        # extract original LAUNCHER and PREFIX values from the generated CONFIG file
        cfg_vars = [
            ('LAUNCHER', 'orig_launcher', "Found original value for LAUNCHER: %s",
             "Failed to determine LAUNCHER value"),
            ('PREFIX', 'full_prefix', "Found full installation prefix: %s",
             "Failed to determine full installation prefix"),
        ]
        for varname, attr, debug_msg, error_msg in cfg_vars:
            match = re.search('^%s=(.*)$' % varname, cfgtxt, re.M)
            if match is None:
                raise EasyBuildError(error_msg)
            setattr(self, attr, match.group(1))
            self.log.debug(debug_msg, getattr(self, attr))

        # determine MPI launcher command that can be used during build/test;
        # mpi_cmd_for requires a concrete core count, which is replaced with the '%n' template afterwards
        nprocs = self.cfg['parallel']
        launcher = self.toolchain.mpi_cmd_for('%x', nprocs).replace(' %s' % nprocs, ' %n')

        # patch CONFIG file to change LAUNCHER definition, in order to avoid having to start mpd
        apply_regex_substitutions(cfgfile, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % launcher)])

        # reread CONFIG and log its (patched) contents
        self.log.info("Contents of CONFIG file:\n%s", read_file(cfgfile))
    def test_build_easyconfigs_in_parallel_gc3pie(self):
        """Test build_easyconfigs_in_parallel(), using GC3Pie with local config as backend for --job."""
        try:
            import gc3libs  # noqa (ignore unused import)
        except ImportError:
            print "GC3Pie not available, skipping test"
            return

        # put GC3Pie config in place to use local host and fork/exec
        resourcedir = os.path.join(self.test_prefix, 'gc3pie')
        gc3pie_cfgfile = os.path.join(self.test_prefix, 'gc3pie_local.ini')
        gc3pie_cfgtxt = GC3PIE_LOCAL_CONFIGURATION % {
            'resourcedir': resourcedir,
            'time': which('time'),
        }
        write_file(gc3pie_cfgfile, gc3pie_cfgtxt)

        output_dir = os.path.join(self.test_prefix, 'subdir',
                                  'gc3pie_output_dir')
        # purposely pre-create output dir, and put a file in it (to check whether GC3Pie tries to rename the output dir)
        mkdir(output_dir, parents=True)
        write_file(os.path.join(output_dir, 'foo'), 'bar')
        # remove write permissions on parent dir of specified output dir,
        # to check that GC3Pie does not try to rename the (already existing) output directory...
        adjust_permissions(os.path.dirname(output_dir),
                           stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
                           add=False,
                           recursive=False)

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'job_backend_config': gc3pie_cfgfile,
            'job_max_walltime': 24,
            'job_output_dir': output_dir,
            'job_polling_interval': 0.2,  # quick polling
            'job_target_resource': 'ebtestlocalhost',
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'silent': True,
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=GC3Pie'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy',
                               'toy-0.0.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        topdir = os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        test_easyblocks_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'sandbox')
        cmd = "PYTHONPATH=%s:%s:$PYTHONPATH eb %%(spec)s -df" % (
            topdir, test_easyblocks_path)
        build_easyconfigs_in_parallel(cmd, ordered_ecs, prepare_first=False)

        self.assertTrue(
            os.path.join(self.test_installpath, 'modules', 'all', 'toy',
                         '0.0'))
        self.assertTrue(
            os.path.join(self.test_installpath, 'software', 'toy', '0.0',
                         'bin', 'toy'))
Esempio n. 55
0
    def make_module_step(self, fake=False):
        """Install .modulerc file that defines a version alias for the wrapped module."""
        modfile_path = self.module_generator.get_module_filepath(fake=fake)
        modulerc_path = os.path.join(os.path.dirname(modfile_path), self.module_generator.DOT_MODULERC)

        deps = self.cfg['dependencies']
        if len(deps) != 1:
            raise EasyBuildError("There should be exactly one dependency specified, found %d", len(deps))

        dep = deps[0]

        # names should match
        if self.name != dep['name']:
            raise EasyBuildError("Name does not match dependency name: %s vs %s", self.name, dep['name'])

        # ensure version to alias to is a prefix of the version of the dependency
        version_ok = self.version == "default" or dep['version'].startswith(self.version)
        if not version_ok:
            raise EasyBuildError("Version is not 'default' and not a prefix of dependency version: %s vs %s",
                                 self.version, dep['version'])

        alias_modname = dep['short_mod_name']
        self.log.info("Adding module version alias for %s to %s", alias_modname, modulerc_path)

        # add symlink to wrapped module file when generating .modulerc in temporary directory (done during sanity check)
        # this is strictly required for Lmod 6.x, for which .modulerc and wrapped module file must be in same location
        if fake:
            wrapped_mod_path = self.modules_tool.modulefile_path(alias_modname)
            link_target = os.path.join(os.path.dirname(modulerc_path), os.path.basename(wrapped_mod_path))
            mkdir(os.path.dirname(link_target), parents=True)
            symlink(wrapped_mod_path, link_target)

        self.module_generator.modulerc(
            module_version={
                'modname': alias_modname,
                'sym_version': self.version,
                'version': dep['version'],
            },
            filepath=modulerc_path,
        )

        if not fake:
            print_msg("updated .modulerc file at %s" % modulerc_path, log=self.log)

            # symlink .modulerc in other locations (unless they're already linked)
            mod_symlink_dirs = ActiveMNS().det_module_symlink_paths(self.cfg)
            mod_subdir = os.path.dirname(ActiveMNS().det_full_module_name(self.cfg))
            mod_install_path = install_path('mod')
            modulerc_filename = os.path.basename(modulerc_path)

            for symlink_dir in mod_symlink_dirs:
                modulerc_symlink = os.path.join(mod_install_path, symlink_dir, mod_subdir, modulerc_filename)
                if not os.path.islink(modulerc_symlink):
                    # make sure parent folder exists before creating the symlink
                    mkdir(os.path.dirname(modulerc_symlink), parents=True)
                    symlink(modulerc_path, modulerc_symlink)
                    print_msg("created symlink %s to .modulerc file at %s", modulerc_symlink, modulerc_path,
                              log=self.log)
                elif resolve_path(modulerc_symlink) == resolve_path(modulerc_path):
                    print_msg("symlink %s to %s already exists", modulerc_symlink, modulerc_path)
                else:
                    raise EasyBuildError("%s exists but is not a symlink to %s", modulerc_symlink, modulerc_path)

        modpath = self.module_generator.get_modules_path(fake=fake)
        self.invalidate_module_caches(modpath)

        return modpath
Esempio n. 56
0
    def configure_step(self, srcdir=None, builddir=None):
        """
        Configure build using cmake.

        :param srcdir: source directory to pass to cmake (default: 'srcdir' easyconfig
                       parameter, start dir, or '.')
        :param builddir: build directory to configure in (default: separate 'easybuild_obj'
                         dir if 'separate_build_dir' is enabled)
        :return: output of the configure command
        """
        # make the toolchain's include/library paths visible to CMake's find_* machinery
        tc_ipaths = self.toolchain.get_variable("CPPFLAGS", list)
        tc_lpaths = self.toolchain.get_variable("LDFLAGS", list)
        # filter out empty entries: splitting an unset/empty $CPATH or $LD_LIBRARY_PATH
        # yields [''], which would inject a bogus empty search path into CMAKE_*_PATH
        cpaths = [p for p in os.getenv('CPATH', '').split(os.pathsep) if p]
        lpaths = [p for p in os.getenv('LD_LIBRARY_PATH', '').split(os.pathsep) if p]
        include_paths = os.pathsep.join(nub(tc_ipaths + cpaths))
        library_paths = os.pathsep.join(nub(tc_lpaths + lpaths))
        setvar("CMAKE_INCLUDE_PATH", include_paths)
        setvar("CMAKE_LIBRARY_PATH", library_paths)

        if builddir is None and self.cfg.get('separate_build_dir', False):
            builddir = os.path.join(self.builddir, 'easybuild_obj')

        if builddir:
            # out-of-source build: create the build dir and configure from there
            mkdir(builddir, parents=True)
            change_dir(builddir)
            default_srcdir = self.cfg['start_dir']
        else:
            default_srcdir = '.'

        if srcdir is None:
            if self.cfg.get('srcdir', None) is not None:
                srcdir = self.cfg['srcdir']
            else:
                srcdir = default_srcdir

        options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir]
        # map compiler/flags environment variables to the corresponding CMake cache options
        env_to_options = {
            'CC': 'CMAKE_C_COMPILER',
            'CFLAGS': 'CMAKE_C_FLAGS',
            'CXX': 'CMAKE_CXX_COMPILER',
            'CXXFLAGS': 'CMAKE_CXX_FLAGS',
            'F90': 'CMAKE_Fortran_COMPILER',
            'FFLAGS': 'CMAKE_Fortran_FLAGS',
        }
        for env_name, option in env_to_options.items():
            value = os.getenv(env_name)
            if value is not None:
                if option.endswith('_COMPILER') and self.cfg.get('abs_path_compilers', False):
                    value = which(value)
                    self.log.info("Using absolute path to compiler command: %s", value)
                options.append("-D%s='%s'" % (option, value))

        if build_option('rpath'):
            # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install...
            # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416
            options.append('-DCMAKE_SKIP_RPATH=ON')

        # show what CMake is doing by default
        options.append('-DCMAKE_VERBOSE_MAKEFILE=ON')

        if not self.cfg.get('allow_system_boost', False):
            # don't pick up on system Boost if Boost is included as dependency
            # - specify Boost location via -DBOOST_ROOT
            # - instruct CMake to not search for Boost headers/libraries in other places
            # - disable search for Boost CMake package configuration file
            boost_root = get_software_root('Boost')
            if boost_root:
                options.extend([
                    '-DBOOST_ROOT=%s' % boost_root,
                    '-DBoost_NO_SYSTEM_PATHS=ON',
                    '-DBoost_NO_BOOST_CMAKE=ON',
                ])

        options_string = ' '.join(options)

        if self.cfg.get('configure_cmd') == DEFAULT_CONFIGURE_CMD:
            command = ' '.join([self.cfg['preconfigopts'], DEFAULT_CONFIGURE_CMD, options_string,
                                self.cfg['configopts'], srcdir])
        else:
            # custom configure command: any options/srcdir are assumed to be part of 'configopts'
            command = ' '.join([self.cfg['preconfigopts'], self.cfg.get('configure_cmd'), self.cfg['configopts']])

        (out, _) = run_cmd(command, log_all=True, simple=False)

        return out
Esempio n. 57
0
    def install_step(self):
        """
        Actual installation of Intel MPI (impi):
        - for impi >= 4.0.1: use the standard Intel installer (via parent easyblock)
        - for older impi versions: create a custom silent.cfg file and run install.sh
        - afterwards, optionally rebuild the bundled libfabric from shipped sources
        """
        impiver = LooseVersion(self.version)
        if impiver >= LooseVersion('4.0.1'):
            # impi starting from version 4.0.1.x uses standard installation procedure.

            silent_cfg_names_map = {}

            if impiver < LooseVersion('4.1.1'):
                # since impi v4.1.1, silent.cfg has been slightly changed to be 'more standard'
                # for older versions, the 2012-era activation/license-file key names must be used
                silent_cfg_names_map.update({
                    'activation_name':
                    ACTIVATION_NAME_2012,
                    'license_file_name':
                    LICENSE_FILE_NAME_2012,
                })

            super(EB_impi,
                  self).install_step(silent_cfg_names_map=silent_cfg_names_map)

            # impi v4.1.1 and v5.0.1 installers create impi/<version> subdir, so stuff needs to be moved afterwards
            if impiver == LooseVersion(
                    '4.1.1.036') or impiver >= LooseVersion('5.0.1.035'):
                super(EB_impi, self).move_after_install()
        else:
            # impi up until version 4.0.0.x uses custom installation procedure.
            # silent.cfg contents: non-RPM, non-root install of both the 'mpi' and 'mpi-rt'
            # components into the installation prefix, using the configured license file
            silent = """[mpi]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept
[mpi-rt]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept

""" % {
                'lic': self.license_file,
                'ins': self.installdir
            }

            # already in correct directory
            silentcfg = os.path.join(os.getcwd(), "silent.cfg")
            write_file(silentcfg, silent)
            self.log.debug("Contents of %s: %s", silentcfg, silent)

            # dedicated temporary directory for the installer, inside the unpacked sources
            tmpdir = os.path.join(os.getcwd(), self.version, 'mytmpdir')
            mkdir(tmpdir, parents=True)

            cmd = "./install.sh --tmp-dir=%s --silent=%s" % (tmpdir, silentcfg)
            run_cmd(cmd, log_all=True, simple=True)

        # recompile libfabric (if requested)
        # some Intel MPI versions (like 2019 update 6) no longer ship libfabric sources
        libfabric_path = os.path.join(self.installdir, 'libfabric')
        if impiver >= LooseVersion('2019') and self.cfg['libfabric_rebuild']:
            if self.cfg['ofi_internal']:
                libfabric_src_tgz_fn = 'src.tgz'
                if os.path.exists(
                        os.path.join(libfabric_path, libfabric_src_tgz_fn)):
                    change_dir(libfabric_path)
                    srcdir = extract_file(libfabric_src_tgz_fn,
                                          os.getcwd(),
                                          change_into_dir=False)
                    change_dir(srcdir)
                    # install rebuilt libfabric alongside the Intel-provided one under intel64/
                    libfabric_installpath = os.path.join(
                        self.installdir, 'intel64', 'libfabric')

                    make = 'make'
                    if self.cfg['parallel']:
                        make += ' -j %d' % self.cfg['parallel']

                    # configure + build + install libfabric from the shipped sources
                    cmds = [
                        './configure --prefix=%s %s' %
                        (libfabric_installpath,
                         self.cfg['libfabric_configopts']), make,
                        'make install'
                    ]
                    for cmd in cmds:
                        run_cmd(cmd, log_all=True, simple=True)
                else:
                    self.log.info(
                        "Rebuild of libfabric is requested, but %s does not exist, so skipping...",
                        libfabric_src_tgz_fn)
            else:
                raise EasyBuildError(
                    "Rebuild of libfabric is requested, but ofi_internal is set to False."
                )
Esempio n. 58
0
 def use(self, path):
     """
     Add specified path to $MODULEPATH via 'module use'.

     :param path: path to add to $MODULEPATH; created first if it does not exist yet
     """
     # make sure path exists before we add it
     mkdir(path, parents=True)
     self.run_module(['use', path])
Esempio n. 59
0
                raise EasyBuildError("Copying src %s to dst %s failed: %s",
                                     src, dst, err)

        # some extra symlinks are necessary for UMFPACK to work.
        paths = [
            os.path.join('AMD', 'include', 'amd.h'),
            os.path.join('AMD', 'include', 'amd_internal.h'),
            os.path.join(self.config_name, '%s.h' % self.config_name),
            os.path.join('AMD', 'lib', 'libamd.a')
        ]
        for path in paths:
            src = os.path.join(self.installdir, path)
            dn = path.split(os.path.sep)[-2]
            fn = path.split(os.path.sep)[-1]
            dstdir = os.path.join(self.installdir, 'UMFPACK', dn)
            mkdir(dstdir)
            if os.path.exists(src):
                try:
                    os.symlink(src, os.path.join(dstdir, fn))
                except OSError, err:
                    raise EasyBuildError(
                        "Failed to make symbolic link from %s to %s: %s", src,
                        dst, err)

    def make_module_req_guess(self):
        """
        Extra path to consider for module file:
        * add config dir to $CPATH so include files are found
        * add UMFPACK and AMD library dirs to $LD_LIBRARY_PATH
        """
        guesses = super(EB_SuiteSparse, self).make_module_req_guess()
Esempio n. 60
0
    def test_make_module_pythonpackage(self):
        """Test make_module_step of PythonPackage easyblock."""
        app_class = get_easyblock_class('PythonPackage')
        self.writeEC('PythonPackage', name='testpypkg', version='3.14')
        app = app_class(EasyConfig(self.eb_file))

        # install dir should not be there yet
        self.assertFalse(os.path.exists(app.installdir),
                         "%s should not exist" % app.installdir)

        # create install dir and populate it with subdirs/files
        mkdir(app.installdir, parents=True)
        # $PATH, $LD_LIBRARY_PATH, $LIBRARY_PATH, $CPATH, $PKG_CONFIG_PATH
        write_file(os.path.join(app.installdir, 'bin', 'foo'), 'echo foo!')
        write_file(os.path.join(app.installdir, 'include', 'foo.h'), 'bar')
        write_file(os.path.join(app.installdir, 'lib', 'libfoo.a'), 'libfoo')
        pyver = '.'.join(map(str, sys.version_info[:2]))
        write_file(
            os.path.join(app.installdir, 'lib', 'python%s' % pyver,
                         'site-packages', 'foo.egg'), 'foo egg')
        write_file(
            os.path.join(app.installdir, 'lib64', 'pkgconfig', 'foo.pc'),
            'libfoo: foo')

        # PythonPackage relies on the fact that 'python' points to the right Python version
        tmpdir = tempfile.mkdtemp()
        python = os.path.join(tmpdir, 'python')
        write_file(python, '#!/bin/bash\necho $0 $@\n%s "$@"' % sys.executable)
        adjust_permissions(python, stat.S_IXUSR)
        # remember original $PATH so it can be restored afterwards;
        # the original version of this test left the tmpdir on $PATH, leaking into later tests
        orig_path = os.getenv('PATH', '')
        os.environ['PATH'] = '%s:%s' % (tmpdir, orig_path)

        from easybuild.tools.filetools import which
        print(which('python'))

        try:
            # create module file
            app.make_module_step()
        finally:
            # restore $PATH and clean up the temporary 'python' wrapper + its directory,
            # even if make_module_step fails
            os.environ['PATH'] = orig_path
            remove_file(python)
            os.rmdir(tmpdir)

        self.assertTrue(TMPDIR in app.installdir)
        self.assertTrue(TMPDIR in app.installdir_mod)

        # locate generated module file (Tcl or Lua syntax)
        modtxt = None
        for cand_mod_filename in ['3.14', '3.14.lua']:
            full_modpath = os.path.join(app.installdir_mod, 'testpypkg',
                                        cand_mod_filename)
            if os.path.exists(full_modpath):
                modtxt = read_file(full_modpath)
                break

        self.assertFalse(modtxt is None)

        regexs = [
            (r'^prepend.path.*\WCPATH\W.*include"?\W*$', True),
            (r'^prepend.path.*\WLD_LIBRARY_PATH\W.*lib"?\W*$', True),
            (r'^prepend.path.*\WLIBRARY_PATH\W.*lib"?\W*$', True),
            (r'^prepend.path.*\WPATH\W.*bin"?\W*$', True),
            (r'^prepend.path.*\WPKG_CONFIG_PATH\W.*lib64/pkgconfig"?\W*$',
             True),
            (r'^prepend.path.*\WPYTHONPATH\W.*lib/python[23]\.[0-9]/site-packages"?\W*$',
             True),
            # lib64 doesn't contain any library files, so these are *not* included in $LD_LIBRARY_PATH or $LIBRARY_PATH
            (r'^prepend.path.*\WLD_LIBRARY_PATH\W.*lib64', False),
            (r'^prepend.path.*\WLIBRARY_PATH\W.*lib64', False),
        ]
        for (pattern, found) in regexs:
            regex = re.compile(pattern, re.M)
            if found:
                assert_msg = "Pattern '%s' found in: %s" % (regex.pattern,
                                                            modtxt)
            else:
                assert_msg = "Pattern '%s' not found in: %s" % (regex.pattern,
                                                                modtxt)

            self.assertEqual(bool(regex.search(modtxt)), found, assert_msg)