Example #1
    def configure_step(self):
        """Configure SCOTCH build: locate the template makefile, copy it to a general Makefile.inc and patch it."""

        # pick template makefile
        comp_fam = self.toolchain.comp_family()
        if comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2.icc'
        elif comp_fam == toolchain.GCC:  # @UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2'
        else:
            raise EasyBuildError("Unknown compiler family used: %s", comp_fam)

        srcdir = os.path.join(self.cfg['start_dir'], 'src')

        # create Makefile.inc
        makefile_inc = os.path.join(srcdir, 'Makefile.inc')
        copy_file(os.path.join(srcdir, 'Make.inc', makefilename), makefile_inc)
        self.log.debug("Successfully copied Makefile.inc to src dir: %s", makefile_inc)

        # the default settings in these template makefiles still need fixing:
        # e.g. the compiler settings, and -lpthread needs to be added to LDFLAGS
        regex_subs = [
            (r"^CCS\s*=.*$", "CCS\t= $(CC)"),
            (r"^CCP\s*=.*$", "CCP\t= $(MPICC)"),
            (r"^CCD\s*=.*$", "CCD\t= $(MPICC)"),
            # append -lpthread to LDFLAGS
            (r"^LDFLAGS\s*=(?P<ldflags>.*$)", "LDFLAGS\t=\g<ldflags> -lpthread"),
        ]
        apply_regex_substitutions(makefile_inc, regex_subs)

        # change to src dir for building
        change_dir(srcdir)
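
Note: apply_regex_substitutions() is a helper from EasyBuild's file tools. As a rough stand-alone approximation (plain Python; the function name below is just illustrative), the same idea boils down to rewriting a file line by line with a list of (pattern, replacement) pairs:

import re

def apply_regex_subs_sketch(path, regex_subs):
    """Rewrite the file at 'path' line by line, applying each (pattern, replacement) pair."""
    with open(path) as handle:
        lines = handle.readlines()
    with open(path, 'w') as handle:
        for line in lines:
            for (pattern, repl) in regex_subs:
                line = re.sub(pattern, repl, line)
            handle.write(line)

# e.g. with the substitutions from the SCOTCH easyblock above:
# apply_regex_subs_sketch('Makefile.inc', [(r"^CCS\s*=.*$", "CCS\t= $(CC)")])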
Example #2
    def configure_step(self, cmd_prefix=''):
        """
        Configure with Meson.
        """
        # make sure both Meson and Ninja are included as build dependencies
        build_dep_names = [d['name'] for d in self.cfg.builddependencies()]
        for tool in ['Ninja', 'Meson']:
            if tool not in build_dep_names:
                raise EasyBuildError("%s not included as build dependency", tool)
            cmd = tool.lower()
            if not which(cmd):
                raise EasyBuildError("'%s' command not found", cmd)

        if self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            mkdir(builddir)
            change_dir(builddir)

        # Make sure libdir doesn't get set to lib/x86_64-linux-gnu or something
        # on Debian/Ubuntu multiarch systems and others.
        no_Dlibdir = '-Dlibdir' not in self.cfg['configopts']
        no_libdir = '--libdir' not in self.cfg['configopts']
        if no_Dlibdir and no_libdir:
            self.cfg.update('configopts', '-Dlibdir=lib')

        cmd = "%(preconfigopts)s meson --prefix %(installdir)s %(configopts)s %(sourcedir)s" % {
            'configopts': self.cfg['configopts'],
            'installdir': self.installdir,
            'preconfigopts': self.cfg['preconfigopts'],
            'sourcedir': self.start_dir,
        }
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        return out
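
The configure command above is assembled with %-style dict formatting from easyconfig parameters. A minimal illustration of how the pieces combine (the values below are made-up placeholders, not real easyconfig settings):

cfg = {
    'preconfigopts': '',
    'configopts': '-Dlibdir=lib',
    'installdir': '/tmp/example/install',
    'sourcedir': '/tmp/example/src',
}
cmd = "%(preconfigopts)s meson --prefix %(installdir)s %(configopts)s %(sourcedir)s" % cfg
print(cmd)  # " meson --prefix /tmp/example/install -Dlibdir=lib /tmp/example/src"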
Example #3
    def install_step(self):
        """
        Custom install procedure for OCaml.
        First install OCaml using 'make install', then install OPAM (if sources are provided).
        """
        super(EB_OCaml, self).install_step()

        fake_mod_data = self.load_fake_module(purge=True)

        try:
            all_dirs = os.listdir(self.builddir)
        except OSError as err:
            raise EasyBuildError("Failed to check contents of %s: %s", self.builddir, err)

        opam_dirs = [d for d in all_dirs if d.startswith('opam')]
        if len(opam_dirs) == 1:
            opam_dir = os.path.join(self.builddir, opam_dirs[0])
            self.log.info("Found unpacked OPAM sources at %s, so installing it.", opam_dir)
            self.with_opam = True
            change_dir(opam_dir)

            run_cmd("./configure --prefix=%s" % self.installdir)
            run_cmd("make lib-ext")  # locally build/install required dependencies
            run_cmd("make")
            run_cmd("make install")

            opam_init_cmd = mk_opam_init_cmd(root=os.path.join(self.installdir, OPAM_SUBDIR))
            run_cmd(opam_init_cmd)
        else:
            self.log.warning("OPAM sources not found in %s: %s", self.builddir, all_dirs)

        self.clean_up_fake_module(fake_mod_data)
Example #4
    def run(self):
        """Perform Octave package installation (as extension)."""

        # if patches are specified, we need to unpack the source tarball, apply the patch,
        # and create a temporary tarball to use for installation
        if self.patches:
            # call out to ExtensionEasyBlock to unpack & apply patches
            super(OctavePackage, self).run(unpack_src=True)

            # create temporary tarball from unpacked & patched source
            src = os.path.join(tempfile.gettempdir(), '%s-%s-patched.tar.gz' % (self.name, self.version))
            cwd = change_dir(os.path.dirname(self.ext_dir))
            run_cmd("tar cfvz %s %s" % (src, os.path.basename(self.ext_dir)))
            change_dir(cwd)
        else:
            src = self.src

        # two install locations need to be specified, to avoid that $HOME/octave gets used:
        # one general package installation prefix, and one for architecture-dependent files
        pkg_prefix = os.path.join(self.installdir, 'share', 'octave', 'packages')
        pkg_arch_dep_prefix = pkg_prefix + '-arch-dep'
        octave_cmd = "pkg prefix %s %s; " % (pkg_prefix, pkg_arch_dep_prefix)

        octave_cmd += "pkg install -global %s" % src

        run_cmd("octave --eval '%s'" % octave_cmd)
Example #5
    def configure_step(self):
        """Custom configuration procedure for Doris."""
        fftw = get_software_root('FFTW')
        if fftw is None:
            raise EasyBuildError("Required dependency FFTW is missing")

        # create installation directory (and /bin subdirectory) early, make sure it doesn't get removed later
        self.make_installdir()
        mkdir(os.path.join(self.installdir, 'bin'))
        self.cfg['keeppreviousinstall'] = True

        # configure/build/install should be done from 'src' subdirectory
        change_dir(os.path.join(self.cfg['start_dir'], 'src'))

        qa = {
            "===> Press enter to continue.": '',
            "===> What is your C++ compiler? [g++]": os.getenv('CXX'),
            "===> Do you have the FFTW library (y/n)? [n]": 'y',
            "===> What is the path to the FFTW library (libfftw3f.a or libfftw3f.so)? []": os.path.join(fftw, 'lib'),
            "===> What is the path to the FFTW include file (fftw3.h)? []": os.path.join(fftw, 'include'),
            "===> Do you have the VECLIB library (y/n)? [n]": 'n',
            "===> Do you have the LAPACK library (y/n)? [n]": 'y',
            "===> What is the path to the LAPACK library liblapack.a? []": os.getenv('LAPACK_LIB_DIR'),
            "===> Are you working on a Little Endian (X86 PC, Intel) machine (y/n)? [y]": 'y',
            "===> Installation of Doris in directory: /usr/local/bin (y/n)? [y]": 'n',
            "===> Enter installation directory (use absolute path):": os.path.join(self.installdir, 'bin'),
            "===> Press enter to continue (CTRL-C to exit).": '',
        }
        std_qa = {
            "===> Do you want to compile a more verbose DEBUG version \(y/n\)\? \[n\](.|\n)*expected results\)": 'n',
        }

        run_cmd_qa('./configure', qa, std_qa=std_qa, log_all=True, simple=True)
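
run_cmd_qa() is EasyBuild's helper for driving interactive scripts: it matches prompts (literally via 'qa', as regular expressions via 'std_qa') and feeds back the corresponding answers. A rough stand-alone sketch of the same idea using the third-party pexpect package (prompts and answers here are only illustrative, not the full Doris dialogue, and this is not EasyBuild's actual implementation):

import pexpect

def run_interactive_sketch(cmd, qa, timeout=300):
    """Spawn cmd and answer each literal prompt from the ordered (prompt, answer) pairs."""
    child = pexpect.spawn(cmd, timeout=timeout)
    for (prompt, answer) in qa:
        child.expect_exact(prompt)   # wait for the literal prompt text
        child.sendline(answer)       # reply as an interactive user would
    child.expect(pexpect.EOF)
    return child.before.decode()

# run_interactive_sketch('./configure', [
#     ("===> Press enter to continue.", ''),
#     ("===> Do you have the FFTW library (y/n)? [n]", 'y'),
# ])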
Example #6
    def install_step(self):
        """Custom install procedure for TINKER."""

        change_dir(os.path.join(self.cfg['start_dir'], 'source'))

        mkdir(os.path.join(self.cfg['start_dir'], 'bin'))
        run_cmd(os.path.join(self.cfg['start_dir'], self.build_subdir, 'rename.make'))
Example #7
    def run_clang_tests(self, obj_dir):
        """Run Clang tests in specified directory (unless disabled)."""
        if not self.cfg['skip_all_tests']:
            change_dir(obj_dir)

            self.log.info("Running tests")
            run_cmd("make %s check-all" % self.make_parallel_opts, log_all=True)
Example #8
    def install_step(self):
        """Install stage 3 binaries."""

        if self.cfg['bootstrap']:
            change_dir(self.llvm_obj_dir_stage3)
        else:
            change_dir(self.llvm_obj_dir_stage1)
        super(EB_Clang, self).install_step()

        # the static analyzer is not installed by default
        # we do it by hand
        if self.cfg['static_analyzer'] and LooseVersion(self.version) < LooseVersion('3.8'):
            try:
                tools_src_dir = os.path.join(self.llvm_src_dir, 'tools', 'clang', 'tools')
                analyzer_target_dir = os.path.join(self.installdir, 'libexec', 'clang-analyzer')
                bindir = os.path.join(self.installdir, 'bin')
                for scan_dir in ['scan-build', 'scan-view']:
                    shutil.copytree(os.path.join(tools_src_dir, scan_dir), os.path.join(analyzer_target_dir, scan_dir))
                    os.symlink(os.path.relpath(bindir, os.path.join(analyzer_target_dir, scan_dir)),
                               os.path.join(analyzer_target_dir, scan_dir, 'bin'))
                    os.symlink(os.path.relpath(os.path.join(analyzer_target_dir, scan_dir, scan_dir), bindir),
                               os.path.join(bindir, scan_dir))

                mandir = os.path.join(self.installdir, 'share', 'man', 'man1')
                os.makedirs(mandir)
                shutil.copy2(os.path.join(tools_src_dir, 'scan-build', 'scan-build.1'), mandir)
            except OSError as err:
                raise EasyBuildError("Failed to copy static analyzer dirs to install dir: %s", err)
Example #9
    def install_step(self):
        """MATLAB install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        if LooseVersion(self.version) >= LooseVersion('2016b'):
            jdir = os.path.join(self.cfg['start_dir'], 'sys', 'java', 'jre', 'glnxa64', 'jre', 'bin')
            for perm_dir in [os.path.join(self.cfg['start_dir'], 'bin', 'glnxa64'), jdir]:
                adjust_permissions(perm_dir, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_opts = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_opts + self.cfg['preinstallopts']
        if LooseVersion(self.version) >= LooseVersion('2016b'):
            change_dir(self.builddir)

        cmd = "%s %s -v -inputFile %s %s" % (self.cfg['preinstallopts'], src, self.configfile, self.cfg['installopts'])
        run_cmd(cmd, log_all=True, simple=True)
Example #10
    def configure_step(self, cmd_prefix=''):
        """
        Configure with Meson.
        """
        # make sure both Meson and Ninja are included as build dependencies
        build_dep_names = [d['name'] for d in self.cfg.builddependencies()]
        for tool in ['Ninja', 'Meson']:
            if tool not in build_dep_names:
                raise EasyBuildError("%s not included as build dependency", tool)
            cmd = tool.lower()
            if not which(cmd):
                raise EasyBuildError("'%s' command not found", cmd)

        if self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            mkdir(builddir)
            change_dir(builddir)

        cmd = "%(preconfigopts)s meson --prefix %(installdir)s %(configopts)s %(sourcedir)s" % {
            'configopts': self.cfg['configopts'],
            'installdir': self.installdir,
            'preconfigopts': self.cfg['preconfigopts'],
            'sourcedir': self.start_dir,
        }
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        return out
Example #11
    def test_import_available_modules(self):
        """Test for import_available_modules function."""

        res = import_available_modules('easybuild.tools.repository')
        self.assertEqual(len(res), 5)
        # don't check all, since some require specific Python packages to be installed...
        self.assertTrue(easybuild.tools.repository.filerepo in res)

        # replicate situation where import_available_modules failed when running in directory where modules are located
        # cfr. https://github.com/easybuilders/easybuild-framework/issues/2659
        #      and https://github.com/easybuilders/easybuild-framework/issues/2742
        test123 = os.path.join(self.test_prefix, 'test123')
        mkdir(test123)
        write_file(os.path.join(test123, '__init__.py'), '')
        write_file(os.path.join(test123, 'one.py'), '')
        write_file(os.path.join(test123, 'two.py'), '')
        write_file(os.path.join(test123, 'three.py'), '')

        change_dir(self.test_prefix)
        res = import_available_modules('test123')

        import test123.one
        import test123.two
        import test123.three
        self.assertEqual([test123.one, test123.three, test123.two], res)
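
For context: import_available_modules() imports every submodule of a package that can actually be imported. A rough idea of what such a helper boils down to (NOT the actual EasyBuild implementation, just a sketch using the standard library):

import importlib
import pkgutil

def import_available_modules_sketch(pkg_name):
    """Import all importable submodules of the given package and return them as a list."""
    pkg = importlib.import_module(pkg_name)
    modules = []
    for (_, mod_name, _) in pkgutil.iter_modules(pkg.__path__):
        try:
            modules.append(importlib.import_module('%s.%s' % (pkg_name, mod_name)))
        except ImportError:
            # skip modules whose own dependencies are not installed
            continue
    return modules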
Example #12
    def install_step(self):
        """Custom install procedure for RepeatMasker."""
        super(EB_RepeatMasker, self).install_step()

        # check for required dependencies
        perl_root = get_software_root('Perl')
        if perl_root:
            perl = os.path.join(perl_root, 'bin', 'perl')
        else:
            raise EasyBuildError("Missing required dependency: Perl")

        trf_root = get_software_root('TRF')
        if trf_root:
            trf = os.path.join(trf_root, 'trf')
        else:
            raise EasyBuildError("Missing required dependency: TRF")

        # determine which search engine to use
        # see also http://www.repeatmasker.org/RMDownload.html
        cand_search_engines = ['CrossMatch', 'RMBlast', 'WUBlast', 'HMMER']
        search_engine = None
        for dep in cand_search_engines:
            if get_software_root(dep):
                if search_engine is None:
                    search_engine = dep
                else:
                    raise EasyBuildError("Found multiple candidate search engines: %s and %s", search_engine, dep)

        if search_engine is None:
            raise EasyBuildError("No search engine found, one of these must be included as dependency: %s",
                                 ' '.join(cand_search_engines))

        change_dir(self.installdir)

        patch_perl_script_autoflush('configure')

        search_engine_map = {
            'CrossMatch': '1',
            'RMBlast': '2',
            'WUBlast': '3',
            'HMMER': '4',
        }
        search_engine_bindir = os.path.join(get_software_root(search_engine), 'bin')

        cmd = "perl ./configure"
        qa = {
            '<PRESS ENTER TO CONTINUE>': '',
            # select search engine
            'Enter Selection:': search_engine_map[search_engine],
        }
        std_qa = {
            r'\*\*PERL PROGRAM\*\*\n([^*]*\n)+Enter path.*': perl,
            r'\*\*REPEATMASKER INSTALLATION DIRECTORY\*\*\n([^*]*\n)+Enter path.*': self.installdir,
            r'\*\*TRF PROGRAM\*\*\n([^*]*\n)+Enter path.*': trf,
            # search engine installation path (location of /bin subdirectory)
            # also enter 'Y' to confirm + '5' ("Done") to complete selection process for search engine
            r'\*\*.* INSTALLATION PATH\*\*\n([^*]*\n)+Enter path.*': search_engine_bindir + '\nY\n5',
        }
        run_cmd_qa(cmd, qa, std_qa=std_qa, log_all=True, simple=True, log_ok=True)
Example #13
    def build_step(self):
        """Custom build procedure for TINKER."""

        change_dir(os.path.join(self.cfg['start_dir'], 'source'))

        run_cmd(os.path.join(self.cfg['start_dir'], self.build_subdir, 'compile.make'))
        run_cmd(os.path.join(self.cfg['start_dir'], self.build_subdir, 'library.make'))
        run_cmd(os.path.join(self.cfg['start_dir'], self.build_subdir, 'link.make'))
Example #14
    def install_step(self):
        """MATLAB install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        if LooseVersion(self.version) >= LooseVersion('2016b'):
            jdir = os.path.join(self.cfg['start_dir'], 'sys', 'java', 'jre', 'glnxa64', 'jre', 'bin')
            for perm_dir in [os.path.join(self.cfg['start_dir'], 'bin', 'glnxa64'), jdir]:
                adjust_permissions(perm_dir, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_opts = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_opts + self.cfg['preinstallopts']
        if LooseVersion(self.version) >= LooseVersion('2016b'):
            change_dir(self.builddir)

        # MATLAB installer ignores TMPDIR (always uses /tmp) and might need a large tmpdir
        tmpdir = "-tmpdir %s" % tempfile.mkdtemp()

        keys = self.cfg['key']
        if keys is None:
            keys = os.getenv('EB_MATLAB_KEY', '00000-00000-00000-00000-00000-00000-00000-00000-00000-00000')
        if isinstance(keys, basestring):
            keys = keys.split(',')

        # Make one install for each key
        for key in keys:
            cmd = ' '.join([
                self.cfg['preinstallopts'],
                src,
                '-v',
                tmpdir,
                '-inputFile',
                self.configfile,
                '-fileInstallationKey',
                key,
                self.cfg['installopts'],
            ])
            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            # check installer output for known signs of trouble
            patterns = [
                "Error: You have entered an invalid File Installation Key",
            ]
            for pattern in patterns:
                regex = re.compile(pattern, re.I)
                if regex.search(out):
                    raise EasyBuildError("Found error pattern '%s' in output of installation command '%s': %s",
                                         regex.pattern, cmd, out)
Example #15
    def extract_step(self):
        """Custom extract step for NAMD, we need to extract charm++ so we can patch it."""
        super(EB_NAMD, self).extract_step()

        change_dir(self.src[0]['finalpath'])
        self.charm_tarballs = glob.glob('charm-*.tar')
        if len(self.charm_tarballs) != 1:
            raise EasyBuildError("Expected to find exactly one tarball for Charm++, found: %s", self.charm_tarballs)

        extract_file(self.charm_tarballs[0], os.getcwd())
Example #16
    # nested helper: 'self' and 'makefiles_fixes' come from the enclosing scope (this is not a top-level function)
    def find_build_subdir(pattern):
        """Change to the subdirectory that matches the given pattern, patch its makefile and build there."""
        subdir = glob.glob(os.path.join(self.builddir, pattern))
        if subdir:
            change_dir(subdir[0])
            apply_regex_substitutions('makefile', makefiles_fixes)
            super(EB_BWISE, self).build_step()
            return subdir[0]
        else:
            raise EasyBuildError("Could not find a subdirectory matching the pattern %s", pattern)
Example #17
    def test_step(self):
        """Custom built-in test procedure for WRF-Fire."""
        if self.cfg['runtest']:
            change_dir(os.path.join(self.cfg['start_dir'], 'WRFV3', 'test', 'em_fire', 'hill'))

            if self.cfg['buildtype'] in ['dmpar', 'smpar', 'dm+sm']:
                test_cmd = "ulimit -s unlimited && %s && %s" % (self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                                                                self.toolchain.mpi_cmd_for("./wrf.exe", 2))
            else:
                test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe"
            run_cmd(test_cmd, simple=True, log_all=True, log_ok=True)
Example #18
    def extract_step(self):
        """Unpack the source"""
        if LooseVersion(self.version) < LooseVersion('1.7'):

            copy_file(self.src[0]['path'], self.builddir)
            adjust_permissions(os.path.join(self.builddir, self.src[0]['name']), stat.S_IXUSR, add=True)

            change_dir(self.builddir)
            run_cmd(os.path.join(self.builddir, self.src[0]['name']), log_all=True, simple=True, inp='')
        else:
            PackedBinary.extract_step(self)
Example #19
    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""

        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            travis_branch = os.environ.get('TRAVIS_BRANCH', None)
            if travis_branch and travis_branch != 'master':

                if not self.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
                out, ec = run_cmd(cmd, simple=False)
                changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
                print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames:
                    match = None
                    for ec in self.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs)
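
The list of changed files simply comes from 'git diff --name-only' against the pull request's target branch. Stand-alone, the same query could be done with subprocess (the default branch name here is only a placeholder):

import os
import subprocess

def changed_easyconfigs(target_branch='develop'):
    """Return the filenames of easyconfigs added/modified relative to the target branch."""
    cmd = ['git', 'diff', '--name-only', '--diff-filter=AM', '%s...HEAD' % target_branch]
    out = subprocess.check_output(cmd).decode()
    return [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]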
Example #20
    def extensions_step(self):
        """Build & Install both Python and R extension"""
        # we start with the python bindings
        self.py_ext.src = os.path.join(self.mxnet_src_dir, "python")
        change_dir(self.py_ext.src)

        self.py_ext.prerun()
        self.py_ext.run(unpack_src=False)
        self.py_ext.postrun()

        # next up, the R bindings
        self.r_ext.src = os.path.join(self.mxnet_src_dir, "R-package")
        change_dir(self.r_ext.src)
        mkdir("inst")
        symlink(os.path.join(self.installdir, "lib"), os.path.join("inst", "libs"))
        symlink(os.path.join(self.installdir, "include"), os.path.join("inst", "include"))

        # MXNet doesn't provide a list of its R dependencies by default
        write_file("NAMESPACE", R_NAMESPACE)
        change_dir(self.mxnet_src_dir)
        self.r_ext.prerun()
        # MXNet is just weird. To install the R extension, we have to:
        # - First install the extension as it is
        # - Let R export the extension again. By doing this, all the dependencies get
        #   correctly filled and some mappings are done
        # - Reinstall the exported version
        self.r_ext.run()
        run_cmd("R_LIBS=%s Rscript -e \"require(mxnet); mxnet:::mxnet.export(\\\"R-package\\\")\"" % self.installdir)
        change_dir(self.r_ext.src)
        self.r_ext.run()
        self.r_ext.postrun()
Example #21
    def build_step(self):
        """Custom build procedure for WRF-Fire."""

        cmd = './compile'
        if self.cfg['parallel']:
            cmd += " -j %d" % self.cfg['parallel']

        # first, build WRF part
        change_dir(os.path.join(self.cfg['start_dir'], 'WRFV3'))
        (out, ec) = run_cmd(cmd + ' em_fire', log_all=True, simple=False, log_ok=True)

        # next, build WPS part
        change_dir(os.path.join(self.cfg['start_dir'], 'WPS'))
        (out, ec) = run_cmd('./compile', log_all=True, simple=False, log_ok=True)
Example #22
    def install_step(self):
        """Custom install procedure for DOLFIN: also install Python bindings."""
        super(EB_DOLFIN, self).install_step()

        if LooseVersion(self.version) >= LooseVersion('2018.1'):
            # see https://bitbucket.org/fenics-project/dolfin/issues/897/switch-from-swig-to-pybind11-for-python
            # and https://github.com/FEniCS/dolfin/blob/master/python/README.rst
            cwd = change_dir(os.path.join(self.start_dir, 'python'))

            env.setvar('CMAKE_PREFIX_PATH', self.installdir)
            env.setvar('PYBIND11_DIR', get_software_root('pybind11'))

            run_cmd("pip install --prefix %s ." % self.installdir)

            change_dir(cwd)
Example #23
    def run(self, unpack_src=False):
        """Common operations for extensions: unpacking sources, patching, ..."""

        # unpack file if desired
        if unpack_src:
            targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
            self.ext_dir = extract_file("%s" % self.src, targetdir, extra_options=self.unpack_options)

            if self.start_dir and os.path.isdir(self.start_dir):
                self.log.debug("Using start_dir: %s", self.start_dir)
                change_dir(self.start_dir)

        # patch if needed
        if self.patches:
            for patchfile in self.patches:
                if not apply_patch(patchfile, self.ext_dir):
                    raise EasyBuildError("Applying patch %s failed", patchfile)
Example #24
    def build_step(self):
        """Build Clang stage 1, 2, 3"""

        # Stage 1: build using system compiler.
        self.log.info("Building stage 1")
        change_dir(self.llvm_obj_dir_stage1)
        super(EB_Clang, self).build_step()

        if self.cfg['bootstrap']:
            # Stage 1: run tests.
            self.run_clang_tests(self.llvm_obj_dir_stage1)

            self.log.info("Building stage 2")
            self.build_with_prev_stage(self.llvm_obj_dir_stage1, self.llvm_obj_dir_stage2)
            self.run_clang_tests(self.llvm_obj_dir_stage2)

            self.log.info("Building stage 3")
            self.build_with_prev_stage(self.llvm_obj_dir_stage2, self.llvm_obj_dir_stage3)
Example #25
    def make_module_extra_numpy_include(self):
        """
        Return update statements for $CPATH specifically for numpy
        """
        numpy_core_subdir = os.path.join('numpy', 'core')
        numpy_core_dirs = []
        cwd = change_dir(self.installdir)
        for pylibdir in self.all_pylibdirs:
            numpy_core_dirs.extend(glob.glob(os.path.join(pylibdir, numpy_core_subdir)))
            numpy_core_dirs.extend(glob.glob(os.path.join(pylibdir, 'numpy*.egg', numpy_core_subdir)))
        change_dir(cwd)

        txt = ''
        for numpy_core_dir in numpy_core_dirs:
            txt += self.module_generator.prepend_paths('CPATH', os.path.join(numpy_core_dir, 'include'))
            for lib_env_var in ('LD_LIBRARY_PATH', 'LIBRARY_PATH'):
                txt += self.module_generator.prepend_paths(lib_env_var, os.path.join(numpy_core_dir, 'lib'))

        return txt
Example #26
    def install_step(self):
        """Custom install procedure for Stata."""

        change_dir(self.installdir)

        cmd = os.path.join(self.cfg['start_dir'], 'install')
        std_qa = {
            r"Do you wish to continue\?\s*\(y/n or q to quit\)": 'y',
            r"Are you sure you want to install into .*\?\s*\(y/n or q\)": 'y',
            r"Okay to proceed\s*\(y/n or q to quit\)": 'y',
        }
        no_qa = [
            "About to proceed with installation:",
            "uncompressing files",
            "extracting files",
            "setting permissions",
        ]
        run_cmd_qa(cmd, {}, no_qa=no_qa, std_qa=std_qa, log_all=True, simple=True)

        print_msg("Note: you need to manually run ./stinit in %s to initialise the license for Stata!" % self.installdir)
Example #27
    def build_with_prev_stage(self, prev_obj, next_obj):
        """Build Clang stage N using Clang stage N-1"""

        # Create and enter build directory.
        mkdir(next_obj)
        change_dir(next_obj)

        # Configure.
        CC = os.path.join(prev_obj, 'bin', 'clang')
        CXX = os.path.join(prev_obj, 'bin', 'clang++')

        options = "-DCMAKE_INSTALL_PREFIX=%s " % self.installdir
        options += "-DCMAKE_C_COMPILER='%s' " % CC
        options += "-DCMAKE_CXX_COMPILER='%s' " % CXX
        options += self.cfg['configopts']

        self.log.info("Configuring")
        run_cmd("cmake %s %s" % (options, self.llvm_src_dir), log_all=True)

        self.log.info("Building")
        run_cmd("make %s" % self.make_parallel_opts, log_all=True)
Example #28
    def build_step(self):
        """Custom build procedure for Doris."""
        common_buildopts = self.cfg['buildopts']

        # build Doris
        change_dir(os.path.join(self.cfg['start_dir'], 'src'))

        # override some of the settings via options to 'make'
        lflags = "-L%s -lfftw3 " % os.path.join(get_software_root('FFTW'), 'lib')
        lflags += "-L%s %s" % (os.getenv('LAPACK_LIB_DIR'), os.getenv('LIBLAPACK_MT'))
        self.cfg.update('buildopts', 'LFLAGS="%s"' % lflags)
        self.cfg.update('buildopts', 'CFLAGSOPT="%s \$(DEFS)"' % os.getenv('CXXFLAGS'))

        super(EB_Doris, self).build_step()

        # build SARtools
        change_dir(os.path.join(self.cfg['start_dir'], 'SARtools'))

        self.cfg['buildopts'] = common_buildopts
        self.cfg.update('buildopts', 'CC="%s"' % os.getenv('CXX'))
        cflags = os.getenv('CXXFLAGS') + " -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE"
        self.cfg.update('buildopts', 'CFLAGS="%s"' % cflags)

        super(EB_Doris, self).build_step()

        # build ENVISAT_TOOLS
        change_dir(os.path.join(self.cfg['start_dir'], 'ENVISAT_TOOLS'))

        self.cfg['buildopts'] = common_buildopts
        self.cfg.update('buildopts', 'CC="%s"' % os.getenv('CC'))
        self.cfg.update('buildopts', 'CFLAGS="%s"' % os.getenv('CFLAGS'))

        super(EB_Doris, self).build_step()
Example #29
    def test_step(self):
        """Custom built-in test procedure for TINKER."""
        if self.cfg['runtest']:
            # copy tests, params and built binaries to temporary directory for testing
            tmpdir = tempfile.mkdtemp()
            testdir = os.path.join(tmpdir, 'test')

            mkdir(os.path.join(tmpdir, 'bin'))
            binaries = glob.glob(os.path.join(self.cfg['start_dir'], 'source', '*.x'))
            for binary in binaries:
                copy_file(binary, os.path.join(tmpdir, 'bin', os.path.basename(binary)[:-2]))
            copy_dir(os.path.join(self.cfg['start_dir'], 'test'), testdir)
            copy_dir(os.path.join(self.cfg['start_dir'], 'params'), os.path.join(tmpdir, 'params'))

            change_dir(testdir)

            # run all tests via the provided 'run' scripts
            tests = glob.glob(os.path.join(testdir, '*.run'))
            # gpcr takes too long (~1h), ifabp fails due to input issues (?)
            tests = [t for t in tests if not (t.endswith('gpcr.run') or t.endswith('ifabp.run'))]
            for test in tests:
                run_cmd(test)
Example #30
    def install_step(self):
        """MATLAB install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        if LooseVersion(self.version) >= LooseVersion('2016b'):
            jdir = os.path.join(self.cfg['start_dir'], 'sys', 'java', 'jre', 'glnxa64', 'jre', 'bin')
            for perm_dir in [os.path.join(self.cfg['start_dir'], 'bin', 'glnxa64'), jdir]:
                adjust_permissions(perm_dir, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_opts = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_opts + self.cfg['preinstallopts']
        if LooseVersion(self.version) >= LooseVersion('2016b'):
            change_dir(self.builddir)

        cmd = "%s %s -v -inputFile %s %s" % (self.cfg['preinstallopts'], src, self.configfile, self.cfg['installopts'])
        (out, _) = run_cmd(cmd, log_all=True, simple=False)

        # check installer output for known signs of trouble
        patterns = [
            "Error: You have entered an invalid File Installation Key",
        ]
        for pattern in patterns:
            regex = re.compile(pattern, re.I)
            if regex.search(out):
                raise EasyBuildError("Found error pattern '%s' in output of installation command '%s': %s",
                                     regex.pattern, cmd, out)
Example #31
    def configure_step(self):
        """
        FreeFem++ configure should run twice.
        First to configure PETSc (and then build it),
        then to configure FreeFem++ with the built PETSc.
        """

        # first Autoreconf has to be run
        if not get_software_root('Autoconf'):
            raise EasyBuildError("Autoconf is required to build FreeFem++. Please add it as build dependency")

        run_cmd("autoreconf -i", log_all=True, simple=False)

        configopts = [
            '--disable-optim',  # disable custom optimizations (not needed, $CFLAGS set by EasyBuild is picked up)
            '--enable-download',  # enable downloading of dependencies
            '--disable-openblas',  # do not download OpenBLAS
        ]

        blas_family = self.toolchain.blas_family()
        if blas_family == toolchain.OPENBLAS:
            openblas_root = get_software_root('OpenBLAS')
            configopts.append('--with-blas=%s' % os.path.join(openblas_root, 'lib'))
        elif blas_family == toolchain.INTELMKL:
            mkl_root = get_software_root('imkl')
            configopts.append("--with-mkl=%s" % os.path.join(mkl_root, 'mkl', 'lib', 'intel64'))

        # specify which MPI to build with based on MPI component of toolchain
        if self.toolchain.mpi_family() in toolchain.OPENMPI:
            self.cfg.update('configopts', '--with-mpi=openmpi')
        elif self.toolchain.mpi_family() in toolchain.INTELMPI:
            self.cfg.update('configopts', '--with-mpi=mpic++')

        # if usempi is enabled, configure with --with-mpi=yes
        elif self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', '--with-mpi=yes')
        else:
            self.cfg.update('configopts', '--with-mpi=no')

        # check dependencies and add the corresponding configure options for FreeFEM
        hdf5_root = get_software_root('HDF5')
        if hdf5_root:
            configopts.append('--with-hdf5=%s ' % os.path.join(hdf5_root, 'bin', 'h5pcc'))

        gsl_root = get_software_root('GSL')
        if gsl_root:
            configopts.append('--with-gsl-prefix=%s' % gsl_root)

        petsc_root = get_software_root('PETSc')
        if petsc_root:
            configopts.append('--with-petsc=%s' % os.path.join(petsc_root, 'lib', 'petsc', 'conf', 'petscvariables'))

        bemtool_root = get_software_root('BemTool')
        if bemtool_root:
            configopts.append('--with-bem-include=%s' % bemtool_root)

        for configopt in configopts:
            self.cfg.update('configopts', configopt)

        # initial configuration
        out = super(EB_FreeFEM, self).configure_step()

        regex = re.compile("WARNING: unrecognized options: (.*)", re.M)
        res = regex.search(out)
        if res:
            raise EasyBuildError("One or more configure options not recognized: %s" % res.group(1))

        if not petsc_root:
            # re-create installation dir (deletes old installation),
            # then set keeppreviousinstall to True (to avoid deleting PETSc installation)
            self.make_installdir()
            self.cfg['keeppreviousinstall'] = True

            # configure and make petsc-slepc
            # download & build PETSc as recommended by FreeFEM if no PETSc dependency was provided
            cwd = change_dir(os.path.join('3rdparty', 'ff-petsc'))

            cmd = ['make']
            if self.cfg['parallel']:
                cmd.append('-j %s' % self.cfg['parallel'])
            cmd.append('petsc-slepc')

            run_cmd(' '.join(cmd), log_all=True, simple=False)

            change_dir(cwd)

            # reconfigure for FreeFEM build
            super(EB_FreeFEM, self).configure_step()
Example #32
def template_module_only_test(self,
                              easyblock,
                              name,
                              version='1.3.2',
                              extra_txt='',
                              tmpdir=None):
    """Test whether all easyblocks are compatible with --module-only."""

    if tmpdir is None:
        tmpdir = tempfile.mkdtemp()

    class_regex = re.compile(r"^class (.*)\(.*", re.M)

    self.log.debug("easyblock: %s" % easyblock)

    # read easyblock Python module
    f = open(easyblock, "r")
    txt = f.read()
    f.close()

    # obtain easyblock class name using regex
    res = class_regex.search(txt)
    if res:
        ebname = res.group(1)
        self.log.debug("Found class name for easyblock %s: %s" %
                       (easyblock, ebname))

        toolchain = None

        # figure out list of mandatory variables, and define with dummy values as necessary
        app_class = get_easyblock_class(ebname)

        # easyblocks deriving from IntelBase require a license file to be found for --module-only
        bases = list(app_class.__bases__)
        for base in copy.copy(bases):
            bases.extend(base.__bases__)
        if app_class == IntelBase or IntelBase in bases:
            os.environ['INTEL_LICENSE_FILE'] = os.path.join(
                tmpdir, 'intel.lic')
            write_file(os.environ['INTEL_LICENSE_FILE'], '# dummy license')

        elif app_class == EB_IMOD:
            # $JAVA_HOME must be set for IMOD
            os.environ['JAVA_HOME'] = tmpdir

        elif app_class == PythonBundle:
            # $EBROOTPYTHON must be set for PythonBundle easyblock
            os.environ[
                'EBROOTPYTHON'] = '/fake/install/prefix/Python/2.7.14-foss-2018a'

        elif app_class == GoPackage:
            # $EBROOTGO must be set for GoPackage easyblock
            os.environ['EBROOTGO'] = '/fake/install/prefix/Go/1.14'
            os.environ['EBVERSIONGO'] = '1.14'

        elif app_class == EB_OpenFOAM:
            # proper toolchain must be used for OpenFOAM(-Extend), to determine value to set for $WM_COMPILER
            write_file(
                os.path.join(tmpdir, 'GCC', '4.9.3-2.25'), '\n'.join([
                    '#%Module',
                    'setenv EBROOTGCC %s' % tmpdir,
                    'setenv EBVERSIONGCC 4.9.3',
                ]))
            write_file(
                os.path.join(tmpdir, 'OpenMPI', '1.10.2-GCC-4.9.3-2.25'),
                '\n'.join([
                    '#%Module',
                    'setenv EBROOTOPENMPI %s' % tmpdir,
                    'setenv EBVERSIONOPENMPI 1.10.2',
                ]))
            write_file(
                os.path.join(tmpdir, 'gompi', '2016a'), '\n'.join([
                    '#%Module',
                    'module load GCC/4.9.3-2.25',
                    'module load OpenMPI/1.10.2-GCC-4.9.3-2.25',
                ]))
            os.environ['MODULEPATH'] = tmpdir
            toolchain = {'name': 'gompi', 'version': '2016a'}

        # extend easyconfig to make sure mandatory custom easyconfig parameters are defined
        extra_options = app_class.extra_options()
        for (key, val) in extra_options.items():
            if val[2] == MANDATORY:
                # use default value if any is set, otherwise use "foo"
                if val[0]:
                    test_param = val[0]
                else:
                    test_param = 'foo'
                extra_txt += '%s = "%s"\n' % (key, test_param)

        # write easyconfig file
        self.writeEC(ebname,
                     name=name,
                     version=version,
                     extratxt=extra_txt,
                     toolchain=toolchain)

        # take into account that for some easyblocks, particular dependencies are hard required early on
        # (in prepare_step, for example);
        # we just set the corresponding $EBROOT* environment variables here to fool it...
        req_deps = {
            # QScintilla easyblock requires that either PyQt or PyQt5 are available as dependency
            # (PyQt is easier, since PyQt5 is only supported for sufficiently recent QScintilla versions)
            'qscintilla.py': [('PyQt', '4.12')],
            # MotionCor2 and Gctf easyblock requires CUDA as dependency
            'motioncor2.py': [('CUDA', '10.1.105')],
            'gctf.py': [('CUDA', '10.1.105')],
        }
        easyblock_fn = os.path.basename(easyblock)
        for (dep_name, dep_version) in req_deps.get(easyblock_fn, []):
            dep_root_envvar = get_software_root_env_var_name(dep_name)
            os.environ[dep_root_envvar] = '/value/should/not/matter'
            dep_version_envvar = get_software_version_env_var_name(dep_name)
            os.environ[dep_version_envvar] = dep_version

        # initialize easyblock
        # if this doesn't fail, the test succeeds
        app = app_class(EasyConfig(self.eb_file))

        assert app.installdir.startswith(TMPDIR)  # Just to be sure...
        mkdir(app.installdir, parents=True)  # Pretend this exists

        # run all steps, most should be skipped
        orig_workdir = os.getcwd()
        try:
            app.run_all_steps(run_test_cases=False)
        finally:
            change_dir(orig_workdir)

        if os.path.basename(easyblock) == 'modulerc.py':
            # .modulerc must be cleaned up to avoid causing trouble (e.g. "Duplicate version symbol" errors)
            modulerc = os.path.join(TMPDIR, 'modules', 'all', name,
                                    '.modulerc')
            if os.path.exists(modulerc):
                remove_file(modulerc)

            modulerc += '.lua'
            if os.path.exists(modulerc):
                remove_file(modulerc)
        else:
            modfile = os.path.join(TMPDIR, 'modules', 'all', name, version)
            if toolchain:
                modfile = '-'.join(
                    [modfile, toolchain['name'], toolchain['version']])
            luamodfile = '%s.lua' % modfile
            self.assertTrue(
                os.path.exists(modfile) or os.path.exists(luamodfile),
                "Module file %s or %s was generated" % (modfile, luamodfile))

            if os.path.exists(modfile):
                modtxt = read_file(modfile)
            else:
                modtxt = read_file(luamodfile)

            none_regex = re.compile('None')
            self.assertFalse(none_regex.search(modtxt),
                             "None not found in module file: %s" % modtxt)

        # cleanup
        app.close_log()
        remove_file(app.logfile)
        remove_dir(tmpdir)
    else:
        self.assertTrue(False, "Class found in easyblock %s" % easyblock)
Example #33
    def test_step(self):
        """Build and run tests included in the WRF distribution."""
        if self.cfg['runtest']:

            if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
                self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                              self.cfg['buildtype'])
                return

            # get list of WRF test cases
            self.testcases = []
            if os.path.exists('test'):
                self.testcases = os.listdir('test')

            elif not self.dry_run:
                raise EasyBuildError(
                    "Test directory not found, failed to determine list of test cases"
                )

            # exclude 2d testcases in parallel WRF builds
            if self.cfg['buildtype'] in self.parallel_build_types:
                self.testcases = [
                    test for test in self.testcases if '2d_' not in test
                ]

            # exclude real testcases
            self.testcases = [
                test for test in self.testcases if not test.endswith("_real")
            ]

            self.log.debug("intermediate list of testcases: %s" %
                           self.testcases)

            # exclude tests that should not be run
            for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
                if test in self.testcases:
                    self.testcases.remove(test)

            # some tests hang when WRF is built with Intel compilers
            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                for test in ["em_heldsuarez"]:
                    if test in self.testcases:
                        self.testcases.remove(test)

            # determine number of MPI ranks to use in tests (1/2 of available processors + 1);
            # we need to limit max number of MPI ranks (8 is too high for some tests, 4 is OK),
            # since otherwise run may fail because domain size is too small
            n_mpi_ranks = min(self.cfg['parallel'] // 2 + 1, 4)

            # prepare run command

            # stack limit needs to be set to unlimited for WRF to work well
            if self.cfg['buildtype'] in self.parallel_build_types:
                test_cmd = "ulimit -s unlimited && %s && %s" % (
                    self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                    self.toolchain.mpi_cmd_for("./wrf.exe", n_mpi_ranks))
            else:
                test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe >rsl.error.0000 2>&1"

            # regex to check for successful test run
            re_success = re.compile("SUCCESS COMPLETE WRF")

            def run_test():
                """Run a single test and check for success."""

                # run test
                (_, ec) = run_cmd(test_cmd,
                                  log_all=False,
                                  log_ok=False,
                                  simple=False)

                # read output file
                out_fn = 'rsl.error.0000'
                if os.path.exists(out_fn):
                    out_txt = read_file(out_fn)
                else:
                    out_txt = 'FILE NOT FOUND'

                if ec == 0:
                    # exit code zero suggests success, but let's make sure...
                    if re_success.search(out_txt):
                        self.log.info(
                            "Test %s ran successfully (found '%s' in %s)",
                            test, re_success.pattern, out_fn)
                    else:
                        raise EasyBuildError(
                            "Test %s failed, pattern '%s' not found in %s: %s",
                            test, re_success.pattern, out_fn, out_txt)
                else:
                    # non-zero exit code means trouble, show command output
                    raise EasyBuildError(
                        "Test %s failed with exit code %s, output: %s", test,
                        ec, out_txt)

                # clean up stuff that gets in the way
                fn_prefs = [
                    "wrfinput_", "namelist.output", "wrfout_", "rsl.out.",
                    "rsl.error."
                ]
                for filename in os.listdir('.'):
                    for pref in fn_prefs:
                        if filename.startswith(pref):
                            remove_file(filename)
                            self.log.debug("Cleaned up file %s", filename)

            # build and run each test case individually
            for test in self.testcases:

                self.log.debug("Building and running test %s" % test)

                # build and install
                cmd = "tcsh ./compile %s %s" % (self.par, test)
                run_cmd(cmd, log_all=True, simple=True)

                # run test
                try:
                    prev_dir = change_dir('run')

                    if test in ["em_fire"]:

                        # handle tests with subtests separately
                        testdir = os.path.join("..", "test", test)

                        for subtest in [
                                x for x in os.listdir(testdir)
                                if os.path.isdir(x)
                        ]:

                            subtestdir = os.path.join(testdir, subtest)

                            # link required files
                            for filename in os.listdir(subtestdir):
                                if os.path.exists(filename):
                                    remove_file(filename)
                                symlink(os.path.join(subtestdir, filename),
                                        filename)

                            # run test
                            run_test()

                    else:

                        # run test
                        run_test()

                    change_dir(prev_dir)

                except OSError as err:
                    raise EasyBuildError("An error occurred when running test %s: %s", test, err)
Example #34
def package_with_fpm(easyblock):
    """
    This function will build a package using fpm and return the directory where the packages are
    """

    workdir = tempfile.mkdtemp(prefix='eb-pkgs-')
    pkgtype = build_option('package_type')
    _log.info("Will be creating %s package(s) in %s", pkgtype, workdir)

    origdir = change_dir(workdir)

    package_naming_scheme = ActivePNS()

    pkgname = package_naming_scheme.name(easyblock.cfg)
    pkgver = package_naming_scheme.version(easyblock.cfg)
    pkgrel = package_naming_scheme.release(easyblock.cfg)

    _log.debug("Got the PNS values name: %s version: %s release: %s", pkgname,
               pkgver, pkgrel)
    cmdlist = [
        PKG_TOOL_FPM,
        '--workdir',
        workdir,
        '--name',
        pkgname,
        '--provides',
        pkgname,
        '-t',
        pkgtype,  # target
        '-s',
        'dir',  # source
        '--version',
        pkgver,
        '--iteration',
        pkgrel,
        '--description',
        easyblock.cfg["description"],
        '--url',
        easyblock.cfg["homepage"],
    ]

    extra_pkg_options = build_option('package_tool_options')
    if extra_pkg_options:
        cmdlist.extend(extra_pkg_options.split(' '))

    if build_option('debug'):
        cmdlist.append('--debug')

    deps = []
    if easyblock.toolchain.name != DUMMY_TOOLCHAIN_NAME:
        toolchain_dict = easyblock.toolchain.as_dict()
        deps.extend([toolchain_dict])

    deps.extend(easyblock.cfg.dependencies())

    _log.debug(
        "The dependencies to be added to the package are: %s",
        pprint.pformat([easyblock.toolchain.as_dict()] +
                       easyblock.cfg.dependencies()))
    for dep in deps:
        if dep.get('external_module', False):
            _log.debug("Skipping dep marked as external module: %s",
                       dep['name'])
        else:
            _log.debug("The dep added looks like %s ", dep)
            dep_pkgname = package_naming_scheme.name(dep)
            cmdlist.extend(["--depends", dep_pkgname])

    # Excluding the EasyBuild logs and test reports that might be in the installdir
    exclude_files_globs = [
        os.path.join(log_path(ec=easyblock.cfg), "*.log"),
        os.path.join(log_path(ec=easyblock.cfg), "*.md"),
    ]
    # stripping off leading / to match expected glob in fpm
    for exclude_files_glob in exclude_files_globs:
        cmdlist.extend([
            '--exclude',
            os.path.join(easyblock.installdir.lstrip(os.sep),
                         exclude_files_glob)
        ])

    cmdlist.extend([
        easyblock.installdir,
        easyblock.module_generator.get_module_filepath(),
    ])
    cmd = ' '.join(cmdlist)
    _log.debug("The flattened cmdlist looks like: %s", cmd)
    run_cmd(cmdlist, log_all=True, simple=True, shell=False)

    _log.info("Created %s package(s) in %s", pkgtype, workdir)

    change_dir(origdir)

    return workdir
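
Note that run_cmd() is given the command as a list with shell=False, so arguments such as the package description and homepage are passed to fpm verbatim instead of being re-parsed by a shell; the space-joined 'cmd' string is only used for logging. A minimal stand-alone illustration of that design choice with plain subprocess:

import subprocess

cmdlist = ['echo', 'a description with spaces & shell metacharacters']
subprocess.run(cmdlist, check=True)  # each list element is passed as one argument, no shell involved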
Example #35
    def install_step(self):
        """Install ABAQUS using 'setup'."""
        if LooseVersion(self.version) >= LooseVersion('2016'):
            change_dir(os.path.join(self.cfg['start_dir'], '1'))
            qa = {
                "Enter selection (default: Install):": '',
            }
            no_qa = [
                '___',
                r'\(\d+ MB\)',
            ]
            std_qa = {
                # disable installation of Tosca (6) and Isight (7)
                r"Isight\nEnter selection \(default: Next\):": '6\n7\n\n',
                r"(?<!Isight)\nEnter selection \(default: Next\):": '',
                r"SIMULIA[0-9]*doc.*:": os.path.join(self.installdir, 'doc'),
                r"SimulationServices.*:": os.path.join(self.installdir, 'sim'),
                r"Choose the CODE installation directory.*:\n.*\n\n.*:": os.path.join(self.installdir, 'sim'),
                r"SIMULIA/CAE.*:": os.path.join(self.installdir, 'cae'),
                r"location of your Abaqus services \(solvers\).*(\n.*){8}:\s*": os.path.join(self.installdir, 'sim'),
                r"Default.*SIMULIA/Commands\]:\s*": os.path.join(self.installdir, 'Commands'),
                r"Default.*SIMULIA/CAE/plugins.*:\s*": os.path.join(self.installdir, 'plugins'),
                r"License Server 1\s*(\n.*){3}:": 'abaqusfea',  # bypass value for license server
                r"License Server . \(redundant\)\s*(\n.*){3}:": '',
                r"Please choose an action:": '1',
                r"SIMULIA/Tosca.*:": os.path.join(self.installdir, 'tosca'),
                r"location of your existing ANSA installation.*(\n.*){8}:": '',
                r"FLUENT Path.*(\n.*){7}:": '',
            }
            run_cmd_qa('./StartTUI.sh', qa, no_qa=no_qa, std_qa=std_qa, log_all=True, simple=True, maxhits=100)
        else:
            change_dir(self.builddir)
            if self.cfg['install_cmd'] is None:
                self.cfg['install_cmd'] = "%s/%s-%s/setup" % (self.builddir, self.name, self.version.split('-')[0])
                self.cfg['install_cmd'] += " -replay %s" % self.replayfile
                if LooseVersion(self.version) < LooseVersion("6.13"):
                    self.cfg['install_cmd'] += " -nosystemcheck"
            super(EB_ABAQUS, self).install_step()

        if LooseVersion(self.version) >= LooseVersion('2016'):
            # also install hot fixes (if any)
            hotfixes = [src for src in self.src if 'CFA' in src['name']]
            if hotfixes:
                # first install Part_3DEXP_SimulationServices hotfix(es)
                hotfix_dir = os.path.join(self.builddir, 'Part_3DEXP_SimulationServices.Linux64', '1', 'Software')
                change_dir(hotfix_dir)

                # SIMULIA_ComputeServices part
                subdirs = glob.glob('HF_SIMULIA_ComputeServices.HF*.Linux64')
                if len(subdirs) == 1:
                    subdir = subdirs[0]
                else:
                    raise EasyBuildError("Failed to find expected subdir for hotfix: %s", subdirs)

                cwd = change_dir(os.path.join(subdir, '1'))
                std_qa = {
                    "Enter selection \(default: Next\):": '',
                    "Choose the .*installation directory.*\n.*\n\n.*:": os.path.join(self.installdir, 'sim'),
                    "Enter selection \(default: Install\):": '',
                }
                run_cmd_qa('./StartTUI.sh', {}, std_qa=std_qa, log_all=True, simple=True, maxhits=100)

                # F_CAASIMULIAComputeServicesBuildTime part
                change_dir(cwd)
                subdirs = glob.glob('HF_CAASIMULIAComputeServicesBuildTime.HF*.Linux64')
                if len(subdirs) == 1:
                    subdir = subdirs[0]
                else:
                    raise EasyBuildError("Failed to find expected subdir for hotfix: %s", subdirs)

                change_dir(os.path.join(cwd, subdir, '1'))
                run_cmd_qa('./StartTUI.sh', {}, std_qa=std_qa, log_all=True, simple=True, maxhits=100)

                # next install Part_SIMULIA_Abaqus_CAE hotfix
                hotfix_dir = os.path.join(self.builddir, 'Part_SIMULIA_Abaqus_CAE.Linux64', '1', 'Software')
                change_dir(hotfix_dir)

                subdirs = glob.glob('SIMULIA_Abaqus_CAE.HF*.Linux64')
                if len(subdirs) == 1:
                    subdir = subdirs[0]
                else:
                    raise EasyBuildError("Failed to find expected subdir for hotfix: %s", subdirs)

                cwd = change_dir(os.path.join(subdir, '1'))
                std_qa = {
                    "Enter selection \(default: Next\):": '',
                    "Choose the .*installation directory.*\n.*\n\n.*:": os.path.join(self.installdir, 'cae'),
                    "Enter selection \(default: Install\):": '',
                    "Please choose an action:": '',
                }
                run_cmd_qa('./StartTUI.sh', {}, std_qa=std_qa, log_all=True, simple=True, maxhits=100)
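Each hotfix step above expects exactly one directory to match its glob and errors out otherwise. That recurring pattern can be captured in a small helper, sketched here with a generic RuntimeError (the name single_glob_match is illustrative):

import glob

def single_glob_match(pattern):
    """Return the single path matching 'pattern', raising if there is not exactly one match."""
    matches = glob.glob(pattern)
    if len(matches) != 1:
        raise RuntimeError("expected exactly one match for %s, found: %s" % (pattern, matches))
    return matches[0]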
Example #36
0
    def configure_step(self, srcdir=None, builddir=None):
        """Configure build using cmake"""

        setup_cmake_env(self.toolchain)

        if builddir is None and self.cfg.get('separate_build_dir', True):
            builddir = os.path.join(self.builddir, 'easybuild_obj')
            # For separate_build_dir we want a clean folder. So remove if it exists
            # This can happen when multiple iterations are done (e.g. shared, static, ...)
            if os.path.exists(builddir):
                self.log.warning('Build directory %s already exists (from previous iterations?). Removing...',
                                 builddir)
                remove_dir(builddir)

        if builddir:
            mkdir(builddir, parents=True)
            change_dir(builddir)
            default_srcdir = self.cfg['start_dir']
        else:
            default_srcdir = '.'

        if srcdir is None:
            if self.cfg.get('srcdir', None) is not None:
                # Note that the join returns srcdir if it is absolute
                srcdir = os.path.join(default_srcdir, self.cfg['srcdir'])
            else:
                srcdir = default_srcdir

        options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir]

        if self.installdir.startswith('/opt') or self.installdir.startswith('/usr'):
            # https://cmake.org/cmake/help/latest/module/GNUInstallDirs.html
            localstatedir = os.path.join(self.installdir, 'var')
            runstatedir = os.path.join(localstatedir, 'run')
            sysconfdir = os.path.join(self.installdir, 'etc')
            options.append("-DCMAKE_INSTALL_LOCALSTATEDIR=%s" % localstatedir)
            options.append("-DCMAKE_INSTALL_RUNSTATEDIR=%s" % runstatedir)
            options.append("-DCMAKE_INSTALL_SYSCONFDIR=%s" % sysconfdir)

        if '-DCMAKE_BUILD_TYPE=' in self.cfg['configopts']:
            if self.cfg.get('build_type') is not None:
                self.log.warning('CMAKE_BUILD_TYPE is set in configopts. Ignoring build_type')
        else:
            options.append('-DCMAKE_BUILD_TYPE=%s' % self.build_type)

        # Add -fPIC flag if necessary
        if self.toolchain.options['pic']:
            options.append('-DCMAKE_POSITION_INDEPENDENT_CODE=ON')

        if self.cfg['generator']:
            options.append('-G "%s"' % self.cfg['generator'])

        # sjb: comment this section out as it involves bringing in a framework change as well
        # pass --sysroot value down to CMake,
        # and enable using absolute paths to compiler commands to avoid
        # that CMake picks up compiler from sysroot rather than toolchain compiler...
        # sysroot = build_option('sysroot')
        # if sysroot:
        #     options.append('-DCMAKE_SYSROOT=%s' % sysroot)
        #     self.log.info("Using absolute path to compiler commands because of alternate sysroot %s", sysroot)
        #     self.cfg['abs_path_compilers'] = True

        # Set flag for shared libs if requested
        # Not adding one allows the project to choose a default
        build_shared_libs = self.cfg.get('build_shared_libs')
        if build_shared_libs is not None:
            # Contrary to other options build_shared_libs takes precedence over configopts which may be unexpected.
            # This is to allow self.lib_ext to be determined correctly.
            # Usually you want to remove -DBUILD_SHARED_LIBS from configopts and set build_shared_libs to True or False
            # If you need it in configopts, don't set build_shared_libs (or explicitly set it to `None`, the default)
            if '-DBUILD_SHARED_LIBS=' in self.cfg['configopts']:
                print_warning('Ignoring -DBUILD_SHARED_LIBS in configopts because build_shared_libs is set')
            self.cfg.update('configopts', '-DBUILD_SHARED_LIBS=%s' % ('ON' if build_shared_libs else 'OFF'))

        env_to_options = {
            'CC': 'CMAKE_C_COMPILER',
            'CFLAGS': 'CMAKE_C_FLAGS',
            'CXX': 'CMAKE_CXX_COMPILER',
            'CXXFLAGS': 'CMAKE_CXX_FLAGS',
            'F90': 'CMAKE_Fortran_COMPILER',
            'FFLAGS': 'CMAKE_Fortran_FLAGS',
        }
        for env_name, option in env_to_options.items():
            value = os.getenv(env_name)
            if value is not None:
                if option.endswith('_COMPILER') and self.cfg.get('abs_path_compilers', False):
                    value = which(value)
                    self.log.info("Using absolute path to compiler command: %s", value)
                options.append("-D%s='%s'" % (option, value))

        if build_option('rpath'):
            # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install...
            # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416
            options.append('-DCMAKE_SKIP_RPATH=ON')

        # show what CMake is doing by default
        options.append('-DCMAKE_VERBOSE_MAKEFILE=ON')

        if not self.cfg.get('allow_system_boost', False):
            # don't pick up on system Boost if Boost is included as dependency
            # - specify Boost location via -DBOOST_ROOT
            # - instruct CMake to not search for Boost headers/libraries in other places
            # - disable search for Boost CMake package configuration file
            boost_root = get_software_root('Boost')
            if boost_root:
                options.extend([
                    '-DBOOST_ROOT=%s' % boost_root,
                    '-DBoost_NO_SYSTEM_PATHS=ON',
                    '-DBoost_NO_BOOST_CMAKE=ON',
                ])

        options_string = ' '.join(options)

        if self.cfg.get('configure_cmd') == DEFAULT_CONFIGURE_CMD:
            command = ' '.join([
                self.cfg['preconfigopts'],
                DEFAULT_CONFIGURE_CMD,
                options_string,
                self.cfg['configopts'],
                srcdir])
        else:
            command = ' '.join([
                self.cfg['preconfigopts'],
                self.cfg.get('configure_cmd'),
                self.cfg['configopts']])

        (out, _) = run_cmd(command, log_all=True, simple=False)

        return out
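The env_to_options mapping above is how toolchain compiler settings end up as CMake cache options. A stripped-down sketch of that translation, outside EasyBuild and reading plain os.environ (the function name is illustrative):

import os

ENV_TO_CMAKE_OPTION = {
    'CC': 'CMAKE_C_COMPILER',
    'CXX': 'CMAKE_CXX_COMPILER',
    'F90': 'CMAKE_Fortran_COMPILER',
    'CFLAGS': 'CMAKE_C_FLAGS',
    'CXXFLAGS': 'CMAKE_CXX_FLAGS',
    'FFLAGS': 'CMAKE_Fortran_FLAGS',
}

def compiler_cache_options(environ=None):
    """Translate compiler-related environment variables into -D<option>='<value>' flags."""
    environ = os.environ if environ is None else environ
    return ["-D%s='%s'" % (option, environ[var])
            for var, option in sorted(ENV_TO_CMAKE_OPTION.items()) if environ.get(var)]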
Example #37
0
    def configure_step(self):
        """
        Configure VMD for building.
        """
        # make sure required dependencies are available
        deps = {}
        for dep in ['FLTK', 'Mesa', 'netCDF', 'Python', 'Tcl', 'Tk']:
            deps[dep] = get_software_root(dep)
            if deps[dep] is None:
                raise EasyBuildError("Required dependency %s is missing", dep)

        # optional dependencies
        for dep in ['ACTC', 'CUDA', 'OptiX']:
            deps[dep] = get_software_root(dep)

        # specify Tcl/Tk locations & libraries
        tclinc = os.path.join(deps['Tcl'], 'include')
        tcllib = os.path.join(deps['Tcl'], 'lib')
        env.setvar('TCL_INCLUDE_DIR', tclinc)
        env.setvar('TCL_LIBRARY_DIR', tcllib)

        env.setvar('TK_INCLUDE_DIR', os.path.join(deps['Tk'], 'include'))
        env.setvar('TK_LIBRARY_DIR', os.path.join(deps['Tk'], 'lib'))

        tclshortver = '.'.join(get_software_version('Tcl').split('.')[:2])
        self.cfg.update('buildopts', 'TCLLDFLAGS="-ltcl%s"' % tclshortver)

        # Python locations
        pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
        env.setvar(
            'PYTHON_INCLUDE_DIR',
            os.path.join(deps['Python'], 'include/python%s' % pyshortver))
        pylibdir = det_pylibdir()
        python_libdir = os.path.join(deps['Python'], os.path.dirname(pylibdir))
        env.setvar('PYTHON_LIBRARY_DIR', python_libdir)

        # numpy include location, easiest way to determine it is via numpy.get_include()
        out, ec = run_cmd(
            "python -c 'import numpy; print(numpy.get_include())'",
            simple=False)
        if ec:
            raise EasyBuildError(
                "Failed to determine numpy include directory: %s", out)
        else:
            env.setvar('NUMPY_INCLUDE_DIR', out.strip())

        # compiler commands
        self.cfg.update('buildopts', 'CC="%s"' % os.getenv('CC'))
        self.cfg.update('buildopts', 'CCPP="%s"' % os.getenv('CXX'))

        # source tarballs contains a 'plugins' and 'vmd-<version>' directory
        vmddir = os.path.join(self.cfg['start_dir'],
                              '%s-%s' % (self.name.lower(), self.version))

        # plugins need to be built first (see http://www.ks.uiuc.edu/Research/vmd/doxygen/compiling.html)
        change_dir(os.path.join(self.cfg['start_dir'], 'plugins'))
        cmd = "make LINUXAMD64 TCLLIB='-F%s' TCLINC='-I%s' %s" % (
            tcllib, tclinc, self.cfg['buildopts'])
        run_cmd(cmd, log_all=True, simple=False)

        # create plugins distribution
        plugindir = os.path.join(vmddir, 'plugins')
        env.setvar('PLUGINDIR', plugindir)
        self.log.info("Generating VMD plugins in %s", plugindir)
        run_cmd("make distrib %s" % self.cfg['buildopts'],
                log_all=True,
                simple=False)

        # explicitly mention whether or not we're building with CUDA/OptiX support
        if deps['CUDA']:
            self.log.info("Building with CUDA %s support",
                          get_software_version('CUDA'))
            if deps['OptiX']:
                self.log.info("Building with Nvidia OptiX %s support",
                              get_software_version('OptiX'))
            else:
                self.log.warn("Not building with Nvidia OptiX support!")
        else:
            self.log.warn("Not building with CUDA or OptiX support!")

        # see http://www.ks.uiuc.edu/Research/vmd/doxygen/configure.html
        # LINUXAMD64: Linux 64-bit
        # LP64: build VMD as 64-bit binary
        # IMD: enable support for Interactive Molecular Dynamics (e.g. to connect to NAMD for remote simulations)
        # PTHREADS: enable support for POSIX threads
        # COLVARS: enable support for collective variables (related to NAMD/LAMMPS)
        # NOSILENT: verbose build command
        self.cfg.update('configopts',
                        "LINUXAMD64 LP64 IMD PTHREADS COLVARS NOSILENT",
                        allow_duplicate=False)

        # add additional configopts based on available dependencies
        for key in deps:
            if deps[key]:
                if key == 'Mesa':
                    self.cfg.update('configopts',
                                    "OPENGL MESA",
                                    allow_duplicate=False)
                elif key == 'OptiX':
                    self.cfg.update('configopts',
                                    "LIBOPTIX",
                                    allow_duplicate=False)
                elif key == 'Python':
                    self.cfg.update('configopts',
                                    "PYTHON NUMPY",
                                    allow_duplicate=False)
                else:
                    self.cfg.update('configopts',
                                    key.upper(),
                                    allow_duplicate=False)

        # configure for building with Intel compilers specifically
        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            self.cfg.update('configopts', 'ICC', allow_duplicate=False)

        # specify install location using environment variables
        env.setvar('VMDINSTALLBINDIR', os.path.join(self.installdir, 'bin'))
        env.setvar('VMDINSTALLLIBRARYDIR',
                   os.path.join(self.installdir, 'lib'))

        # configure in vmd-<version> directory
        change_dir(vmddir)
        run_cmd("%s ./configure %s" %
                (self.cfg['preconfigopts'], self.cfg['configopts']))

        # change to 'src' subdirectory, ready for building
        change_dir(os.path.join(vmddir, 'src'))
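Outside of EasyBuild's run_cmd, the numpy include lookup used above can be done with the standard library alone; a sketch assuming numpy is importable by the target interpreter:

import subprocess

def numpy_include_dir(python_cmd='python'):
    """Return numpy's header directory, as reported by numpy.get_include()."""
    out = subprocess.check_output([python_cmd, '-c', 'import numpy; print(numpy.get_include())'])
    return out.decode('utf-8').strip()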
Example #38
0
    def configure_step(self):
        """Run CMake for stage 1 Clang."""

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir,
                                                    'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir,
                                                    'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            disable_san_tests = False
            # all sanitizer tests will fail when there's a limit on the vmem
            # this is ugly but I haven't found a cleaner way so far
            (vmemlim, ec) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "There is a virtual memory limit set of %s KB. The tests of the "
                    "sanitizers will be disabled as they need unlimited virtual "
                    "memory unless --strict=error is used." % vmemlim.strip())

            # the same goes for unlimited stacksize
            (stacklim, ec) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                    "to fail. The sanitizers tests will be disabled unless --strict=error is used."
                )

            if (disable_san_tests or self.cfg['skip_sanitizer_tests']) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # First try with GCCcore, as GCC built on top of GCCcore is just a wrapper for GCCcore and binutils,
        # instead of a full-fledged compiler
        gcc_prefix = get_software_root('GCCcore')

        # If that doesn't work, try with GCC
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')

        # If that doesn't work either, print error and exit
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts',
                        "-DGCC_INSTALL_PREFIX='%s' " % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        self.cfg['configopts'] += "-DCMAKE_BUILD_TYPE=Release "
        if self.cfg['assertions']:
            self.cfg['configopts'] += "-DLLVM_ENABLE_ASSERTIONS=ON "
        else:
            self.cfg['configopts'] += "-DLLVM_ENABLE_ASSERTIONS=OFF "

        self.cfg['configopts'] += '-DLLVM_TARGETS_TO_BUILD="%s" ' % ';'.join(
            self.cfg['build_targets'])

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
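The sanitizer-test decision above shells out to 'ulimit'; on Linux the same checks can be made in-process with the resource module. This is only an illustrative alternative, not what the easyblock itself does:

import resource

def vmem_is_limited():
    """True if a soft virtual-memory limit is in effect (analogue of 'ulimit -v')."""
    soft, _hard = resource.getrlimit(resource.RLIMIT_AS)
    return soft != resource.RLIM_INFINITY

def stack_is_unlimited():
    """True if the soft stack size limit is unlimited (analogue of 'ulimit -s')."""
    soft, _hard = resource.getrlimit(resource.RLIMIT_STACK)
    return soft == resource.RLIM_INFINITY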
Example #39
0
    def configure_common(self):
        """Common configuration for all toolchains"""

        # openmp introduces 2 major differences
        # -automatic is default: -noautomatic -auto-scalar
        # some mem-bandwidth optimisation
        if self.cfg['type'] == 'psmp':
            self.openmp = self.toolchain.get_flag('openmp')

        # determine which opt flags to use
        if self.cfg['typeopt']:
            optflags = 'OPT'
            regflags = 'OPT2'
        else:
            optflags = 'NOOPT'
            regflags = 'NOOPT'

        # make sure a MPI-2 able MPI lib is used
        mpi2 = False
        if hasattr(self.toolchain, 'MPI_FAMILY') and self.toolchain.MPI_FAMILY is not None:
            known_mpi2_fams = [toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2, toolchain.OPENMPI,
                               toolchain.INTELMPI]
            mpi_fam = self.toolchain.mpi_family()
            if mpi_fam in known_mpi2_fams:
                mpi2 = True
                self.log.debug("Determined MPI2 compatibility based on MPI toolchain component: %s" % mpi_fam)
            else:
                self.log.debug("Cannot determine MPI2 compatibility based on MPI toolchain component: %s" % mpi_fam)
        else:
            # can't use toolchain.mpi_family, because of system toolchain
            mpi2libs = ['impi', 'MVAPICH2', 'OpenMPI', 'MPICH2', 'MPICH']
            for mpi2lib in mpi2libs:
                if get_software_root(mpi2lib):
                    mpi2 = True
                    self.log.debug("Determined MPI2 compatibility based on loaded MPI module: %s", mpi2lib)
                else:
                    self.log.debug("MPI-2 supporting MPI library %s not loaded.", mpi2lib)

        if not mpi2:
            raise EasyBuildError("CP2K needs MPI-2, no known MPI-2 supporting library loaded?")

        cppflags = os.getenv('CPPFLAGS')
        ldflags = os.getenv('LDFLAGS')
        cflags = os.getenv('CFLAGS')
        fflags = os.getenv('FFLAGS')
        fflags_lowopt = re.sub('-O[0-9]', '-O1', fflags)
        options = {
            'CC': os.getenv('MPICC'),
            'CPP': '',
            'FC': '%s %s' % (os.getenv('MPIF90'), self.openmp),
            'LD': '%s %s' % (os.getenv('MPIF90'), self.openmp),
            'AR': 'ar -r',
            'CPPFLAGS': '',

            'FPIC': self.fpic,
            'DEBUG': self.debug,

            'FCFLAGS': '$(FCFLAGS%s)' % optflags,
            'FCFLAGS2': '$(FCFLAGS%s)' % regflags,

            'CFLAGS': ' %s %s %s $(FPIC) $(DEBUG) %s ' % (cflags, cppflags, ldflags, self.cfg['extracflags']),
            'DFLAGS': ' -D__parallel -D__BLACS -D__SCALAPACK -D__FFTSG %s' % self.cfg['extradflags'],

            'LIBS': os.getenv('LIBS', ''),

            'FCFLAGSNOOPT': '$(DFLAGS) $(CFLAGS) -O0  $(FREE) $(FPIC) $(DEBUG)',
            'FCFLAGSOPT': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags,
            'FCFLAGSOPT2': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags_lowopt,
        }

        libint = get_software_root('LibInt')
        if libint:
            options['DFLAGS'] += ' -D__LIBINT'

            libintcompiler = "%s %s" % (os.getenv('CC'), os.getenv('CFLAGS'))

            # Build libint-wrapper, if required
            libint_wrapper = ''

            # required for old versions of GCC
            if not self.compilerISO_C_BINDING:
                options['DFLAGS'] += ' -D__HAS_NO_ISO_C_BINDING'

                # determine path for libint_tools dir
                libinttools_paths = ['libint_tools', 'tools/hfx_tools/libint_tools']
                libinttools_path = None
                for path in libinttools_paths:
                    path = os.path.join(self.cfg['start_dir'], path)
                    if os.path.isdir(path):
                        libinttools_path = path
                        change_dir(libinttools_path)
                if not libinttools_path:
                    raise EasyBuildError("No libinttools dir found")

                # build libint wrapper
                cmd = "%s -c libint_cpp_wrapper.cpp -I%s/include" % (libintcompiler, libint)
                if not run_cmd(cmd, log_all=True, simple=True):
                    raise EasyBuildError("Building the libint wrapper failed")
                libint_wrapper = '%s/libint_cpp_wrapper.o' % libinttools_path

            # determine Libint libraries based on major version number
            libint_maj_ver = get_software_version('Libint').split('.')[0]
            if libint_maj_ver == '1':
                libint_libs = "$(LIBINTLIB)/libderiv.a $(LIBINTLIB)/libint.a $(LIBINTLIB)/libr12.a"
            elif libint_maj_ver == '2':
                libint_libs = "$(LIBINTLIB)/libint2.a"
            else:
                raise EasyBuildError("Don't know how to handle libint version %s", libint_maj_ver)
            self.log.info("Using Libint version %s" % (libint_maj_ver))

            options['LIBINTLIB'] = '%s/lib' % libint
            options['LIBS'] += ' %s -lstdc++ %s' % (libint_libs, libint_wrapper)

        else:
            # throw a warning, since CP2K without Libint doesn't make much sense
            self.log.warning("Libint module not loaded, so building without Libint support")

        libxc = get_software_root('libxc')
        if libxc:
            cur_libxc_version = get_software_version('libxc')
            if LooseVersion(self.version) >= LooseVersion('6.1'):
                libxc_min_version = '4.0.3'
                options['DFLAGS'] += ' -D__LIBXC'
            else:
                libxc_min_version = '2.0.1'
                options['DFLAGS'] += ' -D__LIBXC2'

            if LooseVersion(cur_libxc_version) < LooseVersion(libxc_min_version):
                raise EasyBuildError("This version of CP2K is not compatible with libxc < %s" % libxc_min_version)

            if LooseVersion(cur_libxc_version) >= LooseVersion('4.0.3'):
                # cfr. https://www.cp2k.org/howto:compile#k_libxc_optional_wider_choice_of_xc_functionals
                options['LIBS'] += ' -L%s/lib -lxcf03 -lxc' % libxc
            elif LooseVersion(cur_libxc_version) >= LooseVersion('2.2'):
                options['LIBS'] += ' -L%s/lib -lxcf90 -lxc' % libxc
            else:
                options['LIBS'] += ' -L%s/lib -lxc' % libxc
            self.log.info("Using Libxc-%s" % cur_libxc_version)
        else:
            self.log.info("libxc module not loaded, so building without libxc support")

        return options
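The DFLAGS string assembled above always carries the parallel/BLACS/ScaLAPACK/FFTSG defines and grows as optional libraries are detected. A simplified standalone sketch of that composition (function and argument names are illustrative):

def compose_dflags(extradflags='', with_libint=False, libxc_flag=None):
    """Assemble a CP2K-style DFLAGS string from the detected optional libraries."""
    dflags = ['-D__parallel', '-D__BLACS', '-D__SCALAPACK', '-D__FFTSG']
    if extradflags:
        dflags.append(extradflags)
    if with_libint:
        dflags.append('-D__LIBINT')
    if libxc_flag:
        # e.g. '-D__LIBXC' for CP2K >= 6.1, '-D__LIBXC2' for older releases
        dflags.append(libxc_flag)
    return ' '.join(dflags)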
Example #40
0
    def install_step(self):
        """
        Actual installation
        - create silent cfg file
        - execute command
        """
        impiver = LooseVersion(self.version)
        if impiver >= LooseVersion('4.0.1'):
            # impi starting from version 4.0.1.x uses standard installation procedure.

            silent_cfg_names_map = {}

            if impiver < LooseVersion('4.1.1'):
                # since impi v4.1.1, silent.cfg has been slightly changed to be 'more standard'
                silent_cfg_names_map.update({
                    'activation_name':
                    ACTIVATION_NAME_2012,
                    'license_file_name':
                    LICENSE_FILE_NAME_2012,
                })

            super(EB_impi,
                  self).install_step(silent_cfg_names_map=silent_cfg_names_map)

            # impi v4.1.1 and v5.0.1 installers create impi/<version> subdir, so stuff needs to be moved afterwards
            if impiver == LooseVersion('4.1.1.036') or impiver >= LooseVersion('5.0.1.035'):
                super(EB_impi, self).move_after_install()
        else:
            # impi up until version 4.0.0.x uses custom installation procedure.
            silent = """[mpi]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept
[mpi-rt]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept

""" % {
                'lic': self.license_file,
                'ins': self.installdir
            }

            # already in correct directory
            silentcfg = os.path.join(os.getcwd(), "silent.cfg")
            write_file(silentcfg, silent)
            self.log.debug("Contents of %s: %s", silentcfg, silent)

            tmpdir = os.path.join(os.getcwd(), self.version, 'mytmpdir')
            mkdir(tmpdir, parents=True)

            cmd = "./install.sh --tmp-dir=%s --silent=%s" % (tmpdir, silentcfg)
            run_cmd(cmd, log_all=True, simple=True)

        # recompile libfabric (if requested)
        # some Intel MPI versions (like 2019 update 6) no longer ship libfabric sources
        libfabric_path = os.path.join(self.installdir, 'libfabric')
        if impiver >= LooseVersion('2019') and self.cfg['libfabric_rebuild']:
            if self.cfg['ofi_internal']:
                libfabric_src_tgz_fn = 'src.tgz'
                if os.path.exists(
                        os.path.join(libfabric_path, libfabric_src_tgz_fn)):
                    change_dir(libfabric_path)
                    srcdir = extract_file(libfabric_src_tgz_fn,
                                          os.getcwd(),
                                          change_into_dir=False)
                    change_dir(srcdir)
                    libfabric_installpath = os.path.join(
                        self.installdir, 'intel64', 'libfabric')

                    make = 'make'
                    if self.cfg['parallel']:
                        make += ' -j %d' % self.cfg['parallel']

                    cmds = [
                        './configure --prefix=%s %s' %
                        (libfabric_installpath,
                         self.cfg['libfabric_configopts']), make,
                        'make install'
                    ]
                    for cmd in cmds:
                        run_cmd(cmd, log_all=True, simple=True)
                else:
                    self.log.info(
                        "Rebuild of libfabric is requested, but %s does not exist, so skipping...",
                        libfabric_src_tgz_fn)
            else:
                raise EasyBuildError(
                    "Rebuild of libfabric is requested, but ofi_internal is set to False."
                )
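The libfabric rebuild boils down to a plain configure/make/make install cycle inside the unpacked source tree. A minimal standalone sketch (function name and arguments are illustrative):

import subprocess

def rebuild_libfabric(srcdir, prefix, configopts='', parallel=1):
    """Configure, build and install libfabric from 'srcdir' into 'prefix'."""
    for cmd in [
        './configure --prefix=%s %s' % (prefix, configopts),
        'make -j %d' % parallel,
        'make install',
    ]:
        subprocess.check_call(cmd, shell=True, cwd=srcdir)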
Example #41
0
    def configure_step(self):
        """Add some extra configure options."""

        if LooseVersion(self.version) >= LooseVersion('2.6.0'):
            # Libint 2.6.0 requires first compiling the Libint compiler,
            # by running configure with appropriate options, followed by 'make export'
            # and unpacking the resulting source tarball;
            # see https://github.com/evaleev/libint/wiki#compiling-libint-compiler

            # CMake is recommended, but configuring with Fortran support doesn't work correctly yet in Libint 2.6.0
            # so stick to traditional configure script for now
            print_msg("configuring Libint compiler...")

            # first run autogen.sh script to generate initial configure script
            run_cmd("./autogen.sh")

            cmd = ' '.join([
                self.cfg['preconfigopts'],
                './configure',
                self.cfg['configopts'],
                self.cfg['libint_compiler_configopts'],
            ])
            run_cmd(cmd)

            print_msg("generating Libint library...")
            run_cmd("make export")

            source_fn = 'libint-%s.tgz' % self.version
            if os.path.exists(source_fn):
                extract_file(source_fn, os.getcwd(), change_into_dir=False)
                change_dir('libint-%s' % self.version)
            else:
                raise EasyBuildError(
                    "Could not find generated source tarball after 'make export'!"
                )

        # Libint < 2.7.0 can be configured using configure script,
        # Libint >= 2.7.0 should be configured via cmake
        if LooseVersion(self.version) < LooseVersion('2.7.0'):

            # also build shared libraries (not enabled by default)
            self.cfg.update('configopts', "--enable-shared")

            if self.toolchain.options['pic']:
                # Enforce consistency.
                self.cfg.update('configopts', "--with-pic")

            if LooseVersion('2.0') <= LooseVersion(self.version) < LooseVersion('2.1'):
                # the code in libint is automatically generated and hence it is in some
                # parts so complex that -O2 or -O3 compiler optimization takes forever
                self.cfg.update('configopts', "--with-cxx-optflags='-O1'")

            elif LooseVersion(self.version) >= LooseVersion('2.1'):
                # pass down $CXXFLAGS to --with-cxxgen-optflags configure option;
                # mainly to avoid warning about it not being set (but $CXXFLAGS is picked up anyway in practice)
                self.cfg.update(
                    'configopts',
                    "--with-cxxgen-optflags='%s'" % os.getenv('CXXFLAGS'))

            # --enable-fortran is only a known configure option for Libint library, not for Libint compiler,
            # so only add --enable-fortran *after* configuring & generating Libint compiler
            if self.cfg['with_fortran']:
                self.cfg.update('configopts', '--enable-fortran')

            self.cfg['configure_cmd'] = DEFAULT_CONFIGURE_CMD
            ConfigureMake.configure_step(self)

        else:
            if self.cfg['with_fortran']:
                self.cfg.update('configopts', '-DENABLE_FORTRAN=ON')

            # specify current directory as source directory (that contains CMakeLists.txt),
            # since that's the path to the unpacked source tarball for Libint library (created by 'make export')
            super(EB_Libint, self).configure_step(srcdir=os.getcwd())
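The optimization-flag choice above depends on the Libint version range: the generated C++ in the 2.0 series is too complex for -O2/-O3, while 2.1 and later just forward $CXXFLAGS. Distilled into a hedged standalone helper (the name is illustrative):

from distutils.version import LooseVersion

def cxxgen_optflags(libint_version, cxxflags):
    """Pick the configure option for Libint's generated C++ code, mirroring the branches above."""
    ver = LooseVersion(libint_version)
    if LooseVersion('2.0') <= ver < LooseVersion('2.1'):
        # generated code is too complex for -O2/-O3, fall back to -O1
        return "--with-cxx-optflags='-O1'"
    if ver >= LooseVersion('2.1'):
        return "--with-cxxgen-optflags='%s'" % cxxflags
    return ''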
Example #42
0
    def post_install_step(self):
        """
        Install group libraries and interfaces (if desired).
        """
        super(EB_imkl, self).post_install_step()

        shlib_ext = get_shared_lib_ext()

        # reload the dependencies
        self.load_dependency_modules()

        if self.cfg['m32']:
            extra = {
                'libmkl.%s' % shlib_ext:
                'GROUP (-lmkl_intel -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a':
                'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_core.a)',
                'libmkl_lapack.a':
                'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }
        else:
            extra = {
                'libmkl.%s' % shlib_ext:
                'GROUP (-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a':
                'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver_lp64.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_lp64.a)',
                'libmkl_lapack.a':
                'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }

        if LooseVersion(self.version) >= LooseVersion('10.3'):
            libsubdir = os.path.join('mkl', 'lib', 'intel64')
        else:
            if self.cfg['m32']:
                libsubdir = os.path.join('lib', '32')
            else:
                libsubdir = os.path.join('lib', 'em64t')

        for fil, txt in extra.items():
            dest = os.path.join(self.installdir, libsubdir, fil)
            if not os.path.exists(dest):
                try:
                    f = open(dest, 'w')
                    f.write(txt)
                    f.close()
                    self.log.info("File %s written" % dest)
                except IOError as err:
                    raise EasyBuildError("Can't write file %s: %s", dest, err)

        # build the mkl interfaces, if desired
        if self.cfg['interfaces']:

            if LooseVersion(self.version) >= LooseVersion('10.3'):
                intsubdir = os.path.join('mkl', 'interfaces')
                inttarget = 'libintel64'
            else:
                intsubdir = 'interfaces'
                if self.cfg['m32']:
                    inttarget = 'lib32'
                else:
                    inttarget = 'libem64t'

            cmd = "make -f makefile %s" % inttarget

            # blas95 and lapack95 need more work, ignore for now
            # blas95 and lapack also need include/.mod to be processed
            fftw2libs = ['fftw2xc', 'fftw2xf']
            fftw3libs = ['fftw3xc', 'fftw3xf']

            interfacedir = os.path.join(self.installdir, intsubdir)
            change_dir(interfacedir)
            self.log.info("Changed to interfaces directory %s", interfacedir)

            compopt = None
            # determine whether we're using a non-Intel GCC-based or PGI-based toolchain
            # can't use toolchain.comp_family, because of dummy toolchain used when installing imkl
            if get_software_root('icc') is None:
                # check for PGI first, since there's a GCC underneath PGI too...
                if get_software_root('PGI'):
                    compopt = 'compiler=pgi'
                elif get_software_root('GCC'):
                    compopt = 'compiler=gnu'
                else:
                    raise EasyBuildError(
                        "Not using Intel/GCC/PGI compilers, don't know how to build wrapper libs"
                    )
            else:
                compopt = 'compiler=intel'

            # patch makefiles for cdft wrappers when PGI is used as compiler
            if get_software_root('PGI'):
                regex_subs = [
                    # pgi should be considered as a valid compiler
                    ("intel gnu", "intel gnu pgi"),
                    # transform 'gnu' case to 'pgi' case
                    (r"ifeq \(\$\(compiler\),gnu\)", "ifeq ($(compiler),pgi)"),
                    ('=gcc', '=pgcc'),
                    # correct flag to use C99 standard
                    ('-std=c99', '-c99'),
                    # -Wall and -Werror are not valid options for pgcc, no close equivalent
                    ('-Wall', ''),
                    ('-Werror', ''),
                ]
                for lib in self.cdftlibs:
                    apply_regex_substitutions(
                        os.path.join(interfacedir, lib, 'makefile'),
                        regex_subs)

            for lib in fftw2libs + fftw3libs + self.cdftlibs:
                buildopts = [compopt]
                if lib in fftw3libs:
                    buildopts.append('install_to=$INSTALL_DIR')
                elif lib in self.cdftlibs:
                    if self.mpi_spec is not None:
                        buildopts.append('mpi=%s' % self.mpi_spec)

                precflags = ['']
                if lib.startswith('fftw2x') and not self.cfg['m32']:
                    # build both single and double precision variants
                    precflags = [
                        'PRECISION=MKL_DOUBLE', 'PRECISION=MKL_SINGLE'
                    ]

                intflags = ['']
                if lib in self.cdftlibs and not self.cfg['m32']:
                    # build both 32-bit and 64-bit interfaces
                    intflags = ['interface=lp64', 'interface=ilp64']

                allopts = [
                    list(opts)
                    for opts in itertools.product(intflags, precflags)
                ]

                for flags, extraopts in itertools.product(['', '-fPIC'],
                                                          allopts):
                    tup = (lib, flags, buildopts, extraopts)
                    self.log.debug(
                        "Building lib %s with: flags %s, buildopts %s, extraopts %s"
                        % tup)

                    tmpbuild = tempfile.mkdtemp(dir=self.builddir)
                    self.log.debug("Created temporary directory %s" % tmpbuild)

                    # always set INSTALL_DIR, SPEC_OPT, COPTS and CFLAGS
                    # fftw2x(c|f): use $INSTALL_DIR, $CFLAGS and $COPTS
                    # fftw3x(c|f): use $CFLAGS
                    # fftw*cdft: use $INSTALL_DIR and $SPEC_OPT
                    env.setvar('INSTALL_DIR', tmpbuild)
                    env.setvar('SPEC_OPT', flags)
                    env.setvar('COPTS', flags)
                    env.setvar('CFLAGS', flags)

                    try:
                        intdir = os.path.join(interfacedir, lib)
                        os.chdir(intdir)
                        self.log.info("Changed to interface %s directory %s" %
                                      (lib, intdir))
                    except OSError as err:
                        raise EasyBuildError(
                            "Can't change to interface %s directory %s: %s",
                            lib, intdir, err)

                    fullcmd = "%s %s" % (cmd, ' '.join(buildopts + extraopts))
                    res = run_cmd(fullcmd, log_all=True, simple=True)
                    if not res:
                        raise EasyBuildError(
                            "Building %s (flags: %s, fullcmd: %s) failed", lib,
                            flags, fullcmd)

                    for fn in os.listdir(tmpbuild):
                        src = os.path.join(tmpbuild, fn)
                        if flags == '-fPIC':
                            # add _pic to filename
                            ff = fn.split('.')
                            fn = '.'.join(ff[:-1]) + '_pic.' + ff[-1]
                        dest = os.path.join(self.installdir, libsubdir, fn)
                        try:
                            if os.path.isfile(src):
                                shutil.move(src, dest)
                                self.log.info("Moved %s to %s" % (src, dest))
                        except OSError as err:
                            raise EasyBuildError("Failed to move %s to %s: %s",
                                                 src, dest, err)

                    rmtree2(tmpbuild)
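The 'extra' files written near the top of this example are tiny GNU ld linker scripts: a GROUP(...) directive lets a single -lmkl (or libmkl_scalapack.a, etc.) resolve to several real MKL libraries. A hedged sketch of such a writer, not the easyblock's own helper:

import os

def write_group_script(libdir, name, members):
    """Write an ld linker script 'name' in 'libdir' that groups the given member libraries."""
    path = os.path.join(libdir, name)
    if not os.path.exists(path):
        with open(path, 'w') as handle:
            handle.write('GROUP (%s)\n' % ' '.join(members))
    return path

# e.g. write_group_script(libdir, 'libmkl_scalapack.a', ['libmkl_scalapack_lp64.a'])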
Example #43
0
    def configure_step(self):
        """
        Custom configure and build procedure for Siesta.
        - There are two main builds to do, siesta and transiesta
        - In addition there are multiple support tools to build
        """

        start_dir = self.cfg['start_dir']
        self.obj_dir = os.path.join(start_dir, 'Obj')
        arch_make = os.path.join(self.obj_dir, 'arch.make')
        self.bindir = os.path.join(start_dir, 'bin')

        self.pkg_conf_requires = []
        self.pkg_conf_libs = []

        loose_ver = LooseVersion(self.version)

        # Option defaults depending on version
        if self.cfg['with_transiesta'] is None:
            self.cfg['with_transiesta'] = loose_ver <= LooseVersion('4.1-b4')

        if self.cfg['with_libsiesta'] is None:
            self.cfg['with_libsiesta'] = loose_ver >= LooseVersion('4.2')

        # Features depending on version and options
        self.jpar = ""
        if loose_ver >= LooseVersion('4.1'):
            self.jpar = "-j {}".format(self.cfg['parallel'])

        if self.cfg['with_transiesta'] and loose_ver >= LooseVersion('4.1-rc2'):
            # Error out if the user has specified incompatible options
            raise EasyBuildError(
                "You cannot use `with_transiesta` for newer versions of Siesta."
                " Transiesta is always built into Siesta.")
        elif self.cfg['with_transiesta'] and loose_ver >= LooseVersion('4.1-b3'):
            # Although for compatibility with older easyconfigs,
            # we may need to silently alter the user's wishes
            self.cfg['with_transiesta'] = False

        if self.cfg['with_libsiesta'] and loose_ver <= LooseVersion('4.1'):
            raise EasyBuildError(
                "libsiesta is an addition in an upcoming version after 4.1.")

        # enable OpenMP support if desired
        env_var_suff = ''
        if self.toolchain.options.get('openmp', None):
            env_var_suff = '_MT'

        # Other libraries
        scalapack = os.environ['LIBSCALAPACK' + env_var_suff]
        blacs = os.environ['LIBSCALAPACK' + env_var_suff]
        lapack = os.environ['LIBLAPACK' + env_var_suff]
        blas = os.environ['LIBBLAS' + env_var_suff]
        if get_software_root('imkl') or get_software_root('FFTW'):
            fftw = os.environ['LIBFFT' + env_var_suff]
        else:
            fftw = None

        regex_newlines = []
        regex_subs = [
            ('dc_lapack.a', ''),
            (r'^NETCDF_INTERFACE\s*=.*$', ''),
            ('libsiestaBLAS.a', ''),
            ('libsiestaLAPACK.a', ''),
            # Needed here to allow 4.1-b1 to be built with openmp
            (r"^(LDFLAGS\s*=).*$",
             r"\1 %s %s" % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
        ]
        # Be really sure we are using -fPIC in case of libsiesta.a:
        if self.cfg['with_libsiesta']:
            regex_subs.append((r"^(FPPFLAGS\s*:?=.*)$", r"\1 -fPIC"))
            regex_newlines.append(
                (r"^(FPPFLAGS\s*:?=.*)$", r"\1\nCFLAGS+= -fPIC"))

        netcdff_loc = get_software_root('netCDF-Fortran')
        if netcdff_loc:
            # Needed for gfortran at least
            regex_newlines.append(
                (r"^(ARFLAGS_EXTRA\s*=.*)$",
                 r"\1\nNETCDF_INCFLAGS = -I%s/include" % netcdff_loc))

        if fftw:
            fft_inc, fft_lib = os.environ['FFT_INC_DIR'], os.environ['FFT_LIB_DIR']
            fppflags = r"\1\nFFTW_INCFLAGS = -I%s\nFFTW_LIBS = -L%s %s" % (
                fft_inc, fft_lib, fftw)
            regex_newlines.append((r'(FPPFLAGS\s*:?=.*)$', fppflags))

        # Make a temp installdir during the build of the various parts
        mkdir(self.bindir)

        # change to actual build dir
        change_dir(self.obj_dir)

        # Populate start_dir with makefiles
        run_cmd(os.path.join(start_dir, 'Src', 'obj_setup.sh'),
                log_all=True,
                simple=True,
                log_output=True)

        if loose_ver < LooseVersion('4.1-b2'):
            # MPI?
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--enable-mpi')

            # BLAS and LAPACK
            self.cfg.update('configopts', '--with-blas="%s"' % blas)
            self.cfg.update('configopts', '--with-lapack="%s"' % lapack)

            # ScaLAPACK (and BLACS)
            self.cfg.update('configopts', '--with-scalapack="%s"' % scalapack)
            self.cfg.update('configopts', '--with-blacs="%s"' % blacs)

            # NetCDF-Fortran
            if netcdff_loc:
                self.cfg.update('configopts', '--with-netcdf=-lnetcdff')

            # Configure is run in self.obj_dir, configure script is in ../Src
            super(EB_Siesta, self).configure_step(cmd_prefix='../Src/')

            regex_subs_Makefile = [
                (r'CFLAGS\)-c', r'CFLAGS) -c'),
            ]
            apply_regex_substitutions('Makefile', regex_subs_Makefile)

        else:  # there's no configure on newer versions

            if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
                copy_file(os.path.join(self.obj_dir, 'intel.make'), arch_make)
            elif self.toolchain.comp_family() in [toolchain.GCC]:
                copy_file(os.path.join(self.obj_dir, 'gfortran.make'),
                          arch_make)
            else:
                raise EasyBuildError(
                    "There is currently no support for compiler: %s",
                    self.toolchain.comp_family())

            regex_subs.append((r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DF2003"))

            if self.toolchain.options.get('usempi', None):
                regex_subs.extend([
                    (r"^(CC\s*=\s*).*$", r"\1%s" % os.environ['MPICC']),
                    (r"^(FC\s*=\s*).*$", r"\1%s" % os.environ['MPIF90']),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DMPI"),
                ])
                regex_newlines.append(
                    (r"^(FPPFLAGS\s*:?=.*)$",
                     r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = ."))
                complibs = scalapack
            else:
                complibs = lapack

            regex_subs.extend([
                (r"^(LIBS\s*=).*$", r"\1 %s" % complibs),
                # Needed for a couple of the utils
                (r"^(FFLAGS\s*=\s*).*$", r"\1 -fPIC %s" % os.environ['FCFLAGS']
                 ),
            ])
            regex_newlines.append(
                (r"^(COMP_LIBS\s*=.*)$", r"\1\nWXML = libwxml.a"))

            if netcdff_loc:
                regex_subs.extend([
                    (r"^(LIBS\s*=.*)$", r"\1 $(NETCDF_LIBS)"),
                    (r"^(FPPFLAGS\s*:?=.*)$",
                     r"\1 -DCDF -DNCDF -DNCDF_4 -DNCDF_PARALLEL $(NETCDF_INCLUDE)"
                     ),
                    (r"^(COMP_LIBS\s*=.*)$", r"\1 libncdf.a libfdict.a"),
                ])
                netcdf_lib_and_inc = "NETCDF_LIBS = -lnetcdff\nNETCDF_INCLUDE = -I%s/include" % netcdff_loc
                netcdf_lib_and_inc += "\nINCFLAGS = $(NETCDF_INCLUDE)"
                regex_newlines.append(
                    (r"^(COMP_LIBS\s*=.*)$", r"\1\n%s" % netcdf_lib_and_inc))
                self.pkg_conf_requires += ["netcdf-fortran"]

            xmlf90 = get_software_root('xmlf90')
            if xmlf90:
                regex_subs.append((r"^(XMLF90_ROOT\s*=).*$", r"\1%s" % xmlf90))
                self.pkg_conf_requires += ["xmlf90"]

            libpsml = get_software_root('libPSML')
            if libpsml:
                regex_subs.append(
                    (r"^(PSML_ROOT\s*=).*$.*", r"\1%s" % libpsml))
                self.pkg_conf_requires += ["libpsml"]

            libgridxc = get_software_root('libGridXC')
            if libgridxc:
                regex_subs.append(
                    (r"^(GRIDXC_ROOT\s*=).*$", r"\1%s" % libgridxc))
                self.pkg_conf_requires += ["libgridxc"]

            libxc = get_software_root('libxc')
            if libxc:
                regex_subs.append((r"^#(LIBXC_ROOT\s*=).*$", r"\1 %s" % libxc))
                self.pkg_conf_requires += ["libxc"]

            elpa = get_software_root('ELPA')
            if elpa:
                elpa_ver = get_software_version('ELPA')
                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__ELPA"),
                    (r"^(FPPFLAGS\s*:?=.*)$",
                     r"\1 -I%s/include/elpa-%s/modules" % (elpa, elpa_ver)),
                    (r"^(LIBS\s*=.*)$", r"\1 -L%s/lib -lelpa" % elpa),
                ])
                self.pkg_conf_requires += ["elpa-%s" % elpa_ver]

            elsi = get_software_root('ELSI')
            if elsi:
                if not os.path.isfile(
                        os.path.join(elsi, 'lib',
                                     'libelsi.%s' % get_shared_lib_ext())):
                    raise EasyBuildError(
                        "This easyblock requires ELSI shared libraries instead of static"
                    )

                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__ELSI"),
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -I%s/include" % elsi),
                    (r"^(LIBS\s*=.*)$",
                     r"\1 $(FFTW_LIBS) -L%s/lib -lelsi" % elsi),
                ])
                self.pkg_conf_requires += ["elsi"]

            metis = get_software_root('METIS')
            if metis:
                regex_subs.extend([
                    (r"^(FPPFLAGS\s*:?=.*)$", r"\1 -DSIESTA__METIS"),
                    (r"^(LIBS\s*=.*)$", r"\1 -L%s/lib -lmetis" % metis),
                ])
                self.pkg_conf_requires += ["metis"]

        apply_regex_substitutions(arch_make, regex_subs)

        # individually apply substitutions that add lines
        for regex_nl in regex_newlines:
            apply_regex_substitutions(arch_make, [regex_nl])

        allutils = self.cfg['with_utils']
        someutils = self.cfg['with_utils_names']
        self.expected_utils = []
        if allutils or someutils:
            # Make the utils
            change_dir(os.path.join(start_dir, 'Util'))

            # clean_all.sh might be missing executable bit...
            adjust_permissions('./clean_all.sh',
                               stat.S_IXUSR,
                               recursive=False,
                               relative=True)
            run_cmd('./clean_all.sh',
                    log_all=True,
                    simple=True,
                    log_output=True)

            if loose_ver >= LooseVersion('4.1'):
                regex_subs_TS = [
                    (r"^default:.*$", r""),
                    (r"^EXE\s*=.*$", r""),
                    (r"^(include\s*..ARCH_MAKE.*)$",
                     r"EXE=tshs2tshs\ndefault: $(EXE)\n\1"),
                    (r"^(INCFLAGS.*)$", r"\1 -I%s" % self.obj_dir),
                ]

                makefile = os.path.join(start_dir, 'Util', 'TS', 'tshs2tshs',
                                        'Makefile')
                apply_regex_substitutions(makefile, regex_subs_TS)

            # SUFFIX rules in wrong place
            regex_subs_suffix = [
                (r'^(\.SUFFIXES:.*)$', r''),
                (r'^(include\s*\$\(ARCH_MAKE\).*)$',
                 r'\1\n.SUFFIXES:\n.SUFFIXES: .c .f .F .o .a .f90 .F90'),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Sockets', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)
            makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                    'SimpleTest', 'Src', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)

            regex_subs_UtilLDFLAGS = [
                (r'(\$\(FC\)\s*-o\s)', r'$(FC) %s %s -o ' %
                 (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Optimizer', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)
            makefile = os.path.join(start_dir, 'Util', 'JobList', 'Src',
                                    'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)

            # remove clean at the end of default target (!!)
            if loose_ver < LooseVersion('4.0.2') or loose_ver == LooseVersion(
                    '4.1-b3'):
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                        'SimpleTest', 'Src', 'Makefile')
                apply_regex_substitutions(
                    makefile,
                    [(r"simple_mpi_parallel clean", r"simple_mpi_parallel")])
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                        'ProtoNEB', 'Src', 'Makefile')
                apply_regex_substitutions(makefile,
                                          [(r"protoNEB clean", r"protoNEB")])

            # Now move all the built utils to the temp installdir
            expected_utils = [
                'CMLComp/ccViz',
                'Contrib/APostnikov/eig2bxsf',
                'Contrib/APostnikov/fmpdos',
                'Contrib/APostnikov/md2axsf',
                'Contrib/APostnikov/rho2xsf',
                'Contrib/APostnikov/vib2xsf',
                'Contrib/APostnikov/xv2xsf',
                'COOP/fat',
                'COOP/mprop',
                'Denchar/Src/denchar',
                'DensityMatrix/cdf2dm',
                'DensityMatrix/dm2cdf',
                'Eig2DOS/Eig2DOS',
                'Gen-basis/gen-basis',
                'Gen-basis/ioncat',
                'Gen-basis/ionplot.sh',
                'Grid/cdf2grid',
                'Grid/cdf2xsf',
                'Grid/cdf_laplacian',
                'Grid/g2c_ng',
                'Grid/grid2cdf',
                'Grid/grid2cube',
                'Grid/grid2val',
                'Grid/grid_rotate',
                'Helpers/get_chem_labels',
                'HSX/hs2hsx',
                'HSX/hsx2hs',
                'JobList/Src/countJobs',
                'JobList/Src/getResults',
                'JobList/Src/horizontal',
                'JobList/Src/runJobs',
                'Macroave/Src/macroave',
                'ON/lwf2cdf',
                'Optimizer/simplex',
                'Optimizer/swarm',
                'pdosxml/pdosxml',
                'Projections/orbmol_proj',
                'SiestaSubroutine/FmixMD/Src/driver',
                'SiestaSubroutine/FmixMD/Src/para',
                'SiestaSubroutine/FmixMD/Src/simple',
                'STM/ol-stm/Src/stm',
                'STM/simple-stm/plstm',
                'Vibra/Src/fcbuild',
                'Vibra/Src/vibra',
                'WFS/readwf',
                'WFS/readwfx',
                'WFS/wfs2wfsx',
                'WFS/wfsnc2wfsx',
                'WFS/wfsx2wfs',
            ]

            # skip broken utils in 4.1-MaX-1.0 release, hopefully will be fixed later
            if self.version != '4.1-MaX-1.0':
                expected_utils.extend([
                    'VCA/fractional',
                    'VCA/mixps',
                ])

            expected_utils.extend([
                'Bands/eigfat2plot',
            ])

            if self.version != '4.1-MaX-1.0':
                expected_utils.extend([
                    'SiestaSubroutine/ProtoNEB/Src/protoNEB',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_parallel',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_serial',
                    'SiestaSubroutine/SimpleTest/Src/simple_sockets_parallel',
                    'SiestaSubroutine/SimpleTest/Src/simple_sockets_serial',
                ])
            expected_utils.extend([
                'Sockets/f2fmaster',
                'Sockets/f2fslave',
            ])
            if self.toolchain.options.get('usempi', None):
                if self.version != '4.1-MaX-1.0':
                    expected_utils.extend([
                        'SiestaSubroutine/SimpleTest/Src/simple_mpi_parallel',
                        'SiestaSubroutine/SimpleTest/Src/simple_mpi_serial',
                    ])

            if loose_ver < LooseVersion('4.1'):
                expected_utils.append('WFS/info_wfsx')
                expected_utils.extend([
                    'COOP/dm_creator',
                    'TBTrans_rep/tbtrans',
                ])

            if loose_ver < LooseVersion('4.0.2'):
                expected_utils.extend([
                    'Bands/new.gnubands',
                ])
            else:
                expected_utils.extend([
                    'Bands/gnubands',
                ])
                # Need to revisit this when 4.1 is officially released.
                # This is based on b1-b3 releases
                if loose_ver < LooseVersion('4.1'):
                    expected_utils.extend([
                        'Contour/grid1d',
                        'Contour/grid2d',
                        'Optical/optical',
                        'Optical/optical_input',
                        'sies2arc/sies2arc',
                    ])

            if loose_ver >= LooseVersion('4.1'):
                expected_utils.extend([
                    'DensityMatrix/dmbs2dm', 'DensityMatrix/dmUnblock',
                    'Grimme/fdf2grimme', 'SpPivot/pvtsp', 'TS/tselecs.sh',
                    'TS/ts2ts/ts2ts', 'TS/tbtrans', 'TS/phtrans'
                ])
                if self.version != '4.1-MaX-1.0':
                    expected_utils.extend([
                        'TS/tshs2tshs/tshs2tshs',
                    ])
            # Filter to selected utils
            if someutils:
                someutils_lower = set(util.lower() for util in someutils)
                expected_utils = filter(
                    lambda util: util.split("/")[0].lower() in someutils_lower,
                    expected_utils)
            self.expected_utils = expected_utils
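
A minimal, self-contained sketch of the selection logic above: only utilities whose top-level Util/ subdirectory matches one of the requested names (case-insensitively) are kept. The paths and the option value below are illustrative, not taken from an actual build:

expected_utils = [
    'COOP/mprop',
    'Denchar/Src/denchar',
    'Grid/grid2cube',
    'WFS/readwf',
]
someutils = ['grid', 'COOP']  # hypothetical value of the corresponding easyconfig option

someutils_lower = set(util.lower() for util in someutils)
selected = [u for u in expected_utils if u.split('/')[0].lower() in someutils_lower]
print(selected)  # ['COOP/mprop', 'Grid/grid2cube']
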
Example #44
0
    def configure_step(self):
        """
        Custom configure and build procedure for Siesta.
        - There are two main builds to do, siesta and transiesta
        - In addition there are multiple support tools to build
        """

        start_dir = self.cfg['start_dir']
        obj_dir = os.path.join(start_dir, 'Obj')
        arch_make = os.path.join(obj_dir, 'arch.make')
        bindir = os.path.join(start_dir, 'bin')

        par = ''
        if LooseVersion(self.version) >= LooseVersion('4.1'):
            par = '-j %s' % self.cfg['parallel']

        # enable OpenMP support if desired
        env_var_suff = ''
        if self.toolchain.options.get('openmp', None):
            env_var_suff = '_MT'

        scalapack = os.environ['LIBSCALAPACK' + env_var_suff]
        blacs = os.environ['LIBSCALAPACK' + env_var_suff]
        lapack = os.environ['LIBLAPACK' + env_var_suff]
        blas = os.environ['LIBBLAS' + env_var_suff]
        if get_software_root('imkl') or get_software_root('FFTW'):
            fftw = os.environ['LIBFFT' + env_var_suff]
        else:
            fftw = None

        regex_newlines = []
        regex_subs = [
            ('dc_lapack.a', ''),
            (r'^NETCDF_INTERFACE\s*=.*$', ''),
            ('libsiestaBLAS.a', ''),
            ('libsiestaLAPACK.a', ''),
            # Needed here to allow 4.1-b1 to be built with openmp
            (r"^(LDFLAGS\s*=).*$",
             r"\1 %s %s" % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
        ]

        netcdff_loc = get_software_root('netCDF-Fortran')
        if netcdff_loc:
            # Needed for gfortran at least
            regex_newlines.append(
                (r"^(ARFLAGS_EXTRA\s*=.*)$",
                 r"\1\nNETCDF_INCFLAGS = -I%s/include" % netcdff_loc))

        if fftw:
            fft_inc, fft_lib = os.environ['FFT_INC_DIR'], os.environ[
                'FFT_LIB_DIR']
            fppflags = r"\1\nFFTW_INCFLAGS = -I%s\nFFTW_LIBS = -L%s %s" % (
                fft_inc, fft_lib, fftw)
            regex_newlines.append((r'(FPPFLAGS\s*=.*)$', fppflags))

        # Make a temp installdir during the build of the various parts
        mkdir(bindir)

        # change to actual build dir
        change_dir(obj_dir)

        # Populate start_dir with makefiles
        run_cmd(os.path.join(start_dir, 'Src', 'obj_setup.sh'),
                log_all=True,
                simple=True,
                log_output=True)

        if LooseVersion(self.version) < LooseVersion('4.1-b2'):
            # MPI?
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--enable-mpi')

            # BLAS and LAPACK
            self.cfg.update('configopts', '--with-blas="%s"' % blas)
            self.cfg.update('configopts', '--with-lapack="%s"' % lapack)

            # ScaLAPACK (and BLACS)
            self.cfg.update('configopts', '--with-scalapack="%s"' % scalapack)
            self.cfg.update('configopts', '--with-blacs="%s"' % blacs)

            # NetCDF-Fortran
            if netcdff_loc:
                self.cfg.update('configopts', '--with-netcdf=-lnetcdff')

            # Configure is run in obj_dir, configure script is in ../Src
            super(EB_Siesta, self).configure_step(cmd_prefix='../Src/')

            if LooseVersion(self.version) > LooseVersion('4.0'):
                regex_subs_Makefile = [
                    (r'CFLAGS\)-c', r'CFLAGS) -c'),
                ]
                apply_regex_substitutions('Makefile', regex_subs_Makefile)

        else:  # there's no configure on newer versions

            if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
                copy_file(os.path.join(obj_dir, 'intel.make'), arch_make)
            elif self.toolchain.comp_family() in [toolchain.GCC]:
                copy_file(os.path.join(obj_dir, 'gfortran.make'), arch_make)
            else:
                raise EasyBuildError(
                    "There is currently no support for compiler: %s",
                    self.toolchain.comp_family())

            if self.toolchain.options.get('usempi', None):
                regex_subs.extend([
                    (r"^(CC\s*=\s*).*$", r"\1%s" % os.environ['MPICC']),
                    (r"^(FC\s*=\s*).*$", r"\1%s" % os.environ['MPIF90']),
                    (r"^(FPPFLAGS\s*=.*)$", r"\1 -DMPI"),
                ])
                regex_newlines.append(
                    (r"^(FPPFLAGS\s*=.*)$",
                     r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = ."))
                complibs = scalapack
            else:
                complibs = lapack

            regex_subs.extend([
                (r"^(LIBS\s*=).*$", r"\1 %s" % complibs),
                # Needed for a couple of the utils
                (r"^(FFLAGS\s*=\s*).*$", r"\1 -fPIC %s" % os.environ['FCFLAGS']
                 ),
            ])
            regex_newlines.append(
                (r"^(COMP_LIBS\s*=.*)$", r"\1\nWXML = libwxml.a"))

            if netcdff_loc:
                regex_subs.extend([
                    (r"^(LIBS\s*=.*)$", r"\1 $(NETCDF_LIBS)"),
                    (r"^(FPPFLAGS\s*=.*)$", r"\1 -DCDF"),
                ])
                regex_newlines.append(
                    (r"^(COMP_LIBS\s*=.*)$", r"\1\nNETCDF_LIBS = -lnetcdff"))

        apply_regex_substitutions(arch_make, regex_subs)

        # individually apply substitutions that add lines
        for regex_nl in regex_newlines:
            apply_regex_substitutions(arch_make, [regex_nl])

        run_cmd('make %s' % par, log_all=True, simple=True, log_output=True)

        # Put binary in temporary install dir
        copy_file(os.path.join(obj_dir, 'siesta'), bindir)

        if self.cfg['with_utils']:
            # Make the utils
            change_dir(os.path.join(start_dir, 'Util'))

            # clean_all.sh might be missing executable bit...
            adjust_permissions('./clean_all.sh',
                               stat.S_IXUSR,
                               recursive=False,
                               relative=True)
            run_cmd('./clean_all.sh',
                    log_all=True,
                    simple=True,
                    log_output=True)

            if LooseVersion(self.version) >= LooseVersion('4.1'):
                regex_subs_TS = [
                    (r"^default:.*$", r""),
                    (r"^EXE\s*=.*$", r""),
                    (r"^(include\s*..ARCH_MAKE.*)$",
                     r"EXE=tshs2tshs\ndefault: $(EXE)\n\1"),
                    (r"^(INCFLAGS.*)$", r"\1 -I%s" % obj_dir),
                ]

                makefile = os.path.join(start_dir, 'Util', 'TS', 'tshs2tshs',
                                        'Makefile')
                apply_regex_substitutions(makefile, regex_subs_TS)

            # SUFFIX rules in wrong place
            regex_subs_suffix = [
                (r'^(\.SUFFIXES:.*)$', r''),
                (r'^(include\s*\$\(ARCH_MAKE\).*)$',
                 r'\1\n.SUFFIXES:\n.SUFFIXES: .c .f .F .o .a .f90 .F90'),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Sockets', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)
            makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                    'SimpleTest', 'Src', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)

            regex_subs_UtilLDFLAGS = [
                (r'(\$\(FC\)\s*-o\s)', r'$(FC) %s %s -o ' %
                 (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Optimizer', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)
            makefile = os.path.join(start_dir, 'Util', 'JobList', 'Src',
                                    'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)

            # remove clean at the end of default target
            if self.version == '4.0.1' or self.version == '4.1-b3':
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                        'SimpleTest', 'Src', 'Makefile')
                apply_regex_substitutions(
                    makefile,
                    [(r"simple_mpi_parallel clean", r"simple_mpi_parallel")])
                makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine',
                                        'ProtoNEB', 'Src', 'Makefile')
                apply_regex_substitutions(makefile,
                                          [(r"protoNEB clean", r"protoNEB")])

            run_cmd('./build_all.sh',
                    log_all=True,
                    simple=True,
                    log_output=True)

            # Now move all the built utils to the temp installdir
            expected_utils = [
                'Bands/eigfat2plot',
                'CMLComp/ccViz',
                'Contrib/APostnikov/eig2bxsf',
                'Contrib/APostnikov/rho2xsf',
                'Contrib/APostnikov/vib2xsf',
                'Contrib/APostnikov/fmpdos',
                'Contrib/APostnikov/xv2xsf',
                'Contrib/APostnikov/md2axsf',
                'COOP/mprop',
                'COOP/fat',
                'Denchar/Src/denchar',
                'DensityMatrix/dm2cdf',
                'DensityMatrix/cdf2dm',
                'Eig2DOS/Eig2DOS',
                'Gen-basis/ioncat',
                'Gen-basis/gen-basis',
                'Grid/cdf2grid',
                'Grid/cdf_laplacian',
                'Grid/cdf2xsf',
                'Grid/grid2cube',
                'Grid/grid_rotate',
                'Grid/g2c_ng',
                'Grid/grid2cdf',
                'Grid/grid2val',
                'Helpers/get_chem_labels',
                'HSX/hs2hsx',
                'HSX/hsx2hs',
                'JobList/Src/getResults',
                'JobList/Src/countJobs',
                'JobList/Src/runJobs',
                'JobList/Src/horizontal',
                'Macroave/Src/macroave',
                'ON/lwf2cdf',
                'Optimizer/simplex',
                'Optimizer/swarm',
                'pdosxml/pdosxml',
                'Projections/orbmol_proj',
                'SiestaSubroutine/FmixMD/Src/driver',
                'SiestaSubroutine/FmixMD/Src/para',
                'SiestaSubroutine/FmixMD/Src/simple',
                'STM/simple-stm/plstm',
                'STM/ol-stm/Src/stm',
                'VCA/mixps',
                'VCA/fractional',
                'Vibra/Src/vibra',
                'Vibra/Src/fcbuild',
                'WFS/info_wfsx',
                'WFS/wfsx2wfs',
                'WFS/readwfx',
                'WFS/wfsnc2wfsx',
                'WFS/readwf',
                'WFS/wfs2wfsx',
            ]

            if LooseVersion(self.version) <= LooseVersion('4.0'):
                expected_utils.extend([
                    'Bands/new.gnubands',
                    'TBTrans/tbtrans',
                ])

            if LooseVersion(self.version) >= LooseVersion('4.0'):
                expected_utils.extend([
                    'SiestaSubroutine/ProtoNEB/Src/protoNEB',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_parallel',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_serial',
                    'Sockets/f2fmaster',
                    'Sockets/f2fslave',
                ])

            if LooseVersion(self.version) >= LooseVersion('4.1'):
                expected_utils.extend([
                    'Bands/gnubands',
                    'Grimme/fdf2grimme',
                    'SpPivot/pvtsp',
                    'TS/ts2ts/ts2ts',
                    'TS/tshs2tshs/tshs2tshs',
                    'TS/TBtrans/tbtrans',
                ])

            for util in expected_utils:
                copy_file(os.path.join(start_dir, 'Util', util), bindir)

        if self.cfg['with_transiesta']:
            # Build transiesta
            change_dir(obj_dir)

            run_cmd('make clean', log_all=True, simple=True, log_output=True)
            run_cmd('make %s transiesta' % par,
                    log_all=True,
                    simple=True,
                    log_output=True)

            copy_file(os.path.join(obj_dir, 'transiesta'), bindir)
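
The 'regex_newlines' entries above append whole new lines after an existing assignment in arch.make. A rough, self-contained illustration of that effect using plain re.sub with MULTILINE (the arch.make fragment is made up; apply_regex_substitutions works through the file line by line, with the same net result for such patterns):

import re

arch_make = "FC = gfortran\nFPPFLAGS = -DF2003\n"
pattern, repl = r"^(FPPFLAGS\s*=.*)$", r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = ."
print(re.sub(pattern, repl, arch_make, flags=re.M))
# FC = gfortran
# FPPFLAGS = -DF2003
# MPI_INTERFACE = libmpi_f90.a
# MPI_INCLUDE = .
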
Example #45
0
    def install_step(self):
        """MATLAB install procedure using 'install' command."""

        src = os.path.join(self.cfg['start_dir'], 'install')

        # make sure install script is executable
        adjust_permissions(src, stat.S_IXUSR)

        if self.looseversion >= LooseVersion('2016b'):
            jdir = os.path.join(self.cfg['start_dir'], 'sys', 'java', 'jre', 'glnxa64', 'jre', 'bin')
            for perm_dir in [os.path.join(self.cfg['start_dir'], 'bin', 'glnxa64'), jdir]:
                adjust_permissions(perm_dir, stat.S_IXUSR)

        # make sure $DISPLAY is not defined, which may lead to (hard to trace) problems
        # this is a workaround for not being able to specify --nodisplay to the install scripts
        if 'DISPLAY' in os.environ:
            os.environ.pop('DISPLAY')

        if '_JAVA_OPTIONS' not in self.cfg['preinstallopts']:
            java_opts = 'export _JAVA_OPTIONS="%s" && ' % self.cfg['java_options']
            self.cfg['preinstallopts'] = java_opts + self.cfg['preinstallopts']
        if self.looseversion >= LooseVersion('2016b'):
            change_dir(self.builddir)

        # Build the cmd string
        cmdlist = [
            self.cfg['preinstallopts'],
            src,
            '-inputFile',
            self.configfile,
        ]
        if self.looseversion < LooseVersion('2020a'):
            # MATLAB installers < 2020a ignore $TMPDIR (always use /tmp) and might need a large tmpdir
            tmpdir = tempfile.mkdtemp()
            cmdlist.extend([
                '-v',
                '-tmpdir',
                tmpdir,
            ])
        cmdlist.append(self.cfg['installopts'])
        cmd = ' '.join(cmdlist)

        keys = self.cfg['key']
        if keys is None:
            keys = os.getenv('EB_MATLAB_KEY', '00000-00000-00000-00000-00000-00000-00000-00000-00000-00000')
        if isinstance(keys, string_type):
            keys = keys.split(',')

        # Compile the installation key regex outside of the loop
        regkey = re.compile(br"^(# )?fileInstallationKey=.*", re.M)

        # Run an install for each key
        for i, key in enumerate(keys):

            self.log.info('Installing MATLAB with key %s of %s', i + 1, len(keys))

            try:
                config = read_file(self.configfile, mode='rb')
                config = regkey.sub(b"fileInstallationKey=%s" % key.encode('utf-8'), config)
                write_file(self.configfile, config)

            except IOError as err:
                raise EasyBuildError("Failed to update config file %s: %s", self.configfile, err)

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            # check installer output for known signs of trouble
            patterns = [
                "Error: You have entered an invalid File Installation Key",
            ]

            for pattern in patterns:
                regex = re.compile(pattern, re.I)
                if regex.search(out):
                    raise EasyBuildError("Found error pattern '%s' in output of installation command '%s': %s",
                                         regex.pattern, cmd, out)
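
A small, self-contained sketch of how the installer input file is rewritten for each installation key above; the file contents and key are made up, only the regex logic mirrors the install step:

import re

config = b"# fileInstallationKey=\ndestinationFolder=/opt/matlab\n"
regkey = re.compile(br"^(# )?fileInstallationKey=.*", re.M)

key = '12345-67890-12345-67890'  # placeholder key, not a real one
config = regkey.sub(b"fileInstallationKey=%s" % key.encode('utf-8'), config)
print(config.decode())
# fileInstallationKey=12345-67890-12345-67890
# destinationFolder=/opt/matlab
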
Example #46
0
            tmpdir = tempfile.mkdtemp(suffix='-%s-%s' %
                                      (self.name, self.version))
            # remove created directory, since we're not going to use it as is
            os.rmdir(tmpdir)
            # avoid having '['/']' characters in build dir name, NWChem doesn't like that
            start_dir = tmpdir.replace('[', '_').replace(']', '_')
            mkdir(os.path.dirname(start_dir), parents=True)
            symlink(self.cfg['start_dir'], start_dir)
            change_dir(start_dir)
            self.cfg['start_dir'] = start_dir
        except OSError as err:
            raise EasyBuildError(
                "Failed to symlink build dir to a shorter path name: %s", err)

        # change to actual build dir
        change_dir('src')

        nwchem_modules = self.cfg['modules']

        # set required NWChem environment variables
        env.setvar('NWCHEM_TOP', self.cfg['start_dir'])
        if len(self.cfg['start_dir']) > 64:
            # workaround for:
            # "The directory name chosen for NWCHEM_TOP is longer than the maximum allowed value of 64 characters"
            # see also https://svn.pnl.gov/svn/nwchem/trunk/src/util/util_nwchem_srcdir.F
            self.setvar_env_makeopt('NWCHEM_LONG_PATHS', 'Y')

        env.setvar('NWCHEM_TARGET', self.cfg['target'])
        env.setvar('MSG_COMMS', self.cfg['msg_comms'])
        env.setvar('ARMCI_NETWORK', self.cfg['armci_network'])
        if self.cfg['armci_network'] in ["OPENIB"]:
Example #47
0
    def configure_step(self, srcdir=None, builddir=None):
        """Configure build using cmake"""

        # Set the search paths for CMake
        tc_ipaths = self.toolchain.get_variable("CPPFLAGS", list)
        tc_lpaths = self.toolchain.get_variable("LDFLAGS", list)
        cpaths = os.getenv('CPATH', '').split(os.pathsep)
        lpaths = os.getenv('LD_LIBRARY_PATH', '').split(os.pathsep)
        include_paths = os.pathsep.join(nub(tc_ipaths + cpaths))
        library_paths = os.pathsep.join(nub(tc_lpaths + lpaths))
        setvar("CMAKE_INCLUDE_PATH", include_paths)
        setvar("CMAKE_LIBRARY_PATH", library_paths)

        if builddir is None and self.cfg.get('separate_build_dir', False):
            builddir = os.path.join(self.builddir, 'easybuild_obj')

        if builddir:
            mkdir(builddir, parents=True)
            change_dir(builddir)
            default_srcdir = self.cfg['start_dir']
        else:
            default_srcdir = '.'

        if srcdir is None:
            if self.cfg.get('srcdir', None) is not None:
                srcdir = self.cfg['srcdir']
            else:
                srcdir = default_srcdir

        options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir]
        env_to_options = {
            'CC': 'CMAKE_C_COMPILER',
            'CFLAGS': 'CMAKE_C_FLAGS',
            'CXX': 'CMAKE_CXX_COMPILER',
            'CXXFLAGS': 'CMAKE_CXX_FLAGS',
            'F90': 'CMAKE_Fortran_COMPILER',
            'FFLAGS': 'CMAKE_Fortran_FLAGS',
        }
        for env_name, option in env_to_options.items():
            value = os.getenv(env_name)
            if value is not None:
                if option.endswith('_COMPILER') and self.cfg.get(
                        'abs_path_compilers', False):
                    value = which(value)
                    self.log.info(
                        "Using absolute path to compiler command: %s", value)
                options.append("-D%s='%s'" % (option, value))

        if build_option('rpath'):
            # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install...
            # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416
            options.append('-DCMAKE_SKIP_RPATH=ON')

        # show what CMake is doing by default
        options.append('-DCMAKE_VERBOSE_MAKEFILE=ON')

        if not self.cfg.get('allow_system_boost', False):
            # don't pick up on system Boost if Boost is included as dependency
            # - specify Boost location via -DBOOST_ROOT
            # - instruct CMake to not search for Boost headers/libraries in other places
            # - disable search for Boost CMake package configuration file
            boost_root = get_software_root('Boost')
            if boost_root:
                options.extend([
                    '-DBOOST_ROOT=%s' % boost_root,
                    '-DBoost_NO_SYSTEM_PATHS=ON',
                    '-DBoost_NO_BOOST_CMAKE=ON',
                ])

        options_string = ' '.join(options)

        if self.cfg.get('configure_cmd') == DEFAULT_CONFIGURE_CMD:
            command = ' '.join([
                self.cfg['preconfigopts'], DEFAULT_CONFIGURE_CMD,
                options_string, self.cfg['configopts'], srcdir
            ])
        else:
            command = ' '.join([
                self.cfg['preconfigopts'],
                self.cfg.get('configure_cmd'), self.cfg['configopts']
            ])

        (out, _) = run_cmd(command, log_all=True, simple=False)

        return out
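
A self-contained sketch of how the env_to_options mapping above turns compiler environment variables into CMake -D options; the environment values and install prefix are illustrative:

env = {'CC': 'gcc', 'CXX': 'g++', 'CFLAGS': '-O2 -ftree-vectorize'}
env_to_options = {
    'CC': 'CMAKE_C_COMPILER',
    'CFLAGS': 'CMAKE_C_FLAGS',
    'CXX': 'CMAKE_CXX_COMPILER',
}
options = ['-DCMAKE_INSTALL_PREFIX=%s' % '/tmp/example/install']
for env_name, option in sorted(env_to_options.items()):
    value = env.get(env_name)
    if value is not None:
        options.append("-D%s='%s'" % (option, value))
print(' '.join(options))
# -DCMAKE_INSTALL_PREFIX=/tmp/example/install -DCMAKE_C_COMPILER='gcc' -DCMAKE_C_FLAGS='-O2 -ftree-vectorize' -DCMAKE_CXX_COMPILER='g++'
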
Example #48
0
    def install_step(self):
        """Custom install procedure for RepeatMasker."""
        super(EB_RepeatMasker, self).install_step()

        # check for required dependencies
        perl_root = get_software_root('Perl')
        if perl_root:
            perl = os.path.join(perl_root, 'bin', 'perl')
        else:
            raise EasyBuildError("Missing required dependency: Perl")

        trf_root = get_software_root('TRF')
        if trf_root:
            trf = os.path.join(trf_root, 'trf')
        else:
            raise EasyBuildError("Missing required dependency: TRF")

        # determine which search engine to use
        # see also http://www.repeatmasker.org/RMDownload.html
        cand_search_engines = ['CrossMatch', 'RMBlast', 'WUBlast', 'HMMER']
        search_engine = None
        for dep in cand_search_engines:
            if get_software_root(dep):
                if search_engine is None:
                    search_engine = dep
                else:
                    raise EasyBuildError(
                        "Found multiple candidate search engines: %s and %s",
                        search_engine, dep)

        if search_engine is None:
            raise EasyBuildError(
                "No search engine found, one of these must be included as dependency: %s",
                ' '.join(cand_search_engines))

        change_dir(self.installdir)

        patch_perl_script_autoflush('configure')

        search_engine_map = {
            'CrossMatch': '1',
            'RMBlast': '2',
            'WUBlast': '3',
            'HMMER': '4',
        }
        search_engine_bindir = os.path.join(get_software_root(search_engine),
                                            'bin')

        cmd = "perl ./configure"
        qa = {
            '<PRESS ENTER TO CONTINUE>': '',
            # select search engine
            'Enter Selection:': search_engine_map[search_engine],
        }
        std_qa = {
            r'\*\*PERL PROGRAM\*\*\n([^*]*\n)+Enter path.*':
            perl,
            r'\*\*REPEATMASKER INSTALLATION DIRECTORY\*\*\n([^*]*\n)+Enter path.*':
            self.installdir,
            r'\*\*TRF PROGRAM\*\*\n([^*]*\n)+Enter path.*':
            trf,
            # search engine installation path (location of /bin subdirectory)
            # also enter 'Y' to confirm + '5' ("Done") to complete selection process for search engine
            r'\*\*.* INSTALLATION PATH\*\*\n([^*]*\n)+Enter path.*':
            search_engine_bindir + '\nY\n5',
        }
        run_cmd_qa(cmd,
                   qa,
                   std_qa=std_qa,
                   log_all=True,
                   simple=True,
                   log_ok=True)
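
The std_qa dictionary above maps multi-line prompt patterns to canned answers. A self-contained check that one of those patterns really matches a configure-style prompt (the prompt text is made up):

import re

prompt = (
    "**TRF PROGRAM**\n"
    "This is the full path to the TRF program.\n"
    "Enter path [ /usr/local/bin/trf ]:"
)
pattern = re.compile(r'\*\*TRF PROGRAM\*\*\n([^*]*\n)+Enter path.*')
print(bool(pattern.search(prompt)))  # True: the configured TRF path would be sent as the answer
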
Example #49
0
    def configure_step(self):
        """
        Configure for GCC build:
        - prepare extra source dirs (GMP, MPFR, MPC, ...)
        - create obj dir to build in (GCC doesn't like to be built in source dir)
        - add configure and make options, according to .eb spec file
        - decide whether or not to do a staged build (which is required to enable PPL/CLooG support)
        - set platform_lib based on config.guess output
        """

        sysroot = build_option('sysroot')
        if sysroot:
            # based on changes made to GCC in Gentoo Prefix
            # https://gitweb.gentoo.org/repo/gentoo.git/tree/profiles/features/prefix/standalone/profile.bashrc

            # add --with-sysroot configure option, to instruct GCC to consider
            # value set for EasyBuild's --sysroot configuration option as the root filesystem of the operating system
            # (see https://gcc.gnu.org/install/configure.html)
            self.cfg.update('configopts', '--with-sysroot=%s' % sysroot)

            # avoid that --sysroot is passed to linker by patching value for SYSROOT_SPEC in gcc/gcc.c
            apply_regex_substitutions(os.path.join('gcc', 'gcc.c'),
                                      [('--sysroot=%R', '')])

            # prefix dynamic linkers with sysroot
            # this patches lines like:
            # #define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"
            # for PowerPC (rs6000) we have to set DYNAMIC_LINKER_PREFIX to sysroot
            gcc_config_headers = glob.glob(
                os.path.join('gcc', 'config', '*', '*linux*.h'))
            regex_subs = [
                ('(_DYNAMIC_LINKER.*[":])/lib', r'\1%s/lib' % sysroot),
                ('(DYNAMIC_LINKER_PREFIX\\s+)""', r'\1"%s"' % sysroot),
            ]
            for gcc_config_header in gcc_config_headers:
                apply_regex_substitutions(gcc_config_header, regex_subs)

        # self.configopts will be reused in a 3-staged build,
        # configopts is only used in first configure
        self.configopts = self.cfg['configopts']

        # I) prepare extra source dirs, e.g. for GMP, MPFR, MPC (if required), so GCC can build them
        stage1_info = self.prep_extra_src_dirs("stage1")
        configopts = stage1_info['configopts']

        # II) update config options

        # enable specified language support
        if self.cfg['languages']:
            self.configopts += " --enable-languages=%s" % ','.join(
                self.cfg['languages'])

        if self.cfg['withnvptx']:
            if self.iter_idx == 0:
                self.configopts += " --without-cuda-driver"
                self.configopts += " --enable-offload-targets=nvptx-none"
            else:
                # register installed GCC as compiler to use nvptx
                path = "%s/bin:%s" % (self.installdir, os.getenv('PATH'))
                env.setvar('PATH', path)

                ld_lib_path = "%(dir)s/lib64:%(dir)s/lib:%(val)s" % {
                    'dir': self.installdir,
                    'val': os.getenv('LD_LIBRARY_PATH')
                }
                env.setvar('LD_LIBRARY_PATH', ld_lib_path)
                extra_source = {1: "nvptx-tools", 2: "newlib"}[self.iter_idx]
                extra_source_dirs = glob.glob(
                    os.path.join(self.builddir, '%s-*' % extra_source))
                if len(extra_source_dirs) != 1:
                    raise EasyBuildError("Failed to isolate %s source dir" %
                                         extra_source)
                if self.iter_idx == 1:
                    # compile nvptx-tools
                    change_dir(extra_source_dirs[0])
                else:  # self.iter_idx == 2
                    # compile nvptx target compiler
                    symlink(os.path.join(extra_source_dirs[0], 'newlib'),
                            'newlib')
                    self.create_dir("build-nvptx-gcc")
                    self.cfg.update('configopts', self.configopts)
                    self.cfg.update(
                        'configopts',
                        "--with-build-time-tools=%s/nvptx-none/bin" %
                        self.installdir)
                    self.cfg.update('configopts', "--target=nvptx-none")
                    host_type = self.determine_build_and_host_type()[1]
                    self.cfg.update(
                        'configopts',
                        "--enable-as-accelerator-for=%s" % host_type)
                    self.cfg.update('configopts', "--disable-sjlj-exceptions")
                    self.cfg.update('configopts',
                                    "--enable-newlib-io-long-long")
                    self.cfg['configure_cmd_prefix'] = '../'
                return super(EB_GCC, self).configure_step()

        # enable building of libiberty, if desired
        if self.cfg['withlibiberty']:
            self.configopts += " --enable-install-libiberty"

        # enable link-time-optimization (LTO) support, if desired
        if self.cfg['withlto']:
            self.configopts += " --enable-lto"
        else:
            self.configopts += " --disable-lto"

        # configure for a release build
        self.configopts += " --enable-checking=release "
        # enable multilib: allow both 32 and 64 bit
        if self.cfg['multilib']:
            glibc_32bit = [
                "glibc.i686",  # Fedora, RedHat-based
                "glibc.ppc",  # "" on Power
                "libc6-dev-i386",  # Debian-based
                "gcc-c++-32bit",  # OpenSuSE, SLES
            ]
            if not any([check_os_dependency(dep) for dep in glibc_32bit]):
                raise EasyBuildError(
                    "Using multilib requires 32-bit glibc (install one of %s, depending on your OS)",
                    ', '.join(glibc_32bit))
            self.configopts += " --enable-multilib --with-multilib-list=m32,m64"
        else:
            self.configopts += " --disable-multilib"
        # build both static and dynamic libraries (???)
        self.configopts += " --enable-shared=yes --enable-static=yes "

        # use POSIX threads
        self.configopts += " --enable-threads=posix "

        # enable plugin support
        self.configopts += " --enable-plugins "

        # use GOLD as default linker
        if self.cfg['use_gold_linker']:
            self.configopts += " --enable-gold=default --enable-ld --with-plugin-ld=ld.gold"
        else:
            self.configopts += " --enable-gold --enable-ld=default"

        # enable bootstrap build for self-containment (unless for staged build)
        if not self.stagedbuild:
            configopts += " --enable-bootstrap"
        else:
            configopts += " --disable-bootstrap"

        if self.stagedbuild:
            #
            # STAGE 1: configure GCC build that will be used to build PPL/CLooG
            #
            self.log.info(
                "Starting with stage 1 of 3-staged build to enable CLooG and/or PPL, ISL support..."
            )
            self.stage1installdir = os.path.join(self.builddir,
                                                 'GCC_stage1_eb')
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.stage1installdir
            }

        else:
            # unstaged build, so just run standard configure/make/make install
            # set prefixes
            self.log.info("Performing regular GCC build...")
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.installdir
            }

        # prioritize lib over lib{64,32,x32} for all architectures by overriding default MULTILIB_OSDIRNAMES config
        # only do this when multilib is not enabled
        if self.cfg['prefer_lib_subdir'] and not self.cfg['multilib']:
            cfgfile = 'gcc/config/i386/t-linux64'
            multilib_osdirnames = "MULTILIB_OSDIRNAMES = m64=../lib:../lib64 m32=../lib:../lib32 mx32=../lib:../libx32"
            self.log.info("Patching MULTILIB_OSDIRNAMES in %s with '%s'",
                          cfgfile, multilib_osdirnames)
            write_file(cfgfile, multilib_osdirnames, append=True)
        elif self.cfg['multilib']:
            self.log.info(
                "Not patching MULTILIB_OSDIRNAMES since use of --enable-multilib is enabled"
            )

        # III) create obj dir to build in, and change to it
        #     GCC doesn't like to be built in the source dir
        if self.stagedbuild:
            objdir = self.create_dir("stage1_obj")
            self.stage1prefix = objdir
        else:
            objdir = self.create_dir("obj")

        # IV) actual configure, but not on default path
        cmd = "../configure  %s %s" % (self.configopts, configopts)

        self.run_configure_cmd(cmd)

        self.disable_lto_mpfr_old_gcc(objdir)
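
A self-contained sketch of the sysroot substitution applied to the dynamic-linker defines above; the header line and sysroot path are illustrative:

import re

line = '#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"'
sysroot = '/apps/sysroot'  # hypothetical value of EasyBuild's --sysroot option

print(re.sub('(_DYNAMIC_LINKER.*[":])/lib', r'\1%s/lib' % sysroot, line))
# #define GLIBC_DYNAMIC_LINKER64 "/apps/sysroot/lib64/ld-linux-x86-64.so.2"
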
Example #50
0
    def test_step(self):
        """Custom test step for Siesta."""
        change_dir(os.path.join(self.cfg['start_dir'], 'Obj', 'Tests'))
        super(EB_Siesta, self).test_step()

    def build_step(self):
        """Build fastStructure using setup.py."""
        cwd = change_dir('vars')
        run_cmd("python setup.py build_ext --inplace")
        change_dir(cwd)
        run_cmd("python setup.py build_ext --inplace")
Example #52
0
    def build_step(self):
        """All the build steps of Siesta"""
        allutils = self.cfg['with_utils']
        someutils = self.cfg['with_utils_names']
        start_dir = self.cfg['start_dir']
        loose_ver = LooseVersion(self.version)

        # Main executable
        change_dir(self.obj_dir)
        run_cmd('make %s' % self.jpar,
                log_all=True,
                simple=True,
                log_output=True)
        copy_file(os.path.join(self.obj_dir, 'siesta'), self.bindir)

        # Utilities
        change_dir(os.path.join(start_dir, 'Util'))
        if allutils or someutils:
            # build_all.sh might be missing executable bit...
            adjust_permissions('./build_all.sh',
                               stat.S_IXUSR,
                               recursive=False,
                               relative=True)
            run_cmd('./build_all.sh %s' % self.jpar,
                    log_all=True,
                    simple=True,
                    log_output=True)

            # Re-run the build in the TBtrans dir to also produce phtrans
            if loose_ver >= LooseVersion('4.1'):
                copy_file('TS/TBtrans/tbtrans', 'TS/tbtrans')
                change_dir(os.path.join(start_dir, 'Util', 'TS', 'TBtrans'))
                run_cmd('make clean',
                        log_all=True,
                        simple=True,
                        log_output=True)
                run_cmd('make phtrans %s' % self.jpar,
                        log_all=True,
                        simple=True,
                        log_output=True)
                change_dir(os.path.join(start_dir, 'Util'))
                copy_file('TS/TBtrans/phtrans', 'TS/phtrans')

            # Copy over utils to the temporary dir
            for util in self.expected_utils:
                copy_file(os.path.join(start_dir, 'Util', util), self.bindir)

        # Transiesta executable (older siestas)
        if self.cfg['with_transiesta']:
            change_dir(self.obj_dir)

            ts_clean_target = 'clean'
            if loose_ver >= LooseVersion('4.1-b4'):
                ts_clean_target += '-transiesta'

            run_cmd('make %s' % ts_clean_target,
                    log_all=True,
                    simple=True,
                    log_output=True)
            run_cmd('make %s transiesta' % self.jpar,
                    log_all=True,
                    simple=True,
                    log_output=True)

            copy_file(os.path.join(self.obj_dir, 'transiesta'), self.bindir)
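
The version gates above lean on LooseVersion ordering, including for pre-release tags like '4.1-b4'. A quick self-contained check of the comparisons being relied on, assuming distutils' LooseVersion (which these easyblocks historically imported):

from distutils.version import LooseVersion

print(LooseVersion('4.0.2') < LooseVersion('4.1'))      # True
print(LooseVersion('4.1-b3') < LooseVersion('4.1-b4'))  # True
print(LooseVersion('4.1-b4') >= LooseVersion('4.1'))    # True: the beta tag sorts after the bare '4.1' prefix
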
Example #53
0
    def test_step(self):
        """Run regression test."""

        if self.cfg['runtest']:

            # we need to specify location of 'data' directory in *build* dir,
            # since we've configured CP2K to look into the installation directory
            # (where 'data' will be copied to in install step)
            setvar('CP2K_DATA_DIR', os.path.join(self.cfg['start_dir'], 'data'))

            if not build_option('mpi_tests'):
                self.log.info("Skipping testing of CP2K since MPI testing is disabled")
                return

            if self.cfg['omp_num_threads']:
                setvar('OMP_NUM_THREADS', self.cfg['omp_num_threads'])

            # change to root of build dir
            change_dir(self.builddir)

            # use regression test reference output if available
            # try and find an unpacked directory that starts with 'LAST-'
            regtest_refdir = None
            for d in os.listdir(self.builddir):
                if d.startswith("LAST-"):
                    regtest_refdir = d
                    break

            # location of do_regtest script
            cfg_fn = "cp2k_regtest.cfg"
            regtest_script = os.path.join(self.cfg['start_dir'], 'tools', 'regtesting', 'do_regtest')
            regtest_cmd = "%s -nosvn -nobuild -config %s" % (regtest_script, cfg_fn)
            # older version of CP2K
            if not os.path.exists(regtest_script):
                regtest_script = os.path.join(self.cfg['start_dir'], 'tools', 'do_regtest')
                regtest_cmd = "%s -nocvs -quick -nocompile -config %s" % (regtest_script, cfg_fn)

            # patch do_regtest so that reference output is used
            if regtest_refdir:
                self.log.info("Using reference output available in %s" % regtest_refdir)
                try:
                    for line in fileinput.input(regtest_script, inplace=1, backup='.orig.refout'):
                        line = re.sub(r"^(dir_last\s*=\${dir_base})/.*$", r"\1/%s" % regtest_refdir, line)
                        sys.stdout.write(line)
                except IOError as err:
                    raise EasyBuildError("Failed to modify '%s': %s", regtest_script, err)

            else:
                self.log.info("No reference output found for regression test, just continuing without it...")

            # prefer using 4 cores, since some tests require/prefer square (n^2) numbers or powers of 2 (2^n)
            test_core_cnt = min(self.cfg['parallel'], 4)
            if get_avail_core_count() < test_core_cnt:
                raise EasyBuildError("Cannot run MPI tests as not enough cores (< %s) are available", test_core_cnt)
            else:
                self.log.info("Using %s cores for the MPI tests" % test_core_cnt)

            # configure regression test
            cfg_txt = '\n'.join([
                'FORT_C_NAME="%(f90)s"',
                'dir_base=%(base)s',
                'cp2k_version=%(cp2k_version)s',
                'dir_triplet=%(triplet)s',
                'export ARCH=${dir_triplet}',
                'cp2k_dir=%(cp2k_dir)s',
                'leakcheck="YES"',
                'maxtasks=%(maxtasks)s',
                'cp2k_run_prefix="%(mpicmd_prefix)s"',
            ]) % {
                'f90': os.getenv('F90'),
                'base': os.path.dirname(os.path.normpath(self.cfg['start_dir'])),
                'cp2k_version': self.cfg['type'],
                'triplet': self.typearch,
                'cp2k_dir': os.path.basename(os.path.normpath(self.cfg['start_dir'])),
                'maxtasks': self.cfg['maxtasks'],
                'mpicmd_prefix': self.toolchain.mpi_cmd_for('', test_core_cnt),
            }

            write_file(cfg_fn, cfg_txt)
            self.log.debug("Contents of %s: %s" % (cfg_fn, cfg_txt))

            # run regression test
            (regtest_output, ec) = run_cmd(regtest_cmd, log_all=True, simple=False, log_output=True)

            if ec == 0:
                self.log.info("Regression test output:\n%s" % regtest_output)
            else:
                raise EasyBuildError("Regression test failed (non-zero exit code): %s", regtest_output)

            # pattern to search for regression test summary
            re_pattern = r"number\s+of\s+%s\s+tests\s+(?P<cnt>[0-9]+)"

            # find total number of tests
            regexp = re.compile(re_pattern % "", re.M | re.I)
            res = regexp.search(regtest_output)
            tot_cnt = None
            if res:
                tot_cnt = int(res.group('cnt'))
            else:
                raise EasyBuildError("Finding total number of tests in regression test summary failed")

            # function to report on regtest results
            def test_report(test_result):
                """Report on tests with given result."""

                postmsg = ''

                test_result = test_result.upper()
                regexp = re.compile(re_pattern % test_result, re.M | re.I)

                cnt = None
                res = regexp.search(regtest_output)
                if not res:
                    raise EasyBuildError("Finding number of %s tests in regression test summary failed",
                                         test_result.lower())
                else:
                    cnt = int(res.group('cnt'))

                logmsg = "Regression test reported %s / %s %s tests"
                logmsg_values = (cnt, tot_cnt, test_result.lower())

                # failed tests indicate problem with installation
                # wrong tests are only an issue when there are excessively many
                if (test_result == "FAILED" and cnt > 0) or (test_result == "WRONG" and (cnt / tot_cnt) > 0.1):
                    if self.cfg['ignore_regtest_fails']:
                        self.log.warning(logmsg, *logmsg_values)
                        self.log.info("Ignoring failures in regression test, as requested.")
                    else:
                        raise EasyBuildError(logmsg, *logmsg_values)
                elif test_result == "CORRECT" or cnt == 0:
                    self.log.info(logmsg, *logmsg_values)
                else:
                    self.log.warning(logmsg, *logmsg_values)

                return postmsg

            # number of failed/wrong tests, will report error if count is positive
            self.postmsg += test_report("FAILED")
            self.postmsg += test_report("WRONG")

            # number of new tests, will be high if a non-suitable regtest reference was used
            # will report error if count is positive (is that what we want?)
            self.postmsg += test_report("NEW")

            # number of correct tests: just report
            test_report("CORRECT")
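
A self-contained sketch of how the summary regex above pulls counts out of the regression test output; the summary lines are made up:

import re

regtest_output = """
number of FAILED  tests 2
number of CORRECT tests 3000
number of         tests 3002
"""
re_pattern = r"number\s+of\s+%s\s+tests\s+(?P<cnt>[0-9]+)"

tot_cnt = int(re.search(re_pattern % "", regtest_output, re.M | re.I).group('cnt'))
fail_cnt = int(re.search(re_pattern % "FAILED", regtest_output, re.M | re.I).group('cnt'))
print(tot_cnt, fail_cnt)  # 3002 2
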
Example #54
0
    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""

        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST',
                                               '(none)')):

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            travis_branch = os.environ.get('TRAVIS_BRANCH', None)
            if travis_branch and travis_branch != 'master':

                if not self.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(
                    os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
                out, ec = run_cmd(cmd, simple=False)
                changed_ecs_filenames = [
                    os.path.basename(f) for f in out.strip().split('\n')
                    if f.endswith('.eb')
                ]
                print("\nList of changed easyconfig files in this PR: %s" %
                      '\n'.join(changed_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames:
                    match = None
                    for ec in self.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' %
                                          (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs)
                self.check_sanity_check_paths(changed_ecs)
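
A self-contained sketch of how the changed easyconfig filenames are extracted from the 'git diff --name-only' output above; the sample output is made up:

import os

out = "\n".join([
    "easybuild/easyconfigs/z/zlib/zlib-1.2.11-GCCcore-9.3.0.eb",
    "easybuild/easyconfigs/g/GCC/GCC-9.3.0.eb",
    "test/easyconfigs/easyconfigs.py",
])
changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
print(changed_ecs_filenames)  # ['zlib-1.2.11-GCCcore-9.3.0.eb', 'GCC-9.3.0.eb']
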
Example #55
0
    def test_cases_step(self):
        """Run provided list of test cases, or provided examples is no test cases were specified."""

        # run all examples if no test cases were specified
        # order and grouping are important for some of these tests (e.g., [o]h3tr*)
        # Some of the examples are excluded:
        # missing md parameter files: dna.nw, mache.nw, 18c6NaK.nw, membrane.nw, sdm.nw
        # method not implemented (unknown theory) or keyword not found: triplet.nw, C2H6.nw, pspw_MgO.nw, ccsdt_polar_small.nw, CG.nw
        # no convergence: diamond.nw
        # Too much memory required: ccsd_polar_big.nw
        if type(self.cfg['tests']) is bool:
            examples = [
                ('qmd', ['3carbo_dft.nw', '3carbo.nw', 'h2o_scf.nw']),
                ('pspw',
                 ['C2.nw', 'C6.nw', 'Carbene.nw', 'Na16.nw', 'NaCl.nw']),
                ('tcepolar', ['ccsd_polar_small.nw']),
                ('dirdyvtst/h3', ['h3tr1.nw', 'h3tr2.nw']),
                ('dirdyvtst/h3', ['h3tr3.nw']), ('dirdyvtst/h3', ['h3tr4.nw']),
                ('dirdyvtst/h3', ['h3tr5.nw']),
                ('dirdyvtst/oh3', ['oh3tr1.nw', 'oh3tr2.nw']),
                ('dirdyvtst/oh3', ['oh3tr3.nw']),
                ('dirdyvtst/oh3', ['oh3tr4.nw']),
                ('dirdyvtst/oh3', ['oh3tr5.nw']),
                ('pspw/session1', [
                    'band.nw', 'si4.linear.nw', 'si4.rhombus.nw',
                    'S2-drift.nw', 'silicon.nw', 'S2.nw', 'si4.rectangle.nw'
                ]), ('md/myo', ['myo.nw']), ('md/nak', ['NaK.nw']),
                ('md/crown', ['crown.nw']), ('md/hrc', ['hrc.nw']),
                ('md/benzene', ['benzene.nw'])
            ]

            self.cfg['tests'] = [(os.path.join(self.examples_dir, d), l)
                                 for (d, l) in examples]
            self.log.info("List of examples to be run as test cases: %s" %
                          self.cfg['tests'])

        try:
            # symlink $HOME/.nwchemrc to local copy of default nwchemrc
            default_nwchemrc = os.path.join(self.installdir, 'data',
                                            'default.nwchemrc')

            # make a local copy of the default .nwchemrc file at a fixed path, so we can symlink to it
            # this makes sure that multiple parallel builds can reuse the same symlink, even for different builds
            # there is apparently no way to point NWChem to a particular config file other than $HOME/.nwchemrc
            try:
                local_nwchemrc_dir = os.path.dirname(self.local_nwchemrc)
                if not os.path.exists(local_nwchemrc_dir):
                    os.makedirs(local_nwchemrc_dir)
                shutil.copy2(default_nwchemrc, self.local_nwchemrc)

                # only try to create symlink if it's not there yet
                # we've verified earlier that the symlink is what we expect it to be if it's there
                if not os.path.islink(self.home_nwchemrc):
                    symlink(self.local_nwchemrc, self.home_nwchemrc)
            except OSError as err:
                raise EasyBuildError("Failed to symlink %s to %s: %s",
                                     self.home_nwchemrc, self.local_nwchemrc,
                                     err)

            # run tests, keep track of fail ratio
            cwd = os.getcwd()

            fail = 0.0
            tot = 0.0

            success_regexp = re.compile(r"Total times\s*cpu:.*wall:.*")

            test_cases_logfn = os.path.join(self.installdir, config.log_path(),
                                            'test_cases.log')
            test_cases_log = open(test_cases_logfn, "w")

            for (testdir, tests) in self.cfg['tests']:

                # run test in a temporary dir
                tmpdir = tempfile.mkdtemp(prefix='nwchem_test_')
                change_dir(tmpdir)

                # copy all files in test case dir
                for item in os.listdir(testdir):
                    test_file = os.path.join(testdir, item)
                    if os.path.isfile(test_file):
                        self.log.debug("Copying %s to %s" %
                                       (test_file, tmpdir))
                        shutil.copy2(test_file, tmpdir)

                # run tests
                for testx in tests:
                    cmd = "nwchem %s" % testx
                    msg = "Running test '%s' (from %s) in %s..." % (
                        cmd, testdir, tmpdir)
                    self.log.info(msg)
                    test_cases_log.write("\n%s\n" % msg)
                    (out, ec) = run_cmd(cmd,
                                        simple=False,
                                        log_all=False,
                                        log_ok=False,
                                        log_output=True)

                    # check exit code and output
                    if ec:
                        msg = "Test %s failed (exit code: %s)!" % (testx, ec)
                        self.log.warning(msg)
                        test_cases_log.write('FAIL: %s' % msg)
                        fail += 1
                    else:
                        if success_regexp.search(out):
                            msg = "Test %s successful!" % testx
                            self.log.info(msg)
                            test_cases_log.write('SUCCESS: %s' % msg)
                        else:
                            msg = "No 'Total times' found for test %s (but exit code is %s)!" % (
                                testx, ec)
                            self.log.warning(msg)
                            test_cases_log.write('FAIL: %s' % msg)
                            fail += 1

                    test_cases_log.write("\nOUTPUT:\n\n%s\n\n" % out)

                    tot += 1

                # go back
                change_dir(cwd)
                shutil.rmtree(tmpdir)

            fail_ratio = fail / tot
            fail_pcnt = fail_ratio * 100

            msg = "%d of %d tests failed (%s%%)!" % (fail, tot, fail_pcnt)
            self.log.info(msg)
            test_cases_log.write('\n\nSUMMARY: %s' % msg)

            test_cases_log.close()
            self.log.info("Log for test cases saved at %s" % test_cases_logfn)

            if fail_ratio > self.cfg['max_fail_ratio']:
                max_fail_pcnt = self.cfg['max_fail_ratio'] * 100
                raise EasyBuildError(
                    "Over %s%% of test cases failed, assuming broken build.",
                    max_fail_pcnt)

            # cleanup
            try:
                shutil.rmtree(self.examples_dir)
                shutil.rmtree(local_nwchemrc_dir)
            except OSError as err:
                raise EasyBuildError("Cleanup failed: %s", err)
Example #56
0
    def configure_step(self):
        """Run CMake for stage 1 Clang."""

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir,
                                                    'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir,
                                                    'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            disable_san_tests = False
            # all sanitizer tests will fail when there's a limit on the vmem
            # this is ugly but I haven't found a cleaner way so far
            (vmemlim, ec) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "There is a virtual memory limit set of %s KB. The tests of the "
                    "sanitizers will be disabled as they need unlimited virtual "
                    "memory unless --strict=error is used." % vmemlim.strip())

            # the same goes for unlimited stacksize
            (stacklim, ec) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                    "to fail. The sanitizers tests will be disabled unless --strict=error is used."
                )

            if (disable_san_tests or self.cfg['skip_sanitizer_tests']
                ) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # First try with GCCcore, as GCC built on top of GCCcore is just a wrapper for GCCcore and binutils,
        # instead of a full-fledged compiler
        gcc_prefix = get_software_root('GCCcore')

        # If that doesn't work, try with GCC
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')

        # If that doesn't work either, print error and exit
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts', "-DGCC_INSTALL_PREFIX='%s'" % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        # Configure some default options
        self.cfg.update('configopts', "-DCMAKE_BUILD_TYPE=Release")
        if self.cfg["enable_rtti"]:
            self.cfg.update('configopts', '-DLLVM_REQUIRES_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_EH=ON')
        if self.cfg["default_openmp_runtime"]:
            self.cfg.update(
                'configopts', '-DCLANG_DEFAULT_OPENMP_RUNTIME=%s' %
                self.cfg["default_openmp_runtime"])

        if self.cfg['assertions']:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=ON")
        else:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=OFF")

        if self.cfg["usepolly"]:
            self.cfg.update('configopts', "-DLINK_POLLY_INTO_TOOLS=ON")

        build_targets = self.cfg['build_targets']
        if build_targets is None:
            arch = get_cpu_architecture()
            default_targets = DEFAULT_TARGETS_MAP.get(arch, None)
            if default_targets:
                # If CUDA is included as a dep, add NVPTX as a target (could also support AMDGPU if we knew how)
                if get_software_root("CUDA"):
                    default_targets += ["NVPTX"]
                self.cfg['build_targets'] = build_targets = default_targets
                self.log.debug(
                    "Using %s as default build targets for CPU/GPU architecture %s.",
                    default_targets, arch)
            else:
                raise EasyBuildError(
                    "No default build targets defined for CPU architecture %s.",
                    arch)

        unknown_targets = [
            target for target in build_targets if target not in CLANG_TARGETS
        ]

        if unknown_targets:
            raise EasyBuildError(
                "Some of the chosen build targets (%s) are not in %s.",
                ', '.join(unknown_targets), ', '.join(CLANG_TARGETS))

        if LooseVersion(self.version) < LooseVersion('3.4') and "R600" in build_targets:
            raise EasyBuildError("Build target R600 not supported in < Clang-3.4")

        if LooseVersion(self.version) > LooseVersion('3.3') and "MBlaze" in build_targets:
            raise EasyBuildError("Build target MBlaze is not supported anymore in > Clang-3.3")

        if self.cfg["usepolly"] and "NVPTX" in build_targets:
            self.cfg.update('configopts', "-DPOLLY_ENABLE_GPGPU_CODEGEN=ON")

        self.cfg.update(
            'configopts',
            '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets))

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
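
The target handling above boils down to validating the requested LLVM back-ends and joining them into a single CMake option. A minimal standalone sketch of that step (with a hypothetical, abbreviated CLANG_TARGETS list rather than the full one used by the easyblock) might be:

# small illustrative subset; the real easyblock defines a much longer list
CLANG_TARGETS = ['all', 'AArch64', 'ARM', 'NVPTX', 'PowerPC', 'X86']

def targets_to_configopt(build_targets):
    """Return the -DLLVM_TARGETS_TO_BUILD CMake option for the given targets."""
    unknown = [t for t in build_targets if t not in CLANG_TARGETS]
    if unknown:
        raise ValueError("Unknown build targets: %s" % ', '.join(unknown))
    return '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets)

# e.g. targets_to_configopt(['X86', 'NVPTX']) -> '-DLLVM_TARGETS_TO_BUILD="X86;NVPTX"'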
Example #57
0
class EB_NWChem(ConfigureMake):
    """Support for building/installing NWChem."""
    def __init__(self, *args, **kwargs):
        """Initialisation of custom class variables for NWChem."""
        super(EB_NWChem, self).__init__(*args, **kwargs)

        self.test_cases_dir = None
        # path for symlink to local copy of default .nwchemrc, required by NWChem at runtime
        # this path is hardcoded by NWChem, and there's no way to make it use a config file at another path...
        self.home_nwchemrc = os.path.join(os.getenv('HOME'), '.nwchemrc')
        # temporary directory that is common across multiple nodes in a cluster;
        # we can't rely on tempfile.gettempdir() since that follows $TMPDIR,
        # which is typically set to a unique directory in jobs;
        # use /tmp as default, allow customisation via $EB_NWCHEM_TMPDIR environment variable
        common_tmp_dir = os.getenv('EB_NWCHEM_TMPDIR', '/tmp')
        # local NWChem .nwchemrc config file, to which symlink will point
        # using this approach, multiple parallel builds (on different nodes) can use the same symlink
        self.local_nwchemrc = os.path.join(common_tmp_dir, os.getenv('USER'),
                                           'easybuild_nwchem', '.nwchemrc')

    @staticmethod
    def extra_options():
        """Custom easyconfig parameters for NWChem."""

        extra_vars = {
            'target': ["LINUX64", "Target platform", CUSTOM],
            # possible options for ARMCI_NETWORK on LINUX64 with Infiniband:
            # OPENIB, MPI-MT, MPI-SPAWN, MELLANOX
            'armci_network': ["OPENIB", "Network protocol to use", CUSTOM],
            'msg_comms': ["MPI", "Type of message communication", CUSTOM],
            'modules': ["all", "NWChem modules to build", CUSTOM],
            'lib_defines':
            ["", "Additional defines for C preprocessor", CUSTOM],
            'tests': [True, "Run example test cases", CUSTOM],
            # lots of tests fail, so allow a certain fail ratio
            'max_fail_ratio': [0.5, "Maximum test case fail ratio", CUSTOM],
        }
        return ConfigureMake.extra_options(extra_vars)

    def setvar_env_makeopt(self, name, value):
        """Set a variable both in the environment and a an option to make."""
        env.setvar(name, value)
        self.cfg.update('buildopts', "%s='%s'" % (name, value))

    def configure_step(self):
        """Custom configuration procedure for NWChem."""

        # check whether a (valid) symlink to a .nwchemrc config file exists (via a dummy file if necessary)
        # fail early if the link is not what we expect, since running the test cases would likely fail in that case
        try:
            if os.path.exists(self.home_nwchemrc) or os.path.islink(
                    self.home_nwchemrc):
                # create a dummy file to check symlink
                if not os.path.exists(self.local_nwchemrc):
                    write_file(self.local_nwchemrc, 'dummy')

                self.log.debug(
                    "Contents of %s: %s", os.path.dirname(self.local_nwchemrc),
                    os.listdir(os.path.dirname(self.local_nwchemrc)))

                if os.path.islink(self.home_nwchemrc):
                    home_nwchemrc_target = os.readlink(self.home_nwchemrc)
                    if home_nwchemrc_target != self.local_nwchemrc:
                        raise EasyBuildError(
                            "Found %s, but it's not a symlink to %s. "
                            "Please (re)move %s while installing NWChem; it can be restored later",
                            self.home_nwchemrc, self.local_nwchemrc,
                            self.home_nwchemrc)
                # ok to remove, we'll recreate it anyway
                remove_file(self.local_nwchemrc)
        except (IOError, OSError) as err:
            raise EasyBuildError("Failed to validate %s symlink: %s",
                                 self.home_nwchemrc, err)

        # building NWChem in a long path name is an issue, so let's try to make sure we have a short one
        try:
            # NWChem insists that version is in name of build dir
            tmpdir = tempfile.mkdtemp(suffix='-%s-%s' %
                                      (self.name, self.version))
            # remove created directory, since we're not going to use it as is
            os.rmdir(tmpdir)
            # avoid having '['/']' characters in build dir name, NWChem doesn't like that
            start_dir = tmpdir.replace('[', '_').replace(']', '_')
            mkdir(os.path.dirname(start_dir), parents=True)
            symlink(self.cfg['start_dir'], start_dir)
            change_dir(start_dir)
            self.cfg['start_dir'] = start_dir
        except OSError as err:
            raise EasyBuildError(
                "Failed to symlink build dir to a shorter path name: %s", err)
Example #58
0
    def test_step(self):
        """Run WIEN2k test benchmarks. """
        def run_wien2k_test(cmd_arg):
            """Run a WPS command, and check for success."""

            cmd = "x_lapw lapw1 %s" % cmd_arg
            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            re_success = re.compile(r"LAPW1\s+END")
            if not re_success.search(out):
                raise EasyBuildError(
                    "Test '%s' in %s failed (pattern '%s' not found)?", cmd,
                    os.getcwd(), re_success.pattern)
            else:
                self.log.info("Test '%s' seems to have run successfully: %s" %
                              (cmd, out))

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            # prepend $PATH with install directory, define $SCRATCH which is used by the tests
            env.setvar('PATH', "%s:%s" % (self.installdir, os.environ['PATH']))
            try:
                cwd = os.getcwd()

                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                os.chdir(tmpdir)
                self.log.info("Running tests in %s" % tmpdir)

                scratch = os.path.join(tmpdir, 'scratch')
                mkdir(scratch)
                env.setvar('SCRATCH', scratch)

                # download data
                testdata_paths = {}
                for testdata in self.cfg['testdata']:
                    td_path = self.obtain_file(testdata)
                    if not td_path:
                        raise EasyBuildError(
                            "Downloading file from %s failed?", testdata)
                    testdata_paths.update(
                        {os.path.basename(testdata): td_path})

                self.log.debug('testdata_paths: %s' % testdata_paths)

                # unpack serial benchmark
                serial_test_name = "test_case"
                srcdir = extract_file(testdata_paths['%s.tar.gz' %
                                                     serial_test_name],
                                      tmpdir,
                                      change_into_dir=False)
                change_dir(srcdir)

                # run serial benchmark
                os.chdir(os.path.join(tmpdir, serial_test_name))
                run_wien2k_test("-c")

                # unpack parallel benchmark (in serial benchmark dir)
                parallel_test_name = "mpi-benchmark"
                srcdir = extract_file(testdata_paths['%s.tar.gz' %
                                                     parallel_test_name],
                                      tmpdir,
                                      change_into_dir=False)
                change_dir(srcdir)

                # run parallel benchmark
                os.chdir(os.path.join(tmpdir, serial_test_name))
                run_wien2k_test("-p")

                os.chdir(cwd)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError(
                    "Failed to run WIEN2k benchmark tests: %s", err)

            self.log.debug("Current dir: %s" % os.getcwd())
Example #59
0
    def configure_step(self):
        """Run CMake for stage 1 Clang."""

        if all(dep['name'] != 'ncurses' for dep in self.cfg['dependencies']):
            print_warning(
                'Clang requires ncurses to run, did you forget to add it to the dependencies?'
            )

        self.llvm_obj_dir_stage1 = os.path.join(self.builddir, 'llvm.obj.1')
        if self.cfg['bootstrap']:
            self.llvm_obj_dir_stage2 = os.path.join(self.builddir,
                                                    'llvm.obj.2')
            self.llvm_obj_dir_stage3 = os.path.join(self.builddir,
                                                    'llvm.obj.3')

        if LooseVersion(self.version) >= LooseVersion('3.3'):
            disable_san_tests = False
            # all sanitizer tests will fail when there's a limit on the vmem
            # this is ugly but I haven't found a cleaner way so far
            (vmemlim, ec) = run_cmd("ulimit -v", regexp=False)
            if not vmemlim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "There is a virtual memory limit set of %s KB. The tests of the "
                    "sanitizers will be disabled as they need unlimited virtual "
                    "memory unless --strict=error is used." % vmemlim.strip())

            # the same goes for unlimited stacksize
            (stacklim, ec) = run_cmd("ulimit -s", regexp=False)
            if stacklim.startswith("unlimited"):
                disable_san_tests = True
                self.log.warn(
                    "The stacksize limit is set to unlimited. This causes the ThreadSanitizer "
                    "to fail. The sanitizers tests will be disabled unless --strict=error is used."
                )

            if (disable_san_tests or self.cfg['skip_sanitizer_tests']
                ) and build_option('strict') != run.ERROR:
                self.log.debug("Disabling the sanitizer tests")
                self.disable_sanitizer_tests()

        # Create and enter build directory.
        mkdir(self.llvm_obj_dir_stage1)
        change_dir(self.llvm_obj_dir_stage1)

        # GCC and Clang are installed in different prefixes and Clang will not
        # find the GCC installation on its own.
        # First try with GCCcore, as GCC built on top of GCCcore is just a wrapper for GCCcore and binutils,
        # instead of a full-fledged compiler
        gcc_prefix = get_software_root('GCCcore')

        # If that doesn't work, try with GCC
        if gcc_prefix is None:
            gcc_prefix = get_software_root('GCC')

        # If that doesn't work either, print error and exit
        if gcc_prefix is None:
            raise EasyBuildError("Can't find GCC or GCCcore to use")

        self.cfg.update('configopts', "-DGCC_INSTALL_PREFIX='%s'" % gcc_prefix)
        self.log.debug("Using %s as GCC_INSTALL_PREFIX", gcc_prefix)

        # Configure some default options
        if self.cfg["enable_rtti"]:
            self.cfg.update('configopts', '-DLLVM_REQUIRES_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_RTTI=ON')
            self.cfg.update('configopts', '-DLLVM_ENABLE_EH=ON')
        if self.cfg["default_openmp_runtime"]:
            self.cfg.update(
                'configopts', '-DCLANG_DEFAULT_OPENMP_RUNTIME=%s' %
                self.cfg["default_openmp_runtime"])

        if self.cfg['assertions']:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=ON")
        else:
            self.cfg.update('configopts', "-DLLVM_ENABLE_ASSERTIONS=OFF")

        if self.cfg["usepolly"]:
            self.cfg.update('configopts', "-DLINK_POLLY_INTO_TOOLS=ON")

        # If Z3 is included as a dep, enable support in static analyzer (if enabled)
        if self.cfg["static_analyzer"] and LooseVersion(
                self.version) >= LooseVersion('9.0.0'):
            z3_root = get_software_root("Z3")
            if z3_root:
                self.cfg.update('configopts', "-DLLVM_ENABLE_Z3_SOLVER=ON")
                self.cfg.update('configopts',
                                "-DLLVM_Z3_INSTALL_DIR=%s" % z3_root)

        build_targets = self.cfg['build_targets']
        if build_targets is None:
            arch = get_cpu_architecture()
            default_targets = DEFAULT_TARGETS_MAP.get(arch, None)
            if default_targets:
                # If CUDA is included as a dep, add NVPTX as a target (could also support AMDGPU if we knew how)
                if get_software_root("CUDA"):
                    default_targets += ["NVPTX"]
                self.cfg['build_targets'] = build_targets = default_targets
                self.log.debug(
                    "Using %s as default build targets for CPU/GPU architecture %s.",
                    default_targets, arch)
            else:
                raise EasyBuildError(
                    "No default build targets defined for CPU architecture %s.",
                    arch)

        unknown_targets = [
            target for target in build_targets if target not in CLANG_TARGETS
        ]

        if unknown_targets:
            raise EasyBuildError(
                "Some of the chosen build targets (%s) are not in %s.",
                ', '.join(unknown_targets), ', '.join(CLANG_TARGETS))

        if LooseVersion(self.version) < LooseVersion('3.4') and "R600" in build_targets:
            raise EasyBuildError("Build target R600 not supported in < Clang-3.4")

        if LooseVersion(self.version) > LooseVersion('3.3') and "MBlaze" in build_targets:
            raise EasyBuildError("Build target MBlaze is not supported anymore in > Clang-3.3")

        if self.cfg["usepolly"] and "NVPTX" in build_targets:
            self.cfg.update('configopts', "-DPOLLY_ENABLE_GPGPU_CODEGEN=ON")

        self.cfg.update(
            'configopts',
            '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets))

        if self.cfg['parallel']:
            self.make_parallel_opts = "-j %s" % self.cfg['parallel']

        # If hwloc is included as a dep, use it in OpenMP runtime for affinity
        hwloc_root = get_software_root('hwloc')
        if hwloc_root:
            self.cfg.update('configopts', '-DLIBOMP_USE_HWLOC=ON')
            self.cfg.update('configopts',
                            '-DLIBOMP_HWLOC_INSTALL_DIR=%s' % hwloc_root)

        # If 'NVPTX' is in the build targets we assume the user would like OpenMP offload support as well
        if 'NVPTX' in build_targets:
            # the list of CUDA compute capabilities to use can be specified in two ways (where (2) overrules (1)):
            # (1) in the easyconfig file, via the custom cuda_compute_capabilities;
            # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option;
            ec_cuda_cc = self.cfg['cuda_compute_capabilities']
            cfg_cuda_cc = build_option('cuda_compute_capabilities')
            cuda_cc = cfg_cuda_cc or ec_cuda_cc or []
            if not cuda_cc:
                raise EasyBuildError(
                    "Can't build Clang with CUDA support "
                    "without specifying 'cuda-compute-capabilities'")
            default_cc = self.cfg['default_cuda_capability'] or min(cuda_cc)
            if not self.cfg['default_cuda_capability']:
                print_warning(
                    "No default CUDA capability defined! "
                    "Using '%s' taken as minimum from 'cuda_compute_capabilities'"
                    % default_cc)
            cuda_cc = [cc.replace('.', '') for cc in cuda_cc]
            default_cc = default_cc.replace('.', '')
            self.cfg.update(
                'configopts',
                '-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_%s' % default_cc)
            self.cfg.update(
                'configopts', '-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES=%s' %
                ','.join(cuda_cc))
        # If we don't want to build with CUDA (i.e. it's not among the dependencies), trick CMake's FindCUDA module
        # into not finding it by setting an environment variable that is used as-is and later checked for a falsy
        # value when determining whether CUDA was found
        if not get_software_root('CUDA'):
            setvar('CUDA_NVCC_EXECUTABLE', 'IGNORE')

        self.log.info("Configuring")
        super(EB_Clang, self).configure_step(srcdir=self.llvm_src_dir)
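
The NVPTX/OpenMP offload block above normalises CUDA compute capabilities such as '7.0' into the two CMake options passed to Clang. A minimal standalone sketch of that conversion (hypothetical helper; it keeps the same string-based min() as the easyblock) could be:

def cuda_cc_configopts(cuda_cc, default_cc=None):
    """Return CMake options for the given CUDA compute capabilities, e.g. ['7.0', '8.0']."""
    if not cuda_cc:
        raise ValueError("No CUDA compute capabilities specified")
    default_cc = (default_cc or min(cuda_cc)).replace('.', '')
    stripped = [cc.replace('.', '') for cc in cuda_cc]
    return [
        '-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_%s' % default_cc,
        '-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES=%s' % ','.join(stripped),
    ]

# e.g. cuda_cc_configopts(['7.0', '8.0']) ->
#   ['-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_70', '-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES=70,80']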
Example #60
0
    def install_step(self):
        """Symlink target OpenSSL installation"""

        if all(self.system_ssl[key]
               for key in ('bin', 'engines', 'include', 'lib')):
            # note: symlink to individual files, not directories,
            # since directory symlinks get resolved easily...

            # link OpenSSL libraries in system
            lib64_dir = os.path.join(self.installdir, 'lib64')
            lib64_engines_dir = os.path.join(
                lib64_dir, os.path.basename(self.system_ssl['engines']))
            mkdir(lib64_engines_dir, parents=True)

            # link existing known libraries
            ssl_syslibdir = os.path.dirname(self.system_ssl['lib'])
            lib_files = [
                os.path.join(ssl_syslibdir, x) for x in self.target_ssl_libs
            ]
            for libso in lib_files:
                symlink(libso, os.path.join(lib64_dir,
                                            os.path.basename(libso)))

            # link engines library files
            engine_lib_pattern = [
                os.path.join(self.system_ssl['engines'], '*')
            ]
            for engine_lib in expand_glob_paths(engine_lib_pattern):
                symlink(
                    engine_lib,
                    os.path.join(lib64_engines_dir,
                                 os.path.basename(engine_lib)))

            # relative symlink for unversioned libraries
            cwd = change_dir(lib64_dir)
            for libso in self.target_ssl_libs:
                unversioned_lib = '%s.%s' % (libso.split('.')[0],
                                             get_shared_lib_ext())
                symlink(libso, unversioned_lib, use_abspath_source=False)
            change_dir(cwd)

            # link OpenSSL headers in system
            include_dir = os.path.join(self.installdir, 'include',
                                       self.name.lower())
            mkdir(include_dir, parents=True)
            include_pattern = [os.path.join(self.system_ssl['include'], '*')]
            for header_file in expand_glob_paths(include_pattern):
                symlink(
                    header_file,
                    os.path.join(include_dir, os.path.basename(header_file)))

            # link OpenSSL binary in system
            bin_dir = os.path.join(self.installdir, 'bin')
            mkdir(bin_dir)
            symlink(self.system_ssl['bin'],
                    os.path.join(bin_dir, self.name.lower()))

        else:
            # install OpenSSL component
            print_warning(
                "Not all OpenSSL components found, falling back to OpenSSL in EasyBuild!"
            )
            super(EB_OpenSSL_wrapper, self).install_step()
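
The relative-symlink step above creates unversioned library names pointing at the versioned system libraries. A minimal standalone sketch of it (hypothetical directory and library names) might be:

import os

def link_unversioned(lib_dir, versioned_libs, shlib_ext='so'):
    """Create unversioned relative symlinks for the given versioned shared libraries."""
    cwd = os.getcwd()
    os.chdir(lib_dir)
    try:
        for libso in versioned_libs:
            unversioned = '%s.%s' % (libso.split('.')[0], shlib_ext)
            if not os.path.exists(unversioned):
                # relative target, so the link survives if the directory is moved
                os.symlink(libso, unversioned)
    finally:
        os.chdir(cwd)

# e.g. link_unversioned('/tmp/lib64', ['libssl.so.1.1', 'libcrypto.so.1.1'])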