Ejemplo n.º 1
0
    def test_apply_regex_substitutions(self):
        """Test apply_regex_substitutions function."""
        # create a small makefile-style file to patch
        testfile = os.path.join(self.test_prefix, 'test.txt')
        input_lines = [
            "CC = gcc",
            "CFLAGS = -O3 -g",
            "FC = gfortran",
            "FFLAGS = -O3 -g -ffixed-form",
        ]
        ft.write_file(testfile, '\n'.join(input_lines))

        # patterns cover both styles of capture-group reuse in the replacement
        ft.apply_regex_substitutions(testfile, [
            (r"^(CC)\s*=\s*.*$", r"\1 = ${CC}"),
            (r"^(FC\s*=\s*).*$", r"\1${FC}"),
            (r"^(.FLAGS)\s*=\s*-O3\s-g(.*)$", r"\1 = -O2\2"),
        ])

        expected_lines = [
            "CC = ${CC}",
            "CFLAGS = -O2",
            "FC = ${FC}",
            "FFLAGS = -O2 -ffixed-form",
        ]
        self.assertEqual(ft.read_file(testfile), '\n'.join(expected_lines))
Ejemplo n.º 2
0
    def configure_step(self):
        """Configure SCOTCH build: locate the template makefile, copy it to a general Makefile.inc and patch it."""

        # pick template makefile based on compiler family
        comp_fam = self.toolchain.comp_family()
        if comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2.icc'
        elif comp_fam == toolchain.GCC:  # @UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2'
        else:
            raise EasyBuildError("Unknown compiler family used: %s", comp_fam)

        srcdir = os.path.join(self.cfg['start_dir'], 'src')

        # create Makefile.inc
        makefile_inc = os.path.join(srcdir, 'Makefile.inc')
        copy_file(os.path.join(srcdir, 'Make.inc', makefilename), makefile_inc)
        self.log.debug("Successfully copied Makefile.inc to src dir: %s", makefile_inc)

        # the default behaviour of these makefiles is still wrong
        # e.g., compiler settings, and we need -lpthread
        regex_subs = [
            (r"^CCS\s*=.*$", "CCS\t= $(CC)"),
            (r"^CCP\s*=.*$", "CCP\t= $(MPICC)"),
            (r"^CCD\s*=.*$", "CCD\t= $(MPICC)"),
            # append -lpthread to LDFLAGS;
            # raw string so '\g<ldflags>' is a regex group reference rather than an
            # invalid string escape ('\t' is still expanded to a tab by re.sub's
            # replacement-template parsing)
            (r"^LDFLAGS\s*=(?P<ldflags>.*$)", r"LDFLAGS\t=\g<ldflags> -lpthread"),
        ]
        apply_regex_substitutions(makefile_inc, regex_subs)

        # change to src dir for building
        change_dir(srcdir)
Ejemplo n.º 3
0
 def find_build_subdir(pattern):
     """Changes to the sub directory that matches the given pattern"""
     # NOTE(review): this looks like a nested helper lifted out of EB_BWISE.build_step
     # (an identical inner function is defined there): it uses 'self', 'makefiles_fixes'
     # and 'EB_BWISE' as free names, so at module level it only works if those names
     # are in scope — verify against the original easyblock.
     subdir = glob.glob(os.path.join(self.builddir, pattern))
     if subdir:
         # glob may match several entries; only the first one is used
         change_dir(subdir[0])
         apply_regex_substitutions('makefile', makefiles_fixes)
         super(EB_BWISE, self).build_step()
         return subdir[0]
     else:
         raise EasyBuildError("Could not find a subdirectory matching the pattern %s", pattern)
Ejemplo n.º 4
0
    def configure_step(self):
        """Custom configuration procedure for binutils: statically link to zlib, configure options."""

        # parse version once; it is compared against several thresholds below
        binutils_version = LooseVersion(self.version)

        # determine list of 'lib' directories to use rpath for;
        # this should 'harden' the resulting binutils to bootstrap GCC (no trouble when other libstdc++ is build etc)
        libdirs = []
        for libdir in ['/usr/lib', '/usr/lib64', '/usr/lib/x86_64-linux-gnu/']:
            # also consider /lib, /lib64
            alt_libdir = libdir.replace('usr/', '')

            if os.path.exists(libdir):
                libdirs.append(libdir)
                # avoid listing the same location twice when /usr/lib and /lib are symlinked
                if os.path.exists(alt_libdir) and not os.path.samefile(libdir, alt_libdir):
                    libdirs.append(alt_libdir)

            elif os.path.exists(alt_libdir):
                libdirs.append(alt_libdir)

        libs = ' '.join('-Wl,-rpath=%s' % libdir for libdir in libdirs)

        # statically link to zlib if it is a (build) dependency
        zlibroot = get_software_root('zlib')
        if zlibroot:
            self.cfg.update('configopts', '--with-system-zlib')
            libz_path = os.path.join(zlibroot, get_software_libdir('zlib'), 'libz.a')

            # for recent binutils versions, we need to override ZLIB in Makefile.in of components
            if binutils_version >= LooseVersion('2.26'):
                regex_subs = [
                    (r"^(ZLIB\s*=\s*).*$", r"\1%s" % libz_path),
                    (r"^(ZLIBINC\s*=\s*).*$", r"\1-I%s" % os.path.join(zlibroot, 'include')),
                ]
                for makefile in glob.glob(os.path.join(self.cfg['start_dir'], '*', 'Makefile.in')):
                    apply_regex_substitutions(makefile, regex_subs)

            # for older versions, injecting the path to the static libz library into $LIBS works
            else:
                libs += ' ' + libz_path

        self.cfg.update('preconfigopts', "env LIBS='%s'" % libs)
        self.cfg.update('prebuildopts', "env LIBS='%s'" % libs)

        # use correct sysroot, to make sure 'ld' also considers system libraries
        self.cfg.update('configopts', '--with-sysroot=/')

        # single version check for the options that only apply to recent binutils
        # (was two identical '> 2.24' conditionals)
        if binutils_version > LooseVersion('2.24'):
            # build both static and shared libraries (default is only static)
            self.cfg.update('configopts', "--enable-shared --enable-static")
            # enable gold linker with plugin support, use ld as default linker
            self.cfg.update('configopts', "--enable-gold --enable-plugins --enable-ld=default")

        # complete configuration with configure_method of parent
        super(EB_binutils, self).configure_step()
Ejemplo n.º 5
0
    def install_step(self):
        """
        Custom install procedure for Molpro.
        For source install:
        * put license token in place in $installdir/.token
        * run 'make tuning'
        * install with 'make install'
        For binary install:
        * run interactive installer
        """

        if self.cfg['precompiled_binaries']:
            # binary install: run the interactive installer for each source file
            try:
                os.chdir(self.cfg['start_dir'])
            except OSError as err:
                raise EasyBuildError("Failed to move (back) to %s: %s", self.cfg['start_dir'], err)

            for src in self.src:
                if LooseVersion(self.version) >= LooseVersion('2015'):
                    # install dir must be non-existent for the installer;
                    # guard rmtree so a missing directory doesn't make this step fail
                    if os.path.exists(self.installdir):
                        shutil.rmtree(self.installdir)
                    cmd = "./{0} -batch -prefix {1}".format(src['name'], self.installdir)
                else:
                    cmd = "./{0} -batch -instbin {1}/bin -instlib {1}/lib".format(src['name'], self.installdir)

                # questions whose text must match exactly as asked
                qa = {
                    "Please give your username for accessing molpro\n": '',
                    "Please give your password for accessing molpro\n": '',
                }
                # questions whose text may be matched as a regular expression;
                # note: the brackets in the '[Y]/n' prompt must be escaped,
                # since an unescaped '[Y]' is a character class matching just 'Y'
                stdqa = {
                    r"Enter installation directory for executable files \[.*\]\n": os.path.join(self.installdir, 'bin'),
                    r"Enter installation directory for library files \[.*\]\n": os.path.join(self.installdir, 'lib'),
                    r"directory .* does not exist, try to create \[Y\]/n\n": '',
                }
                run_cmd_qa(cmd, qa=qa, std_qa=stdqa, log_all=True, simple=True)
        else:
            # source install: tune first if the license token is in place, then install via parent
            if os.path.isfile(self.license_token):
                run_cmd("make tuning")

            super(EB_Molpro, self).install_step()

            # put original LAUNCHER definition back in place in bin/molpro that got installed,
            # since the value used during installation point to temporary files
            molpro_path = os.path.join(self.full_prefix, 'bin', 'molpro')
            apply_regex_substitutions(molpro_path, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % self.orig_launcher)])

        if self.cleanup_token_symlink:
            # remove the symlink to the license token that configure_step put in place
            try:
                os.remove(self.license_token)
                self.log.debug("Symlink to license token %s removed", self.license_token)
            except OSError as err:
                raise EasyBuildError("Failed to remove %s: %s", self.license_token, err)
    def configure_step(self):
        """Custom configuration procedure for Bazel."""

        binutils_root = get_software_root('binutils')
        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # only patch Bazel scripts if binutils & GCC installation prefix could be determined
        if binutils_root and gcc_root:
            binutils_bin = os.path.join(binutils_root, 'bin')

            # locate GCC's internal include directory; exactly one match is expected
            matches = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if len(matches) != 1:
                raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", matches)
            gcc_lib_inc = matches[0]

            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
            if not os.path.exists(gcc_lib_inc_fixed):
                raise EasyBuildError("Derived directory %s does not exist", gcc_lib_inc_fixed)

            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)

            # replace hardcoded paths in CROSSTOOL
            regex_subs = [
                (r'-B/usr/bin', '-B%s' % binutils_bin),
                (r'(cxx_builtin_include_directory:.*)/usr/lib/gcc', r'\1%s' % gcc_lib_inc),
                (r'(cxx_builtin_include_directory:.*)/usr/local/include', r'\1%s' % gcc_lib_inc_fixed),
                (r'(cxx_builtin_include_directory:.*)/usr/include', r'\1%s' % gcc_cplusplus_inc),
            ]
            for tool in ['ar', 'cpp', 'dwp', 'gcc', 'ld']:
                path = which(tool)
                if not path:
                    raise EasyBuildError("Failed to determine path to '%s'", tool)
                regex_subs.append((os.path.join('/usr', 'bin', tool), path))

            apply_regex_substitutions(os.path.join('tools', 'cpp', 'CROSSTOOL'), regex_subs)

            # replace hardcoded paths in (unix_)cc_configure.bzl
            regex_subs = [
                (r'-B/usr/bin', '-B%s' % binutils_bin),
                (r'"/usr/bin', '"' + binutils_bin),
            ]
            for conf_bzl in ['cc_configure.bzl', 'unix_cc_configure.bzl']:
                filepath = os.path.join('tools', 'cpp', conf_bzl)
                if os.path.exists(filepath):
                    apply_regex_substitutions(filepath, regex_subs)
        else:
            self.log.info("Not patching Bazel build scripts, installation prefix for binutils/GCC not found")

        # enable building in parallel
        env.setvar('EXTRA_BAZEL_ARGS', '--jobs=%d' % self.cfg['parallel'])
    def configure_step(self):
        """Configure Qt using interactive `configure` script."""

        self.cfg.update('configopts', '-release')

        platform = None
        comp_fam = self.toolchain.comp_family()
        if self.cfg['platform']:
            platform = self.cfg['platform']
        # if no platform is specified, try to derive it based on compiler in toolchain
        elif comp_fam in [toolchain.GCC]:  # @UndefinedVariable
            platform = 'linux-g++-64'
        elif comp_fam in [toolchain.INTELCOMP]:  # @UndefinedVariable
            if LooseVersion(self.version) >= LooseVersion('4'):
                platform = 'linux-icc-64'
            else:
                platform = 'linux-icc'
                # fix -fPIC flag (-KPIC is not correct for recent Intel compilers)
                apply_regex_substitutions(os.path.join('mkspecs', platform, 'qmake.conf'), [('-KPIC', '-fPIC')])

        if platform is None:
            raise EasyBuildError("Don't know which platform to set based on compiler family.")
        self.cfg.update('configopts', "-platform %s" % platform)

        # configure Qt such that xmlpatterns is also installed
        # -xmlpatterns is not a known configure option for Qt 5.x, but there xmlpatterns support is enabled by default
        if LooseVersion('4') <= LooseVersion(self.version) < LooseVersion('5'):
            self.cfg.update('configopts', '-xmlpatterns')

        cmd = "%s ./configure -prefix %s %s" % (self.cfg['preconfigopts'], self.installdir, self.cfg['configopts'])
        # answers for the interactive licensing questions
        qa = {
            "Type 'o' if you want to use the Open Source Edition.": 'o',
            "Do you accept the terms of either license?": 'yes',
            "Which edition of Qt do you want to use?": 'o',
        }
        # output patterns that are progress noise, not questions
        no_qa = [
            "for .*pro",
            r"%s.*" % os.getenv('CXX', '').replace('+', '\\+'),  # need to escape + in 'g++'
            "Reading .*",
            "WARNING .*",
            "Project MESSAGE:.*",
            "rm -f .*",
            'Creating qmake...',
            'Checking for .*...',
        ]
        run_cmd_qa(cmd, qa, no_qa=no_qa, log_all=True, simple=True, maxhits=120)
Ejemplo n.º 8
0
    def configure_step(self, *args, **kwargs):
        """Configure build using 'make config' (only for recent versions (>= v5))."""

        if LooseVersion(self.version) >= LooseVersion("5"):

            if build_option('rpath'):
                # patch Makefile to tell CMake not to wipe the RPATHs we inject...
                apply_regex_substitutions('Makefile', [(r'^(CONFIG_FLAGS\s*=\s*)', r'\1 -DCMAKE_SKIP_RPATH=ON ')])

            run_cmd("make %s config prefix=%s" % (self.cfg['configopts'], self.installdir),
                    log_all=True, simple=True)

            # track which kind of library gets built, for later sanity checks
            lib_ext = 'so' if 'shared=1' in self.cfg['configopts'] else 'a'
            self.lib_exts.append(lib_ext)
    def post_install_step(self):
        """Custom post install step for IMPI, fix broken env scripts after moving installed files."""
        super(EB_impi, self).post_install_step()

        impiver = LooseVersion(self.version)
        if impiver == LooseVersion('4.1.1.036') or impiver >= LooseVersion('5.0.1.035'):
            script_paths = [os.path.join('intel64', 'bin')]
            if impiver < LooseVersion('2018.0.128'):
                # older releases also ship scripts under mic/bin
                script_paths.append(os.path.join('mic', 'bin'))

            # fix broken env scripts after the move: rewrite I_MPI_ROOT in both script flavours
            fixes = [
                ('mpivars.csh', [(r"^setenv I_MPI_ROOT.*", r"setenv I_MPI_ROOT %s" % self.installdir)]),
                ('mpivars.sh', [(r"^I_MPI_ROOT=.*", r"I_MPI_ROOT=%s; export I_MPI_ROOT" % self.installdir)]),
            ]
            for script_name, regex_subs in fixes:
                for script_path in script_paths:
                    apply_regex_substitutions(os.path.join(self.installdir, script_path, script_name), regex_subs)
Ejemplo n.º 10
0
    def build_step(self):
        """Run multiple times for different sources"""

        # BCALM is a git submodule of BWISE but we use it as a dependency because
        # it also has submodules and it's from a different developer
        if not get_software_root('BCALM'):
            raise EasyBuildError("BWISE needs BCALM to work")

        makefiles_fixes = [
            (r'^CC=.*$', 'CC=$(CXX)'),
            (r'^CFLAGS=.*$', 'CFLAGS:=$(CFLAGS)'),
            (r'^LDFLAGS=.*$', 'LDFLAGS:=$(LDFLAGS) -fopenmp'),
        ]

        def find_build_subdir(pattern):
            """Changes to the sub directory that matches the given pattern"""
            candidates = glob.glob(os.path.join(self.builddir, pattern))
            if not candidates:
                raise EasyBuildError("Could not find a subdirectory matching the pattern %s", pattern)
            change_dir(candidates[0])
            apply_regex_substitutions('makefile', makefiles_fixes)
            super(EB_BWISE, self).build_step()
            return candidates[0]

        # BWISE has 3 independant parts, we build them one by one
        # first BWISE itself
        subdir = find_build_subdir(os.path.join('BWISE-*', 'src'))
        apply_regex_substitutions(os.path.join(subdir, '..', 'Bwise.py'),
                                  [(r'^BWISE_MAIN = .*$', 'BWISE_MAIN = os.environ[\'EBROOTBWISE\']')])

        # Onwards to BGREAT
        subdir = find_build_subdir('BGREAT2-*')
        copy_file(os.path.join(subdir, 'bgreat'), self.cfg['start_dir'])

        # Finally, BTRIM
        subdir = find_build_subdir('BTRIM-*')
        copy_file(os.path.join(subdir, 'btrim'), self.cfg['start_dir'])

        binaries = ['sequencesToNumbers', 'numbersFilter', 'path_counter', 'maximal_sr', 'simulator',
                    'path_to_kmer', 'K2000/*.py', 'K2000/*.sh']
        self.cfg['files_to_copy'] = [(['bgreat', 'btrim', 'Bwise.py'] + ['src/%s' % x for x in binaries], 'bin'),
                                     'data']
Ejemplo n.º 11
0
    def configure_step(self):
        """Custom configuration procedure for QScintilla."""

        srcdir = os.path.join(self.cfg['start_dir'], 'Qt4Qt5')
        try:
            os.chdir(srcdir)
        except OSError as err:
            raise EasyBuildError("Failed to change to %s: %s", srcdir, err)

        # replace template values for install locations in qscintilla.pro configuration file
        datadir = os.path.join(self.installdir, 'data')
        install_dirs = [
            ('QT_HOST_DATA', datadir),
            ('QT_INSTALL_DATA', datadir),
            ('QT_INSTALL_HEADERS', os.path.join(self.installdir, 'include')),
            ('QT_INSTALL_LIBS', os.path.join(self.installdir, 'lib')),
            ('QT_INSTALL_TRANSLATIONS', os.path.join(self.installdir, 'trans')),
        ]
        regex_subs = [(r'\$\$\[%s\]' % qt_var, path) for qt_var, path in install_dirs]
        apply_regex_substitutions('qscintilla.pro', regex_subs)

        run_cmd("qmake qscintilla.pro")
Ejemplo n.º 12
0
    def disable_sanitizer_tests(self):
        """Disable the tests of all the sanitizers by removing the test directories from the build system"""
        if LooseVersion(self.version) < LooseVersion('3.6'):
            # for Clang 3.5 and lower, the tests are scattered over several CMakeLists.
            # We loop over them, and patch out the rule that adds the sanitizers tests to the testsuite
            patchfiles = ['lib/asan', 'lib/dfsan', 'lib/lsan', 'lib/msan', 'lib/tsan', 'lib/ubsan']

            for patchfile in patchfiles:
                cmakelists = os.path.join(self.llvm_src_dir, 'projects/compiler-rt', patchfile, 'CMakeLists.txt')
                if os.path.exists(cmakelists):
                    # raw string: '\(' is a regex escape, not a (deprecated) string escape
                    regex_subs = [(r'.*add_subdirectory\(lit_tests\).*', '')]
                    apply_regex_substitutions(cmakelists, regex_subs)

            # There is a common part seperate for the specific saniters, we disable all the common tests;
            # anchor the path at self.llvm_src_dir, consistent with the per-sanitizer CMakeLists above
            cmakelists = os.path.join(self.llvm_src_dir, 'projects', 'compiler-rt', 'lib', 'sanitizer_common',
                                      'CMakeLists.txt')
            regex_subs = [(r'.*add_subdirectory\(tests\).*', '')]
            apply_regex_substitutions(cmakelists, regex_subs)

        else:
            # In Clang 3.6, the sanitizer tests are grouped together in one CMakeLists
            # We patch out adding the subdirectories with the sanitizer tests
            cmakelists_tests = os.path.join(self.llvm_src_dir, 'projects', 'compiler-rt', 'test', 'CMakeLists.txt')
            regex_subs = []
            if LooseVersion(self.version) >= LooseVersion('5.0'):
                regex_subs.append((r'compiler_rt_test_runtime.*san.*', ''))
            else:
                regex_subs.append((r'add_subdirectory\((.*san|sanitizer_common)\)', ''))

            apply_regex_substitutions(cmakelists_tests, regex_subs)
Ejemplo n.º 13
0
    def disable_lto_mpfr_old_gcc(self, objdir):
        """
        # if GCC version used to build stage 1 is too old, build MPFR without LTO in stage 1
        # required for e.g. CentOS 6, cfr. https://github.com/easybuilders/easybuild-easyconfigs/issues/6374
        """
        self.log.info("Checking whether we are trying to build a recent MPFR with an old GCC...")

        # try to figure out MPFR version being built (fall back to '0.0' when undetectable)
        mpfr_ver = '0.0'
        mpfr_dirs = glob.glob(os.path.join(self.builddir, 'mpfr-*'))
        if len(mpfr_dirs) != 1:
            self.log.warning("Failed to isolate MPFR source dir to determine MPFR version, assuming v%s", mpfr_ver)
        else:
            mpfr_dir = mpfr_dirs[0]
            res = re.search('(?P<mpfr_ver>[0-9.]+)$', mpfr_dir)
            if res:
                mpfr_ver = res.group('mpfr_ver')
                self.log.info("Found MPFR version %s (based name of MPFR source dir: %s)", mpfr_ver, mpfr_dir)
            else:
                self.log.warning("Failed to determine MPFR version from '%s', assuming v%s", mpfr_dir, mpfr_ver)

        # for MPFR v4.x & newer, we need a recent GCC that supports -flto
        if LooseVersion(mpfr_ver) >= LooseVersion('4.0'):
            # check GCC version being used
            # GCC 4.5 is required for -flto (cfr. https://gcc.gnu.org/gcc-4.5/changes.html)
            gcc_ver = get_gcc_version()
            min_gcc_ver_lto = '4.5'
            if gcc_ver is None:
                self.log.warning("Failed to determine GCC version, assuming it's recent enough...")
            elif LooseVersion(gcc_ver) >= LooseVersion(min_gcc_ver_lto):
                self.log.info("GCC %s (>= %s) is OK for building MPFR in stage 1 with LTO enabled",
                              gcc_ver, min_gcc_ver_lto)
            else:
                self.log.info("Configuring MPFR to build without LTO in stage 1 (GCC %s is too old: < %s)!",
                              gcc_ver, min_gcc_ver_lto)

                # patch GCC's Makefile to inject --disable-lto when building MPFR
                stage1_makefile = os.path.join(objdir, 'Makefile')
                regex_subs = [(r'(--with-gmp-lib=\$\$r/\$\(HOST_SUBDIR\)/gmp/.libs) \\', r'\1 --disable-lto \\')]
                apply_regex_substitutions(stage1_makefile, regex_subs)
Ejemplo n.º 14
0
    def configure_step(self):
        """Custom configuration procedure for TINKER."""
        # make sure FFTW is available
        if get_software_root('FFTW') is None:
            raise EasyBuildError("FFTW dependency is not available.")

        # map OS type to the matching subdirectory of build scripts
        os_dirs = {
            LINUX: 'linux',
            DARWIN: 'macosx',
        }
        os_type = get_os_type()
        os_dir = os_dirs.get(os_type)
        if os_dir is None:
            raise EasyBuildError("Failed to determine OS directory for %s (known: %s)", os_type, os_dirs)

        # map compiler family to the matching subdirectory of build scripts
        comp_dirs = {
            toolchain.INTELCOMP: 'intel',
            toolchain.GCC: 'gfortran',
        }
        comp_fam = self.toolchain.comp_family()
        comp_dir = comp_dirs.get(comp_fam)
        if comp_dir is None:
            raise EasyBuildError("Failed to determine compiler directory for %s (known: %s)", comp_fam, comp_dirs)

        self.build_subdir = os.path.join(os_dir, comp_dir)
        self.log.info("Using build scripts from %s subdirectory" % self.build_subdir)

        # patch 'link.make' script to use FFTW provided via EasyBuild
        link_make_fp = os.path.join(self.cfg['start_dir'], self.build_subdir, 'link.make')
        regex_subs = [(r"libfftw3_threads.a libfftw3.a", r"-L$EBROOTFFTW/lib -lfftw3_threads -lfftw3")]
        apply_regex_substitutions(link_make_fp, regex_subs)

        # patch *.make files to get rid of hardcoded -openmp flag,
        # which doesn't work anymore with recent Intel compilers
        if comp_fam == toolchain.INTELCOMP:
            make_fps = glob.glob(os.path.join(self.cfg['start_dir'], self.build_subdir, '*.make'))
            regex_subs = [(r'-openmp', r'-fopenmp')]
            for make_fp in make_fps:
                # fix: patch each file individually (previously the whole list was passed)
                apply_regex_substitutions(make_fp, regex_subs)
Ejemplo n.º 15
0
    def test_apply_regex_substitutions(self):
        """Test apply_regex_substitutions function."""
        # create a small makefile-style file to patch
        testfile = os.path.join(self.test_prefix, 'test.txt')
        input_lines = [
            "CC = gcc",
            "CFLAGS = -O3 -g",
            "FC = gfortran",
            "FFLAGS = -O3 -g -ffixed-form",
        ]
        testtxt = '\n'.join(input_lines)
        ft.write_file(testfile, testtxt)

        # patterns cover both styles of capture-group reuse in the replacement
        regex_subs = [
            (r"^(CC)\s*=\s*.*$", r"\1 = ${CC}"),
            (r"^(FC\s*=\s*).*$", r"\1${FC}"),
            (r"^(.FLAGS)\s*=\s*-O3\s-g(.*)$", r"\1 = -O2\2"),
        ]
        ft.apply_regex_substitutions(testfile, regex_subs)

        expected_testtxt = '\n'.join([
            "CC = ${CC}",
            "CFLAGS = -O2",
            "FC = ${FC}",
            "FFLAGS = -O2 -ffixed-form",
        ])
        self.assertEqual(ft.read_file(testfile), expected_testtxt)

        # passing an empty list of substitutions is a no-op
        ft.write_file(testfile, testtxt)
        ft.apply_regex_substitutions(testfile, [])
        self.assertEqual(ft.read_file(testfile), testtxt)

        # clean error on non-existing file
        error_pat = "Failed to patch .*/nosuchfile.txt: .*No such file or directory"
        path = os.path.join(self.test_prefix, 'nosuchfile.txt')
        self.assertErrorRegex(EasyBuildError, error_pat, ft.apply_regex_substitutions, path, regex_subs)
Ejemplo n.º 16
0
    def install_step(self):
        """Custom install procedure for VEP."""

        # patch INSTALL.pl script to use https:// rather than ftp://
        apply_regex_substitutions('INSTALL.pl', [('ftp://', 'https://')])

        # update PERL5LIB so tests can run (done automatically by INSTALL.pl unless --NO_TEST is used)
        perl_majver = get_major_perl_version()
        perllib_envvar = 'PERL%sLIB' % perl_majver
        api_mods_dir = os.path.join(self.installdir, self.api_mods_subdir)
        perllib = os.getenv(perllib_envvar, '')
        self.log.info("Adding %s to $%s (%s)", api_mods_dir, perllib_envvar, perllib)
        env.setvar(perllib_envvar, '%s:%s' % (api_mods_dir, perllib))

        # see https://www.ensembl.org/info/docs/tools/vep/script/vep_download.html#installer
        install_opts = [
            # don't try to install optional Bio::DB::HTS (can be provided as an extension instead)
            '--NO_HTSLIB',
            # a: API, f: FASTA
            # not included:
            # c: cache, should be downloaded by user
            # l: Bio::DB::HTS, should be provided via EasyBuild
            # p: plugins
            '--AUTO af',
            # install all species
            '--SPECIES all',
            # don't update VEP during installation
            '--NO_UPDATE',
            # location to install Perl API modules into
            '--DESTDIR ' + api_mods_dir,
        ]
        cmd = ' '.join([self.cfg['preinstallopts'], 'perl', 'INSTALL.pl'] + install_opts + [self.cfg['installopts']])
        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Ejemplo n.º 17
0
    def configure_step(self):
        """Custom configuration procedure for Molpro: use 'configure -batch'.

        Puts the license token in place (via symlink if needed), composes the
        configure options (compilers, MPI, BLAS/LAPACK, integer size), runs
        './configure -batch', and extracts the LAUNCHER and PREFIX values from
        the generated CONFIG file for later use.
        """

        if not os.path.isfile(self.license_token):
            if self.cfg['license_file'] is not None and os.path.isfile(self.cfg['license_file']):
                # put symlink in place to specified license file in $HOME/.molpro/token
                # other approaches (like defining $MOLPRO_KEY) don't seem to work
                self.cleanup_token_symlink = True
                mkdir(os.path.dirname(self.license_token))
                symlink(self.cfg['license_file'], self.license_token)
                self.log.debug("Symlinked %s to %s", self.cfg['license_file'], self.license_token)
            else:
                self.log.warning("No licence token found at either {0} or via 'license_file'".format(self.license_token))

        # Only do the rest of the configuration if we're building from source
        if not self.cfg['precompiled_binaries']:
            # installation prefix
            self.cfg.update('configopts', "-prefix %s" % self.installdir)

            # compilers

            # compilers & MPI
            if self.toolchain.options.get('usempi', None):
                # use sequential compiler variants; MPI support is enabled separately via -mpp
                self.cfg.update('configopts', "-%s -%s" % (os.environ['CC_SEQ'], os.environ['F90_SEQ']))
                if 'MPI_INC_DIR' in os.environ:
                    self.cfg.update('configopts', "-mpp -mppbase %s" % os.environ['MPI_INC_DIR'])
                else:
                    raise EasyBuildError("$MPI_INC_DIR not defined")
            else:
                self.cfg.update('configopts', "-%s -%s" % (os.environ['CC'], os.environ['F90']))

            # BLAS/LAPACK
            if 'BLAS_LIB_DIR' in os.environ:
                self.cfg.update('configopts', "-blas -blaspath %s" % os.environ['BLAS_LIB_DIR'])
            else:
                raise EasyBuildError("$BLAS_LIB_DIR not defined")

            if 'LAPACK_LIB_DIR' in os.environ:
                self.cfg.update('configopts', "-lapack -lapackpath %s" % os.environ['LAPACK_LIB_DIR'])
            else:
                raise EasyBuildError("$LAPACK_LIB_DIR not defined")

            # 32 vs 64 bit: -i4/-i8 select the default integer size
            if self.toolchain.options.get('32bit', None):
                self.cfg.update('configopts', '-i4')
            else:
                self.cfg.update('configopts', '-i8')

            run_cmd("./configure -batch %s" % self.cfg['configopts'])

            # configure writes its results into a CONFIG file in the start dir
            cfgfile = os.path.join(self.cfg['start_dir'], 'CONFIG')
            cfgtxt = read_file(cfgfile)

            # determine original LAUNCHER value (restored later in install_step)
            launcher_regex = re.compile('^LAUNCHER=(.*)$', re.M)
            res = launcher_regex.search(cfgtxt)
            if res:
                self.orig_launcher = res.group(1)
                self.log.debug("Found original value for LAUNCHER: %s", self.orig_launcher)
            else:
                raise EasyBuildError("Failed to determine LAUNCHER value")

            # determine full installation prefix
            prefix_regex = re.compile('^PREFIX=(.*)$', re.M)
            res = prefix_regex.search(cfgtxt)
            if res:
                self.full_prefix = res.group(1)
                self.log.debug("Found full installation prefix: %s", self.full_prefix)
            else:
                raise EasyBuildError("Failed to determine full installation prefix")

            # determine MPI launcher command that can be used during build/test
            # obtain command with specific number of cores (required by mpi_cmd_for), then replace that number with '%n'
            launcher = self.toolchain.mpi_cmd_for('%x', self.cfg['parallel'])
            launcher = launcher.replace(' %s' % self.cfg['parallel'], ' %n')

            # patch CONFIG file to change LAUNCHER definition, in order to avoid having to start mpd
            apply_regex_substitutions(cfgfile, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % launcher)])

            # reread CONFIG and log contents
            cfgtxt = read_file(cfgfile)
            self.log.info("Contents of CONFIG file:\n%s", cfgtxt)
Ejemplo n.º 18
0
    def install_step(self):
        """Custom install procedure for Trinity.

        Builds the Trinity components either individually (2012 snapshot releases)
        or via the top-level Makefile (all other versions), then optionally removes
        the bundled sample data.

        NOTE(review): the tail of this method (the unreachable Q&A block and the
        Molpro-specific license/LAUNCHER handling below) does not belong to a
        Trinity easyblock; it appears fused in from a different class — verify
        against the original sources.
        """

        version = LooseVersion(self.version)
        # 2012 snapshot releases (before 2012-10-05) are built component by component
        if version > LooseVersion('2012') and version < LooseVersion('2012-10-05'):
            self.inchworm()
            self.chrysalis()
            self.kmer()
            self.butterfly()

            # optional BWA plugin (patched for multi-mapping), only if a version was specified
            bwapluginver = self.cfg['bwapluginver']
            if bwapluginver:
                self.trinityplugin('bwa-%s-patched_multi_map' % bwapluginver)

            # optional modified RSEM plugin, built with the C++ compiler
            if self.cfg['RSEMmod']:
                self.trinityplugin('RSEM-mod', cc=os.getenv('CXX'))

        else:
            self.jellyfish()

            # collect configure/make flags for inchworm/chrysalis without actually building them
            inchworm_flags = self.inchworm(run=False)
            chrysalis_flags = self.chrysalis(run=False)

            cc = os.getenv('CC')
            cxx = os.getenv('CXX')

            # linker search paths for ncurses/zlib dependencies, if loaded as modules
            lib_flags = ""
            for lib in ['ncurses', 'zlib']:
                libroot = get_software_root(lib)
                if libroot:
                    lib_flags += " -L%s/lib" % libroot

            # Makefile variable names differ per version: 2.x entries carry a leading space
            if version >= LooseVersion('2.0') and version < LooseVersion('3.0'):
                regex_subs = [
                    (r'^( INCHWORM_CONFIGURE_FLAGS\s*=\s*).*$', r'\1%s' % inchworm_flags),
                    (r'^( CHRYSALIS_MAKE_FLAGS\s*=\s*).*$', r'\1%s' % chrysalis_flags),
                ]
            else:
                regex_subs = [
                    (r'^(INCHWORM_CONFIGURE_FLAGS\s*=\s*).*$', r'\1%s' % inchworm_flags),
                    (r'^(CHRYSALIS_MAKE_FLAGS\s*=\s*).*$', r'\1%s' % chrysalis_flags),
                    # also pass compilers + extra library flags down to the rsem/fastool sub-builds
                    (r'(/rsem && \$\(MAKE\))\s*$',
                     r'\1 CC=%s CXX="%s %s" CFLAGS_EXTRA="%s"\n' % (cc, cxx, lib_flags, lib_flags)),
                    (r'(/fastool && \$\(MAKE\))\s*$',
                     r'\1 CC="%s -std=c99" CFLAGS="%s ${CFLAGS}"\n' % (cc, lib_flags)),
                ]
            apply_regex_substitutions('Makefile', regex_subs)

            # map toolchain compiler family onto Trinity's TRINITY_COMPILER setting
            trinity_compiler = None
            comp_fam = self.toolchain.comp_family()
            if comp_fam in [toolchain.INTELCOMP]:
                trinity_compiler = "intel"
            elif comp_fam in [toolchain.GCC]:
                trinity_compiler = "gcc"
            else:
                raise EasyBuildError("Don't know how to set TRINITY_COMPILER for %s compiler", comp_fam)

            # 2.x requires explicitly naming the 'all' and 'plugins' make targets
            explicit_make_args = ''
            if version >= LooseVersion('2.0') and version < LooseVersion('3.0'):
                explicit_make_args = 'all plugins'

            cmd = "make TRINITY_COMPILER=%s %s" % (trinity_compiler, explicit_make_args)
            run_cmd(cmd)

            # butterfly is not included in standard build
            self.butterfly()

        # remove sample data if desired
        if not self.cfg['withsampledata']:
            try:
                shutil.rmtree(os.path.join(self.cfg['start_dir'], 'sample_data'))
            except OSError, err:  # Python 2 syntax; would be 'except OSError as err' in Python 3
                raise EasyBuildError("Failed to remove sample data: %s", err)
                # NOTE(review): everything below the raise is unreachable dead code;
                # 'cmd' and 'qa' are undefined here — this Q&A block looks pasted in
                # from a different easyblock and should be removed or relocated.
                stdqa = {
                    r"Enter installation directory for executable files \[.*\]\n": os.path.join(self.installdir, 'bin'),
                    r"Enter installation directory for library files \[.*\]\n": os.path.join(self.installdir, 'lib'),
                    r"directory .* does not exist, try to create [Y]/n\n": '',
                }
                run_cmd_qa(cmd, qa=qa, std_qa=stdqa, log_all=True, simple=True)
        else:
            # NOTE(review): this branch references Molpro-specific attributes
            # (license_token, full_prefix, orig_launcher, EB_Molpro) — it does not
            # belong in a Trinity install_step; verify against the original easyblock.
            if os.path.isfile(self.license_token):
                run_cmd("make tuning")

            super(EB_Molpro, self).install_step()

            # put original LAUNCHER definition back in place in bin/molpro that got installed,
            # since the value used during installation point to temporary files
            molpro_path = os.path.join(self.full_prefix, 'bin', 'molpro')
            apply_regex_substitutions(molpro_path, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % self.orig_launcher)])

        # clean up the symlinked license token, if one was created earlier
        if self.cleanup_token_symlink:
            try:
                os.remove(self.license_token)
                self.log.debug("Symlink to license token %s removed", self.license_token)
            except OSError, err:  # Python 2 syntax
                raise EasyBuildError("Failed to remove %s: %s", self.license_token, err)

    def make_module_req_guess(self):
        """Customize $PATH guesses for Molpro module."""
        guesses = super(EB_Molpro, self).make_module_req_guess()
        # $PATH entries are relative to the install dir: use the last path component
        # of the full installation prefix determined during configuration
        prefix_subdir = os.path.basename(self.full_prefix)
        guesses['PATH'] = [os.path.join(prefix_subdir, subdir) for subdir in ('bin', 'utilities')]
        return guesses
Example No. 20
0
    def test_step(self):
        """Run WPS test (requires large dataset to be downloaded).

        Downloads and unpacks the configured test data into a temporary directory,
        then runs geogrid, ungrib and metgrid in sequence, checking each tool's
        output for a "Successful completion" message.
        """

        # populated below once self.builddir/self.wps_subdir is known; the nested
        # helper closes over it
        wpsdir = None

        def run_wps_cmd(cmdname, mpi_cmd=True):
            """Run a WPS command, and check for success."""

            cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

            # wrap in an MPI launch command (1 rank), unless MPI tests are disabled
            if mpi_cmd:
                if build_option('mpi_tests'):
                    cmd = self.toolchain.mpi_cmd_for(cmd, 1)
                else:
                    self.log.info("Skipping MPI test for %s, since MPI tests are disabled", cmd)
                    return

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            # each WPS tool reports success with this message in its output
            re_success = re.compile("Successful completion of %s" % cmdname)
            if not re_success.search(out):
                raise EasyBuildError("%s.exe failed (pattern '%s' not found)?", cmdname, re_success.pattern)

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            wpsdir = os.path.join(self.builddir, self.wps_subdir)

            try:
                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                change_dir(tmpdir)

                # download data
                testdata_paths = []
                for testdata in self.cfg['testdata']:
                    path = self.obtain_file(testdata)
                    if not path:
                        raise EasyBuildError("Downloading file from %s failed?", testdata)
                    testdata_paths.append(path)

                # unpack data
                for path in testdata_paths:
                    extract_file(path, tmpdir)

                namelist_file = os.path.join(tmpdir, 'namelist.wps')

                # GEOGRID

                # setup directories and files
                # (the geography data directory was renamed in WPS 4.0)
                if LooseVersion(self.version) < LooseVersion("4.0"):
                    geog_data_dir = "geog"
                else:
                    geog_data_dir = "WPS_GEOG"
                for dir_name in os.listdir(os.path.join(tmpdir, geog_data_dir)):
                    symlink(os.path.join(tmpdir, geog_data_dir, dir_name), os.path.join(tmpdir, dir_name))

                # copy namelist.wps file and patch it for geogrid
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir)]
                apply_regex_substitutions(namelist_file, regex_subs)

                # GEOGRID.TBL
                geogrid_dir = os.path.join(tmpdir, 'geogrid')
                mkdir(geogrid_dir)
                symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                        os.path.join(geogrid_dir, 'GEOGRID.TBL'))

                # run geogrid.exe
                run_wps_cmd("geogrid")

                # UNGRIB

                # determine start and end time stamps of grib files
                # NOTE(review): assumes at least one 'fnl_' grib file was unpacked;
                # fs[0]/fs[-1] raise IndexError otherwise — confirm test data layout
                grib_file_prefix = "fnl_"
                k = len(grib_file_prefix)
                fs = [f for f in sorted(os.listdir('.')) if f.startswith(grib_file_prefix)]
                start = "%s:00:00" % fs[0][k:]
                end = "%s:00:00" % fs[-1][k:]

                # copy namelist.wps file and patch it for ungrib
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [
                    (r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start)),
                    (r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end)),
                ]
                apply_regex_substitutions(namelist_file, regex_subs)

                # copy correct Vtable
                # (name of suitable Vtable for testing differs across WPS versions)
                vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
                if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW'), os.path.join(tmpdir, 'Vtable'))
                elif os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW.UPP')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'), os.path.join(tmpdir, 'Vtable'))
                else:
                    raise EasyBuildError("Could not find Vtable file to use for testing ungrib")

                # run link_grib.csh script
                cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
                run_cmd(cmd, log_all=True, simple=True)

                # run ungrib.exe (serial tool, so no MPI launcher)
                run_wps_cmd("ungrib", mpi_cmd=False)

                # METGRID.TBL

                metgrid_dir = os.path.join(tmpdir, 'metgrid')
                mkdir(metgrid_dir)
                symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                        os.path.join(metgrid_dir, 'METGRID.TBL'))

                # run metgrid.exe
                run_wps_cmd('metgrid')

                # clean up
                change_dir(self.builddir)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError("Failed to run WPS test: %s", err)
Example No. 21
0
    def configure_step(self):
        """
        Configure for GCC build:
        - prepare extra source dirs (GMP, MPFR, MPC, ...)
        - create obj dir to build in (GCC doesn't like to be built in source dir)
        - add configure and make options, according to .eb spec file
        - decide whether or not to do a staged build (which is required to enable PPL/CLooG support)
        - set platform_lib based on config.guess output
        """

        sysroot = build_option('sysroot')
        if sysroot:
            # based on changes made to GCC in Gentoo Prefix
            # https://gitweb.gentoo.org/repo/gentoo.git/tree/profiles/features/prefix/standalone/profile.bashrc

            # add --with-sysroot configure option, to instruct GCC to consider
            # value set for EasyBuild's --sysroot configuration option as the root filesystem of the operating system
            # (see https://gcc.gnu.org/install/configure.html)
            self.cfg.update('configopts', '--with-sysroot=%s' % sysroot)

            # avoid that --sysroot is passed to linker by patching value for SYSROOT_SPEC in gcc/gcc.c
            apply_regex_substitutions(os.path.join('gcc', 'gcc.c'),
                                      [('--sysroot=%R', '')])

            # prefix dynamic linkers with sysroot
            # this patches lines like:
            # #define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2"
            gcc_config_headers = glob.glob(
                os.path.join('gcc', 'config', '*', '*linux*.h'))
            regex_subs = [('(_DYNAMIC_LINKER.*[":])/lib',
                           r'\1%s/lib' % sysroot)]
            for gcc_config_header in gcc_config_headers:
                apply_regex_substitutions(gcc_config_header, regex_subs)

        # self.configopts will be reused in a 3-staged build,
        # configopts is only used in first configure
        self.configopts = self.cfg['configopts']

        # I) prepare extra source dirs, e.g. for GMP, MPFR, MPC (if required), so GCC can build them
        stage1_info = self.prep_extra_src_dirs("stage1")
        configopts = stage1_info['configopts']

        # II) update config options

        # enable specified language support
        if self.cfg['languages']:
            self.configopts += " --enable-languages=%s" % ','.join(
                self.cfg['languages'])

        if self.cfg['withnvptx']:
            # nvptx offload support requires extra build iterations (see iter_idx)
            if self.iter_idx == 0:
                self.configopts += " --without-cuda-driver"
                self.configopts += " --enable-offload-targets=nvptx-none"
            else:
                # register installed GCC as compiler to use nvptx
                path = "%s/bin:%s" % (self.installdir, os.getenv('PATH'))
                env.setvar('PATH', path)

                ld_lib_path = "%(dir)s/lib64:%(dir)s/lib:%(val)s" % {
                    'dir': self.installdir,
                    'val': os.getenv('LD_LIBRARY_PATH')
                }
                env.setvar('LD_LIBRARY_PATH', ld_lib_path)
                # iteration 1 builds nvptx-tools, iteration 2 builds newlib + nvptx target compiler
                extra_source = {1: "nvptx-tools", 2: "newlib"}[self.iter_idx]
                extra_source_dirs = glob.glob(
                    os.path.join(self.builddir, '%s-*' % extra_source))
                if len(extra_source_dirs) != 1:
                    raise EasyBuildError("Failed to isolate %s source dir" %
                                         extra_source)
                if self.iter_idx == 1:
                    # compile nvptx-tools
                    change_dir(extra_source_dirs[0])
                else:  # self.iter_idx == 2
                    # compile nvptx target compiler
                    symlink(os.path.join(extra_source_dirs[0], 'newlib'),
                            'newlib')
                    self.create_dir("build-nvptx-gcc")
                    self.cfg.update('configopts', self.configopts)
                    self.cfg.update(
                        'configopts',
                        "--with-build-time-tools=%s/nvptx-none/bin" %
                        self.installdir)
                    self.cfg.update('configopts', "--target=nvptx-none")
                    host_type = self.determine_build_and_host_type()[1]
                    self.cfg.update(
                        'configopts',
                        "--enable-as-accelerator-for=%s" % host_type)
                    self.cfg.update('configopts', "--disable-sjlj-exceptions")
                    self.cfg.update('configopts',
                                    "--enable-newlib-io-long-long")
                    self.cfg['configure_cmd_prefix'] = '../'
                # NOTE: early return — the remaining configure options below are
                # intentionally skipped for the nvptx build iterations
                return super(EB_GCC, self).configure_step()

        # enable building of libiberty, if desired
        if self.cfg['withlibiberty']:
            self.configopts += " --enable-install-libiberty"

        # enable link-time-optimization (LTO) support, if desired
        if self.cfg['withlto']:
            self.configopts += " --enable-lto"
        else:
            self.configopts += " --disable-lto"

        # configure for a release build
        self.configopts += " --enable-checking=release "
        # enable multilib: allow both 32 and 64 bit
        if self.cfg['multilib']:
            # multilib needs a 32-bit glibc on the build host; package name differs per distro
            glibc_32bit = [
                "glibc.i686",  # Fedora, RedHat-based
                "glibc.ppc",  # "" on Power
                "libc6-dev-i386",  # Debian-based
                "gcc-c++-32bit",  # OpenSuSE, SLES
            ]
            if not any([check_os_dependency(dep) for dep in glibc_32bit]):
                raise EasyBuildError(
                    "Using multilib requires 32-bit glibc (install one of %s, depending on your OS)",
                    ', '.join(glibc_32bit))
            self.configopts += " --enable-multilib --with-multilib-list=m32,m64"
        else:
            self.configopts += " --disable-multilib"
        # build both static and dynamic libraries (???)
        self.configopts += " --enable-shared=yes --enable-static=yes "

        # use POSIX threads
        self.configopts += " --enable-threads=posix "

        # enable plugin support
        self.configopts += " --enable-plugins "

        # use GOLD as default linker
        if self.cfg['use_gold_linker']:
            self.configopts += " --enable-gold=default --enable-ld --with-plugin-ld=ld.gold"
        else:
            self.configopts += " --enable-gold --enable-ld=default"

        # enable bootstrap build for self-containment (unless for staged build)
        # NOTE: local 'configopts' collects stage-specific options (prefix, bootstrap),
        # while self.configopts holds the options shared across all stages
        if not self.stagedbuild:
            configopts += " --enable-bootstrap"
        else:
            configopts += " --disable-bootstrap"

        if self.stagedbuild:
            #
            # STAGE 1: configure GCC build that will be used to build PPL/CLooG
            #
            self.log.info(
                "Starting with stage 1 of 3-staged build to enable CLooG and/or PPL, ISL support..."
            )
            self.stage1installdir = os.path.join(self.builddir,
                                                 'GCC_stage1_eb')
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.stage1installdir
            }

        else:
            # unstaged build, so just run standard configure/make/make install
            # set prefixes
            self.log.info("Performing regular GCC build...")
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {
                'p': self.installdir
            }

        # prioritize lib over lib{64,32,x32} for all architectures by overriding default MULTILIB_OSDIRNAMES config
        # only do this when multilib is not enabled
        if self.cfg['prefer_lib_subdir'] and not self.cfg['multilib']:
            cfgfile = 'gcc/config/i386/t-linux64'
            multilib_osdirnames = "MULTILIB_OSDIRNAMES = m64=../lib:../lib64 m32=../lib:../lib32 mx32=../lib:../libx32"
            self.log.info("Patching MULTILIB_OSDIRNAMES in %s with '%s'",
                          cfgfile, multilib_osdirnames)
            write_file(cfgfile, multilib_osdirnames, append=True)
        elif self.cfg['multilib']:
            self.log.info(
                "Not patching MULTILIB_OSDIRNAMES since use of --enable-multilib is enabled"
            )

        # III) create obj dir to build in, and change to it
        #     GCC doesn't like to be built in the source dir
        if self.stagedbuild:
            objdir = self.create_dir("stage1_obj")
            self.stage1prefix = objdir
        else:
            objdir = self.create_dir("obj")

        # IV) actual configure, but not on default path
        cmd = "../configure  %s %s" % (self.configopts, configopts)

        self.run_configure_cmd(cmd)

        self.disable_lto_mpfr_old_gcc(objdir)
Example No. 22
0
    def configure_step(self):
        """Configure WRF build:
            - set some magic environment variables ($NETCDF*, $PHDF5, $JASPERINC/$JASPERLIB, ...)
            - run the interactive configure script via Q&A
            - adjust the generated configure.wrf file if needed (compilers, optimization flags)

        Raises EasyBuildError for an unknown compiler family, an unknown build type,
        a non-parallel HDF5 module, or inconsistent JasPer environment settings.
        """
        # define $NETCDF* for netCDF dependency (used when creating WRF module file)
        set_netcdf_env_vars(self.log)

        # HDF5 (optional) dependency
        hdf5 = get_software_root('HDF5')
        if hdf5:
            # check if this is parallel HDF5 by looking for the parallel compiler wrappers/tools
            phdf5_bins = ['h5pcc', 'ph5diff']
            parallel_hdf5 = True
            for f in phdf5_bins:
                if not os.path.exists(os.path.join(hdf5, 'bin', f)):
                    parallel_hdf5 = False
                    break
            # bugfix: this used to test 'not (hdf5 or parallel_hdf5)', which is always
            # False inside this branch (hdf5 is truthy here), so a serial HDF5 module
            # silently ended up in $PHDF5; only *parallel* HDF5 is acceptable
            if not (hdf5 and parallel_hdf5):
                raise EasyBuildError("Parallel HDF5 module not loaded?")
            else:
                env.setvar('PHDF5', hdf5)
        else:
            self.log.info("HDF5 module not loaded, assuming that's OK...")

        # JasPer dependency check + setting env vars
        jasper = get_software_root('JasPer')
        if jasper:
            jasperlibdir = os.path.join(jasper, "lib")
            env.setvar('JASPERINC', os.path.join(jasper, "include"))
            env.setvar('JASPERLIB', jasperlibdir)

        else:
            # leftover $JASPERINC/$JASPERLIB without a JasPer module indicates a broken environment
            if os.getenv('JASPERINC') or os.getenv('JASPERLIB'):
                raise EasyBuildError(
                    "JasPer module not loaded, but JASPERINC and/or JASPERLIB still set?"
                )
            else:
                self.log.info(
                    "JasPer module not loaded, assuming that's OK...")

        # enable support for large file support in netCDF
        env.setvar('WRFIO_NCD_LARGE_FILE_SUPPORT', '1')

        # patch arch/Config_new.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join("arch", "Config_new.pl"))

        # determine build type option to look for in the configure menu
        build_type_option = None
        self.comp_fam = self.toolchain.comp_family()
        if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            build_type_option = "Linux x86_64 i486 i586 i686, ifort compiler with icc"

        elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
            build_type_option = "x86_64 Linux, gfortran compiler with gcc"

        else:
            raise EasyBuildError(
                "Don't know how to figure out build type to select.")

        # fetch selected build type (and make sure it makes sense)
        known_build_types = ['serial', 'smpar', 'dmpar', 'dm+sm']
        self.parallel_build_types = ["dmpar", "smpar", "dm+sm"]
        bt = self.cfg['buildtype']

        if bt not in known_build_types:
            raise EasyBuildError(
                "Unknown build type: '%s'. Supported build types: %s", bt,
                known_build_types)

        # fetch option number based on build type option and selected build type
        # (raw string: '\s' in a plain literal is an invalid escape sequence in Python 3)
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(%s\)" % (
            build_type_option, bt)

        # run configure script, answering its interactive questions
        cmd = "./configure"
        qa = {
            # named group in match will be used to construct answer
            "Compile for nesting? (1=basic, 2=preset moves, 3=vortex following) [default 1]:":
            "1",
            "Compile for nesting? (0=no nesting, 1=basic, 2=preset moves, 3=vortex following) [default 0]:":
            "0"
        }
        no_qa = [
            "testing for fseeko and fseeko64",
            r"If you wish to change the default options, edit the file:[\s\n]*arch/configure_new.defaults"
        ]
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question:
            "%(nr)s",
        }

        run_cmd_qa(cmd,
                   qa,
                   no_qa=no_qa,
                   std_qa=std_qa,
                   log_all=True,
                   simple=True)

        cfgfile = 'configure.wrf'

        # make sure correct compilers are being used
        comps = {
            'SCC': os.getenv('CC'),
            'SFC': os.getenv('F90'),
            'CCOMP': os.getenv('CC'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': "%s -DMPI2_SUPPORT" % os.getenv('MPICC'),
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v)
                      for (k, v) in comps.items()]
        apply_regex_substitutions(cfgfile, regex_subs)

        # rewrite optimization options if desired
        if self.cfg['rewriteopts']:

            # replace default -O3 option in configure.wrf with CFLAGS/FFLAGS from environment
            self.log.info("Rewriting optimization options in %s" % cfgfile)

            # set extra flags for Intel compilers
            # see http://software.intel.com/en-us/forums/showthread.php?t=72109&p=1#146748
            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable

                # -O3 -heap-arrays is required to resolve compilation error
                for envvar in ['CFLAGS', 'FFLAGS']:
                    val = os.getenv(envvar)
                    # guard against unset env var: os.getenv returns None, and
                    # "'-O3' in None" would raise a TypeError
                    if val and '-O3' in val:
                        env.setvar(envvar, '%s -heap-arrays' % val)
                        self.log.info("Updated %s to '%s'" %
                                      (envvar, os.getenv(envvar)))

            # replace -O3 with desired optimization options
            regex_subs = [
                (r"^(FCOPTIM.*)(\s-O3)(\s.*)$",
                 r"\1 %s \3" % os.getenv('FFLAGS')),
                (r"^(CFLAGS_LOCAL.*)(\s-O3)(\s.*)$",
                 r"\1 %s \3" % os.getenv('CFLAGS')),
            ]
            apply_regex_substitutions(cfgfile, regex_subs)
Example No. 23
0
    def configure_step(self):
        """Custom configure procedure for PyTorch.

        Builds the list of build-time options (BLAS backend, CUDA/cuDNN/NCCL,
        dependency toggles, platform workarounds) and prepends them to both
        prebuildopts and preinstallopts; options already present in custom_opts
        take precedence.
        """
        super(EB_PyTorch, self).configure_step()

        # Gather default options. Will be checked against (and can be overwritten by) custom_opts
        options = [
            'PYTORCH_BUILD_VERSION=' + self.version, 'PYTORCH_BUILD_NUMBER=1'
        ]

        # enable verbose mode when --debug is used (to show compiler commands)
        if build_option('debug'):
            options.append('VERBOSE=1')

        # Restrict parallelism
        options.append('MAX_JOBS=%s' % self.cfg['parallel'])

        # BLAS Interface: pick the backend based on which BLAS dependency is loaded
        if get_software_root('imkl'):
            options.append('BLAS=MKL')
            options.append('INTEL_MKL_DIR=$MKLROOT')
        elif LooseVersion(self.version) >= LooseVersion(
                '1.9.0') and get_software_root('BLIS'):
            # BLIS backend is only supported by PyTorch >= 1.9.0
            options.append('BLAS=BLIS')
            options.append('BLIS_HOME=' + get_software_root('BLIS'))
            options.append('USE_MKLDNN_CBLAS=ON')
        elif get_software_root('OpenBLAS'):
            # This is what PyTorch defaults to if no MKL is found.
            # Make this explicit here to avoid it finding MKL from the system
            options.append('BLAS=Eigen')
            # Still need to set a BLAS lib to use.
            # Valid choices: mkl/open/goto/acml/atlas/accelerate/veclib/generic (+blis for 1.9+)
            options.append('WITH_BLAS=open')
            # Make sure this option is actually passed to CMake
            apply_regex_substitutions(
                os.path.join('tools', 'setup_helpers', 'cmake.py'),
                [("'BLAS',", "'BLAS', 'WITH_BLAS',")])
        else:
            raise EasyBuildError(
                "Did not find a supported BLAS in dependencies. Don't know which BLAS lib to use"
            )

        # enable the USE_* toggles for each optional dependency that is present in the easyconfig
        available_dependency_options = EB_PyTorch.get_dependency_options_for_version(
            self.version)
        dependency_names = set(dep['name'] for dep in self.cfg.dependencies())
        not_used_dep_names = []
        for enable_opt, dep_name in available_dependency_options:
            if dep_name is None:
                continue
            if dep_name in dependency_names:
                options.append(enable_opt)
            else:
                not_used_dep_names.append(dep_name)
        self.log.info(
            'Did not enable options for the following dependencies as they are not used in the EC: %s',
            not_used_dep_names)

        # Use Infiniband by default
        # you can disable this by including 'USE_IBVERBS=0' in 'custom_opts' in the easyconfig file
        options.append('USE_IBVERBS=1')

        if get_software_root('CUDA'):
            options.append('USE_CUDA=1')
            cudnn_root = get_software_root('cuDNN')
            if cudnn_root:
                options.append('CUDNN_LIB_DIR=' +
                               os.path.join(cudnn_root, 'lib64'))
                options.append('CUDNN_INCLUDE_DIR=' +
                               os.path.join(cudnn_root, 'include'))

            nccl_root = get_software_root('NCCL')
            if nccl_root:
                options.append('USE_SYSTEM_NCCL=1')
                options.append('NCCL_INCLUDE_DIR=' +
                               os.path.join(nccl_root, 'include'))

            # list of CUDA compute capabilities to use can be specifed in two ways (where (2) overrules (1)):
            # (1) in the easyconfig file, via the custom cuda_compute_capabilities;
            # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option;
            cuda_cc = build_option('cuda_compute_capabilities'
                                   ) or self.cfg['cuda_compute_capabilities']
            if not cuda_cc:
                raise EasyBuildError(
                    'List of CUDA compute capabilities must be specified, either via '
                    'cuda_compute_capabilities easyconfig parameter or via '
                    '--cuda-compute-capabilities')

            self.log.info(
                'Compiling with specified list of CUDA compute capabilities: %s',
                ', '.join(cuda_cc))
            # This variable is also used at runtime (e.g. for tests) and if it is not set PyTorch will automatically
            # determine the compute capability of a GPU in the system and use that which may fail tests if
            # it is to new for the used nvcc
            env.setvar('TORCH_CUDA_ARCH_LIST', ';'.join(cuda_cc))
        else:
            # Disable CUDA
            options.append('USE_CUDA=0')

        if get_cpu_architecture() == POWER:
            # *NNPACK is not supported on Power, disable to avoid warnings
            options.extend([
                'USE_NNPACK=0', 'USE_QNNPACK=0', 'USE_PYTORCH_QNNPACK=0',
                'USE_XNNPACK=0'
            ])

        # Metal only supported on IOS which likely doesn't work with EB, so disabled
        options.append('USE_METAL=0')

        # merge defaults into custom_opts, letting options already set there win
        unique_options = self.cfg['custom_opts']
        for option in options:
            name = option.split('=')[
                0] + '='  # Include the equals sign to avoid partial matches
            if not any(opt.startswith(name) for opt in unique_options):
                unique_options.append(option)

        # the same option string is prepended to both the build and install commands
        self.cfg.update('prebuildopts', ' '.join(unique_options) + ' ')
        self.cfg.update('preinstallopts', ' '.join(unique_options) + ' ')
Example No. 24
0
    def configure_step(self):
        """Custom configuration procedure for TensorFlow."""

        tmpdir = tempfile.mkdtemp(suffix='-bazel-configure')

        # filter out paths from CPATH and LIBRARY_PATH. This is needed since bazel will pull some dependencies that
        # might conflict with dependencies on the system and/or installed with EB. For example: protobuf
        path_filter = self.cfg['path_filter']
        if path_filter:
            self.log.info(
                "Filtering $CPATH and $LIBRARY_PATH with path filter %s",
                path_filter)
            for var in ['CPATH', 'LIBRARY_PATH']:
                path = os.getenv(var).split(os.pathsep)
                self.log.info("$%s old value was %s" % (var, path))
                filtered_path = os.pathsep.join(
                    [p for fil in path_filter for p in path if fil not in p])
                env.setvar(var, filtered_path)

        wrapper_dir = os.path.join(tmpdir, 'bin')
        use_wrapper = False

        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            # put wrappers for Intel C/C++ compilers in place (required to make sure license server is found)
            # cfr. https://github.com/bazelbuild/bazel/issues/663
            for compiler in ('icc', 'icpc'):
                self.write_wrapper(wrapper_dir, compiler, 'NOT-USED-WITH-ICC')
            use_wrapper = True

        use_mpi = self.toolchain.options.get('usempi', False)
        impi_root = get_software_root('impi')
        mpi_home = ''
        if use_mpi and impi_root:
            # put wrappers for Intel MPI compiler wrappers in place
            # (required to make sure license server and I_MPI_ROOT are found)
            for compiler in (os.getenv('MPICC'), os.getenv('MPICXX')):
                self.write_wrapper(wrapper_dir, compiler,
                                   os.getenv('I_MPI_ROOT'))
            use_wrapper = True
            # set correct value for MPI_HOME
            mpi_home = os.path.join(impi_root, 'intel64')
            self.log.debug("Derived value for MPI_HOME: %s", mpi_home)

        if use_wrapper:
            env.setvar('PATH',
                       os.pathsep.join([wrapper_dir,
                                        os.getenv('PATH')]))

        self.prepare_python()

        self.handle_jemalloc()

        cuda_root = get_software_root('CUDA')
        cudnn_root = get_software_root('cuDNN')
        opencl_root = get_software_root('OpenCL')
        tensorrt_root = get_software_root('TensorRT')
        nccl_root = get_software_root('NCCL')

        config_env_vars = {
            'CC_OPT_FLAGS': os.getenv('CXXFLAGS'),
            'MPI_HOME': mpi_home,
            'PYTHON_BIN_PATH': self.python_cmd,
            'PYTHON_LIB_PATH': os.path.join(self.installdir, self.pylibdir),
            'TF_CUDA_CLANG': '0',
            'TF_ENABLE_XLA': '0',  # XLA JIT support
            'TF_NEED_CUDA': ('0', '1')[bool(cuda_root)],
            'TF_NEED_GCP': '0',  # Google Cloud Platform
            'TF_NEED_GDR': '0',
            'TF_NEED_HDFS': '0',  # Hadoop File System
            'TF_NEED_JEMALLOC': ('0', '1')[self.cfg['with_jemalloc']],
            'TF_NEED_MPI': ('0', '1')[bool(use_mpi)],
            'TF_NEED_OPENCL': ('0', '1')[bool(opencl_root)],
            'TF_NEED_OPENCL_SYCL': '0',
            'TF_NEED_S3': '0',  # Amazon S3 File System
            'TF_NEED_VERBS': '0',
            'TF_NEED_TENSORRT': ('0', '1')[bool(tensorrt_root)],
            'TF_NEED_AWS': '0',  # Amazon AWS Platform
            'TF_NEED_KAFKA': '0',  # Amazon Kafka Platform
        }
        if cuda_root:
            config_env_vars.update({
                'CUDA_TOOLKIT_PATH':
                cuda_root,
                'GCC_HOST_COMPILER_PATH':
                which(os.getenv('CC')),
                'TF_CUDA_COMPUTE_CAPABILITIES':
                ','.join(self.cfg['cuda_compute_capabilities']),
                'TF_CUDA_VERSION':
                get_software_version('CUDA'),
            })
            if cudnn_root:
                config_env_vars.update({
                    'CUDNN_INSTALL_PATH':
                    cudnn_root,
                    'TF_CUDNN_VERSION':
                    get_software_version('cuDNN'),
                })
            else:
                raise EasyBuildError(
                    "TensorFlow has a strict dependency on cuDNN if CUDA is enabled"
                )
            if nccl_root:
                nccl_version = get_software_version('NCCL')
                config_env_vars.update({
                    'NCCL_INSTALL_PATH': nccl_root,
                })
            else:
                nccl_version = '1.3'  # Use simple downloadable version
            config_env_vars.update({
                'TF_NCCL_VERSION': nccl_version,
            })

        for (key, val) in sorted(config_env_vars.items()):
            env.setvar(key, val)

        # patch configure.py (called by configure script) to avoid that Bazel abuses $HOME/.cache/bazel
        regex_subs = [(r"(run_shell\(\['bazel')",
                       r"\1, '--output_base=%s', '--install_base=%s'" %
                       (tmpdir, os.path.join(tmpdir, 'inst_base')))]
        apply_regex_substitutions('configure.py', regex_subs)

        cmd = self.cfg['preconfigopts'] + './configure ' + self.cfg[
            'configopts']
        run_cmd(cmd, log_all=True, simple=True)
Ejemplo n.º 25
0
    def configure_step(self):
        """Configure OpenFOAM build by setting appropriate environment variables.

        Sets compiler flags, patches hardcoded WM_* variables out of the
        etc/bashrc & etc/cshrc scripts, injects compiler commands into the
        wmake rules files, and exports the WM_*/FOAM_* environment variables
        that drive the OpenFOAM build system.
        """
        # compiler & compiler flags
        comp_fam = self.toolchain.comp_family()

        extra_flags = ''
        if comp_fam == toolchain.GCC:  # @UndefinedVariable
            # wrap both sides in LooseVersion: comparing a raw version string
            # against a LooseVersion instance is unreliable
            if LooseVersion(get_software_version('GCC')) >= LooseVersion('4.8'):
                # make sure non-gold version of ld is used, since OpenFOAM requires it
                # see http://www.openfoam.org/mantisbt/view.php?id=685
                extra_flags = '-fuse-ld=bfd'

            # older versions of OpenFOAM-Extend require -fpermissive
            if 'extend' in self.name.lower() and LooseVersion(self.version) < LooseVersion('2.0'):
                extra_flags += ' -fpermissive'

            if LooseVersion(self.version) < LooseVersion('3.0'):
                extra_flags += ' -fno-delete-null-pointer-checks'

        elif comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            # make sure -no-prec-div is used with Intel compilers
            extra_flags = '-no-prec-div'

        for env_var in ['CFLAGS', 'CXXFLAGS']:
            env.setvar(env_var, "%s %s" % (os.environ.get(env_var, ''), extra_flags))

        # patch out hardcoding of WM_* environment variables
        # for example, replace 'export WM_COMPILER=Gcc' with ': ${WM_COMPILER:=Gcc}; export WM_COMPILER'
        for script in [os.path.join(self.builddir, self.openfoamdir, x) for x in ['etc/bashrc', 'etc/cshrc']]:
            self.log.debug("Patching out hardcoded $WM_* env vars in %s", script)
            # disable any third party stuff, we use EB controlled builds
            regex_subs = [(r"^(setenv|export) WM_THIRD_PARTY_USE_.*[ =].*$", r"# \g<0>")]

            # this does not work for OpenFOAM Extend lower than 2.0
            if 'extend' not in self.name.lower() or LooseVersion(self.version) >= LooseVersion('2.0'):
                key = "WM_PROJECT_VERSION"
                regex_subs += [(r"^(setenv|export) %s=.*$" % key, r"export %s=%s #\g<0>" % (key, self.version))]

            WM_env_var = ['WM_COMPILER', 'WM_MPLIB', 'WM_THIRD_PARTY_DIR']
            # OpenFOAM >= 3.0.0 can use 64 bit integers
            if 'extend' not in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0'):
                WM_env_var.append('WM_LABEL_SIZE')
            for env_var in WM_env_var:
                regex_subs.append((r"^(setenv|export) (?P<var>%s)[ =](?P<val>.*)$" % env_var,
                                   r": ${\g<var>:=\g<val>}; export \g<var>"))
            apply_regex_substitutions(script, regex_subs)

        # inject compiler variables into wmake/rules files
        ldirs = glob.glob(os.path.join(self.builddir, self.openfoamdir, 'wmake', 'rules', 'linux*'))
        langs = ['c', 'c++']

        # NOTE: we do not want to change the Debug rules files because
        # that would change the cOPT/c++OPT values from their empty setting.
        suffixes = ['', 'Opt']
        wmake_rules_files = [os.path.join(ldir, lang + suff) for ldir in ldirs for lang in langs for suff in suffixes]

        mpicc = os.environ['MPICC']
        mpicxx = os.environ['MPICXX']
        cc_seq = os.environ.get('CC_SEQ', os.environ['CC'])
        cxx_seq = os.environ.get('CXX_SEQ', os.environ['CXX'])

        if self.toolchain.mpi_family() == toolchain.OPENMPI:
            # no -cc/-cxx flags supported in OpenMPI compiler wrappers
            c_comp_cmd = 'OMPI_CC="%s" %s' % (cc_seq, mpicc)
            cxx_comp_cmd = 'OMPI_CXX="%s" %s' % (cxx_seq, mpicxx)
        else:
            # -cc/-cxx should work for all MPICH-based MPIs (including Intel MPI)
            c_comp_cmd = '%s -cc="%s"' % (mpicc, cc_seq)
            cxx_comp_cmd = '%s -cxx="%s"' % (mpicxx, cxx_seq)

        comp_vars = {
            # specify MPI compiler wrappers and compiler commands + sequential compiler that should be used by them
            'cc': c_comp_cmd,
            'CC': cxx_comp_cmd,
            'cOPT': os.environ['CFLAGS'],
            'c++OPT': os.environ['CXXFLAGS'],
        }
        for wmake_rules_file in wmake_rules_files:
            fullpath = os.path.join(self.builddir, self.openfoamdir, wmake_rules_file)
            self.log.debug("Patching compiler variables in %s", fullpath)
            regex_subs = []
            for comp_var, newval in comp_vars.items():
                regex_subs.append((r"^(%s\s*=\s*).*$" % re.escape(comp_var), r"\1%s" % newval))
            apply_regex_substitutions(fullpath, regex_subs)

        # enable verbose build for debug purposes
        # starting with openfoam-extend 3.2, PS1 also needs to be set
        env.setvar("FOAM_VERBOSE", '1')

        # installation directory
        env.setvar("FOAM_INST_DIR", self.installdir)

        # third party directory
        self.thrdpartydir = "ThirdParty-%s" % self.version
        # only if third party stuff is actually installed
        if os.path.exists(self.thrdpartydir):
            os.symlink(os.path.join("..", self.thrdpartydir), self.thrdpartydir)
            env.setvar("WM_THIRD_PARTY_DIR", os.path.join(self.installdir, self.thrdpartydir))

        env.setvar("WM_COMPILER", self.wm_compiler)
        env.setvar("WM_MPLIB", self.wm_mplib)

        # Set Compile options according to build type
        env.setvar("WM_COMPILE_OPTION", self.build_type)

        # parallel build spec
        env.setvar("WM_NCOMPPROCS", str(self.cfg['parallel']))

        # OpenFOAM >= 3.0.0 can use 64 bit integers
        if 'extend' not in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0'):
            if self.toolchain.options['i8']:
                env.setvar("WM_LABEL_SIZE", '64')
            else:
                env.setvar("WM_LABEL_SIZE", '32')

        # make sure lib/include dirs for dependencies are found
        openfoam_extend_v3 = 'extend' in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0')
        if LooseVersion(self.version) < LooseVersion("2") or openfoam_extend_v3:
            self.log.debug("List of deps: %s" % self.cfg.dependencies())
            for dep in self.cfg.dependencies():
                # no trailing comma here: dep_name must be a plain string, not a 1-tuple
                dep_name = dep['name'].upper()
                dep_root = get_software_root(dep['name'])
                env.setvar("%s_SYSTEM" % dep_name, "1")
                dep_vars = {
                    "%s_DIR": "%s",
                    "%s_BIN_DIR": "%s/bin",
                    "%s_LIB_DIR": "%s/lib",
                    "%s_INCLUDE_DIR": "%s/include",
                }
                # use items() rather than Python 2-only iteritems(),
                # consistent with the .items() usage earlier in this method
                for var, val in dep_vars.items():
                    env.setvar(var % dep_name, val % dep_root)
        else:
            for depend in ['SCOTCH', 'METIS', 'CGAL', 'Paraview']:
                dependloc = get_software_root(depend)
                if dependloc:
                    if depend == 'CGAL' and get_software_root('Boost'):
                        env.setvar("CGAL_ROOT", dependloc)
                        env.setvar("BOOST_ROOT", get_software_root('Boost'))
                    else:
                        env.setvar("%s_ROOT" % depend.upper(), dependloc)
Ejemplo n.º 26
0
    def configure_step(self):
        """Custom configuration procedure for TensorFlow.

        Prepares an icc wrapper when using Intel compilers, exports the
        TF_*/CUDA/cuDNN configuration environment variables expected by
        TensorFlow's configure script, patches configure.py to use a dedicated
        Bazel output base, and finally runs ./configure.
        """

        # dedicated temporary directory, later used as Bazel's --output_base
        # so Bazel does not abuse $HOME/.cache/bazel
        tmpdir = tempfile.mkdtemp(suffix='-bazel-configure')

        # put wrapper for Intel C compiler in place (required to make sure license server is found)
        # cfr. https://github.com/bazelbuild/bazel/issues/663
        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            wrapper_dir = os.path.join(tmpdir, 'bin')
            icc_wrapper_txt = INTEL_COMPILER_WRAPPER % {
                'compiler_path':
                which('icc'),
                'cpath':
                os.getenv('CPATH'),
                # fall back to $LM_LICENSE_FILE if $INTEL_LICENSE_FILE is not set
                'intel_license_file':
                os.getenv('INTEL_LICENSE_FILE', os.getenv('LM_LICENSE_FILE')),
                'wrapper_dir':
                wrapper_dir,
            }
            icc_wrapper = os.path.join(wrapper_dir, 'icc')
            write_file(icc_wrapper, icc_wrapper_txt)
            # prepend wrapper dir to $PATH so the wrapper shadows the real icc
            env.setvar(
                'PATH',
                ':'.join([os.path.dirname(icc_wrapper),
                          os.getenv('PATH')]))
            if self.dry_run:
                self.dry_run_msg("Wrapper for 'icc' was put in place: %s",
                                 icc_wrapper)
            else:
                # make the wrapper script executable for the owner
                adjust_permissions(icc_wrapper, stat.S_IXUSR)
                self.log.info("Using wrapper script for 'icc': %s",
                              which('icc'))

        self.prepare_python()

        # optional dependencies; each returns None when the module is not loaded
        cuda_root = get_software_root('CUDA')
        cudnn_root = get_software_root('cuDNN')
        jemalloc_root = get_software_root('jemalloc')
        opencl_root = get_software_root('OpenCL')

        use_mpi = self.toolchain.options.get('usempi', False)

        # answers to the questions asked by TensorFlow's interactive configure script,
        # passed via environment variables ('0'/'1' toggles for the TF_NEED_* knobs)
        config_env_vars = {
            'CC_OPT_FLAGS': os.getenv('CXXFLAGS'),
            'MPI_HOME': '',
            'PYTHON_BIN_PATH': self.python_cmd,
            'PYTHON_LIB_PATH': os.path.join(self.installdir, self.pylibdir),
            'TF_CUDA_CLANG': '0',
            'TF_ENABLE_XLA': '0',  # XLA JIT support
            'TF_NEED_CUDA': ('0', '1')[bool(cuda_root)],
            'TF_NEED_GCP': '0',  # Google Cloud Platform
            'TF_NEED_GDR': '0',
            'TF_NEED_HDFS': '0',  # Hadoop File System
            'TF_NEED_JEMALLOC': ('0', '1')[self.cfg['with_jemalloc']],
            'TF_NEED_MPI': ('0', '1')[bool(use_mpi)],
            'TF_NEED_OPENCL': ('0', '1')[bool(opencl_root)],
            'TF_NEED_S3': '0',  # Amazon S3 File System
            'TF_NEED_VERBS': '0',
        }
        if cuda_root:
            config_env_vars.update({
                'CUDA_TOOLKIT_PATH':
                cuda_root,
                'GCC_HOST_COMPILER_PATH':
                which(os.getenv('CC')),
                'TF_CUDA_COMPUTE_CAPABILITIES':
                ','.join(self.cfg['cuda_compute_capabilities']),
                'TF_CUDA_VERSION':
                get_software_version('CUDA'),
            })
        if cudnn_root:
            config_env_vars.update({
                'CUDNN_INSTALL_PATH':
                cudnn_root,
                'TF_CUDNN_VERSION':
                get_software_version('cuDNN'),
            })

        # export in sorted order for reproducible logs
        for (key, val) in sorted(config_env_vars.items()):
            env.setvar(key, val)

        # patch configure.py (called by configure script) to avoid that Bazel abuses $HOME/.cache/bazel
        regex_subs = [(r"(run_shell\(\['bazel')",
                       r"\1, '--output_base=%s'" % tmpdir)]
        apply_regex_substitutions('configure.py', regex_subs)

        cmd = self.cfg['preconfigopts'] + './configure ' + self.cfg[
            'configopts']
        run_cmd(cmd, log_all=True, simple=True)
Ejemplo n.º 27
0
    def configure_step(self):
        """Custom configuration procedure for Quantum ESPRESSO.

        Tweaks configure options based on the toolchain, runs the standard
        configure step, then patches the generated make.sys/make.inc file with
        the correct DFLAGS, compiler flags and library settings.
        """

        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            self.cfg.update('configopts', '--enable-openmp')

        if not self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', '--disable-parallel')

        if not self.cfg['with_scalapack']:
            self.cfg.update('configopts', '--without-scalapack')

        # list of replacements to apply to the generated makefile: (var, value, keep_existing)
        repls = []

        if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
            # set preprocessor command (-E to stop after preprocessing, -C to preserve comments)
            cpp = "%s -E -C" % os.getenv('CC')
            repls.append(('CPP', cpp, False))
            env.setvar('CPP', cpp)

            # also define $FCCPP, but do *not* include -C (comments should not be preserved when preprocessing Fortran)
            env.setvar('FCCPP', "%s -E" % os.getenv('CC'))

        super(EB_QuantumESPRESSO, self).configure_step()

        # compose list of DFLAGS (flag, value, keep_stuff)
        # for guidelines, see include/defs.h.README in sources
        dflags = []

        comp_fam_dflags = {
            toolchain.INTELCOMP: '-D__INTEL',
            toolchain.GCC: '-D__GFORTRAN -D__STD_F95',
        }
        dflags.append(comp_fam_dflags[self.toolchain.comp_family()])

        # use multithreaded FFT library when OpenMP is enabled
        if self.toolchain.options.get('openmp', False):
            libfft = os.getenv('LIBFFT_MT')
        else:
            libfft = os.getenv('LIBFFT')
        if libfft:
            if "fftw3" in libfft:
                dflags.append('-D__FFTW3')
            else:
                dflags.append('-D__FFTW')
            env.setvar('FFTW_LIBS', libfft)

        if get_software_root('ACML'):
            dflags.append('-D__ACML')

        if self.toolchain.options.get('usempi', None):
            dflags.append('-D__MPI -D__PARA')

        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            dflags.append(" -D__OPENMP")

        if self.cfg['with_scalapack']:
            dflags.append(" -D__SCALAPACK")

        # always include -w to supress warnings
        dflags.append('-w')

        repls.append(('DFLAGS', ' '.join(dflags), False))

        # complete C/Fortran compiler and LD flags
        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            repls.append(('LDFLAGS', self.toolchain.get_flag('openmp'), True))
            repls.append(
                ('(?:C|F90|F)FLAGS', self.toolchain.get_flag('openmp'), True))

        # obtain library settings
        libs = []
        for lib in ['BLAS', 'LAPACK', 'FFT', 'SCALAPACK']:
            if self.toolchain.options.get('openmp', False):
                val = os.getenv('LIB%s_MT' % lib)
            else:
                val = os.getenv('LIB%s' % lib)
            repls.append(('%s_LIBS' % lib, val, False))
            libs.append(val)
        libs = ' '.join(libs)

        repls.append(('BLAS_LIBS_SWITCH', 'external', False))
        repls.append(('LAPACK_LIBS_SWITCH', 'external', False))
        repls.append(('LD_LIBS', os.getenv('LIBS'), False))

        # check for external FoX
        if get_software_root('FoX'):
            self.log.debug(
                "Found FoX external module, disabling libfox target in Makefile"
            )
            regex_subs = [
                (r"(libfox: touch-dummy)\n.*",
                 r"\1\n\techo 'libfox: external module used' #"),
            ]
            apply_regex_substitutions('Makefile', regex_subs)

        self.log.debug("List of replacements to perform: %s" % repls)

        # make.inc since QE 6.x, make.sys before that
        if LooseVersion(self.version) >= LooseVersion("6"):
            make_ext = '.inc'
        else:
            make_ext = '.sys'

        # patch make.sys file
        fn = os.path.join(self.cfg['start_dir'], 'make' + make_ext)
        try:
            for line in fileinput.input(fn, inplace=1, backup='.orig.eb'):
                for (k, v, keep) in repls:
                    # need to use [ \t]* instead of \s*, because vars may be undefined as empty,
                    # and we don't want to include newlines
                    if keep:
                        line = re.sub(r"^(%s\s*=[ \t]*)(.*)$" % k,
                                      r"\1\2 %s" % v, line)
                    else:
                        line = re.sub(r"^(%s\s*=[ \t]*).*$" % k, r"\1%s" % v,
                                      line)

                # fix preprocessing directives for .f90 files in make.sys if required
                if self.toolchain.comp_family() in [toolchain.GCC]:
                    line = re.sub(
                        r"\$\(MPIF90\) \$\(F90FLAGS\) -c \$<",
                        "$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" +
                        "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o", line)

                sys.stdout.write(line)
        # use 'as' syntax (valid on Python 2.6+ and Python 3),
        # consistent with the 'except OSError as err' usage elsewhere in this file
        except IOError as err:
            raise EasyBuildError("Failed to patch %s: %s", fn, err)
Ejemplo n.º 28
0
    def configure_step(self):
        """Dedicated configure step for Mono: install Mono from RPM (if provided), then run configure.

        Mono requires an existing Mono to bootstrap itself, so if RPMs are
        supplied they are relocated and installed into a temporary prefix,
        a patched gmcs is prepared, and a temporary Mono is built/installed
        before the regular configure/make/make install cycle.
        """

        # install Mono from RPMs if provided (because we need Mono to build Mono)
        if self.rpms:

            # prepare path for installing RPMs in
            monorpms_path = os.path.join(self.builddir, "monorpms")
            try:
                os.makedirs(os.path.join(monorpms_path, 'rpm'))
            except OSError as err:
                raise EasyBuildError(
                    "Failed to create directories for installing Mono RPMs in: %s",
                    err)

            self.src = self.rpms
            self.rebuildRPM = True

            # rebuild RPMs to make them relocatable
            Rpm.configure_step(self)

            # prepare to install RPMs
            self.log.debug(
                "Initializing temporary RPM repository to install to...")
            cmd = "rpm --initdb --dbpath /rpm --root %s" % monorpms_path
            run_cmd(cmd, log_all=True, simple=True)

            # install RPMs one by one
            for rpm in self.src:
                self.log.debug("Installing RPM %s ..." % rpm['name'])
                if os.path.exists(rpm['path']):
                    cmd = ' '.join([
                        "rpm -i",
                        "--dbpath %(inst)s/rpm",
                        "--force",
                        "--relocate /=%(inst)s",
                        "--badreloc",
                        "--nodeps --nopost",
                        "%(rpm)s",
                    ]) % {
                        'inst': monorpms_path,
                        'rpm': rpm['path'],
                    }
                    run_cmd(cmd, log_all=True, simple=True)
                else:
                    raise EasyBuildError("RPM file %s not found", rpm['path'])

            # create patched version of gmcs command
            self.log.debug("Making our own copy of gmcs (one that works).")

            mygmcs_path = os.path.join(monorpms_path, 'usr', 'bin', 'mygmcs')
            try:
                shutil.copy(os.path.join(monorpms_path, 'usr', 'bin', 'gmcs'),
                            mygmcs_path)
            # shutil.copy raises IOError on Python 2 (IOError is an alias of OSError on Python 3)
            except (IOError, OSError) as err:
                raise EasyBuildError("Failed to copy gmcs to %s: %s",
                                     mygmcs_path, err)

            # point gmcs at the relocated mono runtime
            rpls = [
                ("exec /usr/bin/mono", "exec %s/usr/bin/mono" % monorpms_path),
                ("`/usr/bin/monodir`", "%s/usr/lib64/mono" % monorpms_path),
            ]
            apply_regex_substitutions(mygmcs_path, rpls)

            self.log.debug("Patched version of gmcs (%s): %s" %
                           (mygmcs_path, read_file(mygmcs_path)))

            # initiate bootstrap: build/install Mono with installed RPMs to temporary path
            tmp_mono_path = os.path.join(self.builddir, "tmp_mono")
            self.log.debug(
                "Build/install temporary Mono version in %s using installed RPMs..."
                % tmp_mono_path)

            par = ''
            if self.cfg['parallel']:
                par = "-j %s" % self.cfg['parallel']

            config_cmd = ' '.join([
                self.cfg['preconfigopts'],
                "./configure",
                "--prefix=" + tmp_mono_path,
                self.cfg['configopts'],
            ])
            build_cmd = ' '.join([
                # note: comma is required here, otherwise adjacent string literals are
                # concatenated and prebuildopts gets glued directly onto 'make'
                "%(prebuildopts)s",
                "make %(par)s",
                "EXTERNAL_MCS=%(path)s/usr/bin/mygmcs",
                "EXTERNAL_RUNTIME=%(path)s/usr/bin/mono",
                "%(buildopts)s",
            ]) % {
                'prebuildopts': self.cfg['prebuildopts'],
                'par': par,
                'path': monorpms_path,
                'buildopts': self.cfg['buildopts'],
            }
            install_cmd = "make install"

            for cmd in [config_cmd, build_cmd, install_cmd]:
                run_cmd(cmd, log_all=True, simple=True)

            # from now on, build against the temporary Mono installation
            more_buildopts = ' '.join([
                "EXTERNAL_MCS=%(path)s/bin/gmcs",
                "EXTERNAL_RUNTIME=%(path)s/bin/mono",
            ]) % {
                'path': tmp_mono_path
            }
            self.cfg.update('buildopts', more_buildopts)

            self.src = self.mono_srcs

        # continue with normal configure, and subsequent make, make install
        ConfigureMake.configure_step(self)
Ejemplo n.º 29
0
    def configure_step(self):
        """Configure Boost build using custom tools.

        Runs bootstrap.sh with the appropriate toolset and assembles a
        user-config.jam with explicit compiler and (optionally) MPI settings.
        """

        # boost_multi_thread is deprecated
        if self.cfg['boost_multi_thread'] is not None:
            self.log.deprecated(
                "boost_multi_thread has been replaced by tagged_layout. "
                "We build with tagged layout and both single and multi threading libraries "
                "from version 1.69.0.", '5.0')
            self.cfg['tagged_layout'] = True

        # mpi sanity check
        if self.cfg['boost_mpi'] and not self.toolchain.options.get(
                'usempi', None):
            raise EasyBuildError(
                "When enabling building boost_mpi, also enable the 'usempi' toolchain option."
            )

        # generate config depending on compiler used
        toolset = self.cfg['toolset']
        if toolset is None:
            if self.toolchain.comp_family() == toolchain.INTELCOMP:
                toolset = 'intel-linux'
            elif self.toolchain.comp_family() == toolchain.GCC:
                toolset = 'gcc'
            else:
                raise EasyBuildError(
                    "Unknown compiler used, don't know what to specify to --with-toolset, aborting."
                )

        cmd = "%s ./bootstrap.sh --with-toolset=%s --prefix=%s %s"
        tup = (self.cfg['preconfigopts'], toolset, self.installdir,
               self.cfg['configopts'])
        run_cmd(cmd % tup, log_all=True, simple=True)

        # Use build_toolset if specified or the bootstrap toolset without the OS suffix
        # (e.g. 'intel-linux' -> 'intel')
        self.toolset = self.cfg['build_toolset'] or re.sub(
            '-linux$', '', toolset)

        # lines to be appended to user-config.jam
        user_config = []

        # Explicitely set the compiler path to avoid B2 checking some standard paths like /opt
        cxx = os.getenv('CXX')
        if cxx:
            cxx = which(cxx, on_error=ERROR)
            # Remove default toolset config which may lead to duplicate toolsets (e.g. for intel-linux)
            apply_regex_substitutions('project-config.jam',
                                      [('using %s ;' % toolset, '')])
            # Add our toolset config with no version and full path to compiler
            user_config.append("using %s : : %s ;" % (self.toolset, cxx))

        if self.cfg['boost_mpi']:

            # configure the boost mpi module
            # http://www.boost.org/doc/libs/1_47_0/doc/html/mpi/getting_started.html
            # let Boost.Build know to look here for the config file

            # Check if using a Cray toolchain and configure MPI accordingly
            if self.toolchain.toolchain_family() == toolchain.CRAYPE:
                if self.toolchain.PRGENV_MODULE_NAME_SUFFIX == 'gnu':
                    craympichdir = os.getenv('CRAY_MPICH2_DIR')
                    craygccversion = os.getenv('GCC_VERSION')
                    # We configure the gcc toolchain below, so make sure the EC doesn't use another toolset
                    if self.toolset != 'gcc':
                        raise EasyBuildError(
                            "For the cray toolchain the 'gcc' toolset must be used."
                        )
                    # Remove the previous "using gcc" line added above (via self.toolset) if present
                    user_config = [
                        x for x in user_config
                        if not x.startswith('using gcc :')
                    ]
                    # hand-crafted Boost.Build config for Cray MPICH + gcc
                    user_config.extend([
                        'local CRAY_MPICH2_DIR =  %s ;' % craympichdir,
                        'using gcc ',
                        ': %s' % craygccversion,
                        ': CC ',
                        ': <compileflags>-I$(CRAY_MPICH2_DIR)/include ',
                        r'  <linkflags>-L$(CRAY_MPICH2_DIR)/lib \ ',
                        '; ',
                        'using mpi ',
                        ': CC ',
                        ': <find-shared-library>mpich ',
                        ': %s' % self.cfg['mpi_launcher'],
                        ';',
                        '',
                    ])
                else:
                    raise EasyBuildError(
                        "Bailing out: only PrgEnv-gnu supported for now")
            else:
                user_config.append("using mpi : %s ;" % os.getenv("MPICXX"))

        write_file('user-config.jam', '\n'.join(user_config), append=True)
Ejemplo n.º 30
0
    def configure_step(self):
        """Configure Ferret build.

        Patches the site_specific/platform_specific makefiles and the xgks &
        gksm2ps build files so they use the EasyBuild-provided compilers,
        flags and dependency locations.
        """

        buildtype = "x86_64-linux"
        if LooseVersion(self.version) < LooseVersion("7.3"):
            change_dir('FERRET')

        # required dependencies; fail early if any module is missing
        deps = ['HDF5', 'netCDF', 'Java']

        for name in deps:
            if not get_software_root(name):
                raise EasyBuildError("%s module not loaded?", name)

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            copy_file('external_functions/ef_utility/site_specific.mk.in',
                      'external_functions/ef_utility/site_specific.mk')
            copy_file('site_specific.mk.in', 'site_specific.mk')
            fns = [
                "site_specific.mk",
                "external_functions/ef_utility/site_specific.mk",
            ]
        else:
            fns = ["site_specific.mk"]

        regex_subs = [
            (r"^BUILDTYPE\s*=.*", "BUILDTYPE = %s" % buildtype),
            (r"^INSTALL_FER_DIR =.*",
             "INSTALL_FER_DIR = %s" % self.installdir),
        ]

        for name in deps:
            regex_subs.append((r"^(%s.*DIR\s*)=.*" % name.upper(),
                               r"\1 = %s" % get_software_root(name)))

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            regex_subs.extend([
                (r"^DIR_PREFIX =.*",
                 "DIR_PREFIX = %s" % self.cfg['start_dir']),
                # '$', '(' and ')' are regex metacharacters and must be escaped,
                # otherwise this pattern can never match (cfr. the escaped
                # \$\(CPP_FLAGS\) pattern used below)
                (r"^FER_LOCAL_EXTFCNS = \$\(FER_DIR\).*",
                 "FER_LOCAL_EXTFCNS = $(INSTALL_FER_DIR)/libs"),
            ])

        for fn in fns:
            apply_regex_substitutions(fn, regex_subs)

        # makefile variable -> environment variable providing its value
        comp_vars = {
            'CC': 'CC',
            'CFLAGS': 'CFLAGS',
            'CPPFLAGS': 'CPPFLAGS',
            'FC': 'F77',
        }

        # mapping of gfortran-specific flags to their ifort equivalents
        # (applied as plain textual substitutions when building with Intel compilers)
        gfort2ifort = {
            '-fno-second-underscore': ' ',
            '-fno-backslash': ' ',
            '-fdollar-ok': ' ',
            '-ffast-math': ' ',
            '-ffixed-line-length-132': '-132',
            '-fno-automatic': ' ',
            '-ffpe-trap=overflow': ' ',
            '-fimplicit-none': '-implicitnone',
            '-fdefault-real-8': '-r8',
            '-fdefault-double-8': ' ',
            '-Wl,-Bstatic -lgfortran -Wl,-Bdynamic': ' ',
            '-v --verbose -m64': ' ',
            '-export-dynamic': ' ',
            '-DG77_SIGNAL': ' ',
        }

        fn = 'xgks/CUSTOMIZE.%s' % buildtype

        regex_subs = [(r"^(FFLAGS\s*=').*-m64 (.*)",
                       r"\1%s \2" % os.getenv('FFLAGS'))]
        for x, y in comp_vars.items():
            regex_subs.append((r"^(%s\s*)=.*" % x, r"\1='%s'" % os.getenv(y)))

        # use X11 from EasyBuild if available, otherwise fall back to the system location
        x11_root = get_software_root('X11')
        if x11_root:
            regex_subs.append(
                (r"^(LD_X11\s*)=.*", r"\1='-L%s/lib -lX11'" % x11_root))
        else:
            regex_subs.append(
                (r"^(LD_X11\s*)=.*", r"\1='-L/usr/lib64/X11 -lX11'"))

        if LooseVersion(self.version) >= LooseVersion(
                "7.3") and self.toolchain.comp_family() == toolchain.INTELCOMP:
            regex_subs.extend(sorted(gfort2ifort.items()))

        apply_regex_substitutions(fn, regex_subs)

        comp_vars = {
            'CC': 'CC',
            'CXX': 'CXX',
            'F77': 'F77',
            'FC': 'F77',
        }

        # platform-specific makefiles moved/renamed in v7.3
        if LooseVersion(self.version) >= LooseVersion("7.3"):
            fns = [
                'platform_specific.mk.%s' % buildtype,
                'external_functions/ef_utility/platform_specific.mk.%s' %
                buildtype,
            ]
        else:
            fns = [
                'fer/platform_specific_flags.mk.%s' % buildtype,
                'ppl/platform_specific_flags.mk.%s' % buildtype,
                'external_functions/ef_utility/platform_specific_flags.mk.%s' %
                buildtype,
            ]

        regex_subs = []
        for x, y in comp_vars.items():
            regex_subs.append(
                (r"^(\s*%s\s*)=.*" % x, r"\1 = %s" % os.getenv(y)))

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            regex_subs.extend([
                (r"^(\s*LDFLAGS\s*=).*",
                 r"\1 -fPIC %s -lnetcdff -lnetcdf -lhdf5_hl -lhdf5" %
                 os.getenv("LDFLAGS")),
                (r"^(\s*)CDFLIB", r"\1NONEED"),
            ])

        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            regex_subs.append(
                (r"^(\s*LD\s*)=.*", r"\1 = %s -nofor-main" % os.getenv("F77")))
            for x in ["CFLAGS", "FFLAGS"]:
                regex_subs.append((r"^(\s*%s\s*=\s*\$\(CPP_FLAGS\)).*\\" % x,
                                   r"\1 %s \\" % os.getenv(x)))
            if LooseVersion(self.version) >= LooseVersion("7.3"):
                for x in ["CFLAGS", "FFLAGS"]:
                    regex_subs.append((r"^(\s*%s\s*=).*-m64 (.*)" % x,
                                       r"\1%s \2" % os.getenv(x)))
                regex_subs.extend(sorted(gfort2ifort.items()))

                regex_subs.append((r"^(\s*MYDEFINES\s*=.*)\\",
                                   r"\1-DF90_SYSTEM_ERROR_CALLS \\"))

        for fn in fns:
            apply_regex_substitutions(fn, regex_subs)

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            comp_vars = {
                'CC': 'CC',
                'LDFLAGS': 'LDFLAGS',
            }
            fn = 'gksm2ps/Makefile'

            regex_subs = [(r"^(\s*CFLAGS=\")-m64 (.*)",
                           r"\1%s \2" % os.getenv('CFLAGS'))]
            for x, y in comp_vars.items():
                regex_subs.append(
                    (r"^(\s*%s)=.*" % x, r"\1='%s' \\" % os.getenv(y)))

            apply_regex_substitutions(fn, regex_subs)
Ejemplo n.º 31
0
    def configure_step(self):
        """Custom configuration procedure for Bazel.

        Rewrites hardcoded /usr/bin tool paths and system include directories
        in Bazel's CROSSTOOL and (unix_)cc_configure.bzl scripts, so that the
        binutils and GCC installations provided as dependencies are used.
        """

        binutils_root = get_software_root('binutils')
        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # patching only makes sense when both the binutils & GCC prefixes are known
        if binutils_root and gcc_root:

            # locate GCC's private include directory: lib/gcc/<target>/<version>/include
            includes = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if len(includes) != 1:
                raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", includes)
            gcc_lib_inc = includes[0]

            # the sibling 'include-fixed' directory is expected to exist as well
            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
            if not os.path.exists(gcc_lib_inc_fixed):
                raise EasyBuildError("Derived directory %s does not exist", gcc_lib_inc_fixed)

            # C++ standard library headers: include/c++/<version>
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)

            # replace hardcoded paths in CROSSTOOL (script was dropped in Bazel 0.24.0)
            if LooseVersion(self.version) < LooseVersion('0.24.0'):
                crosstool_subs = [
                    (r'-B/usr/bin', '-B%s' % os.path.join(binutils_root, 'bin')),
                    (r'(cxx_builtin_include_directory:.*)/usr/lib/gcc', r'\1%s' % gcc_lib_inc),
                    (r'(cxx_builtin_include_directory:.*)/usr/local/include', r'\1%s' % gcc_lib_inc_fixed),
                    (r'(cxx_builtin_include_directory:.*)/usr/include', r'\1%s' % gcc_cplusplus_inc),
                ]
                for tool in ('ar', 'cpp', 'dwp', 'gcc', 'ld'):
                    tool_path = which(tool)
                    if not tool_path:
                        raise EasyBuildError("Failed to determine path to '%s'", tool)
                    crosstool_subs.append((os.path.join('/usr', 'bin', tool), tool_path))

                apply_regex_substitutions(os.path.join('tools', 'cpp', 'CROSSTOOL'), crosstool_subs)

            # replace hardcoded paths in (unix_)cc_configure.bzl
            bzl_subs = [
                (r'-B/usr/bin', '-B%s' % os.path.join(binutils_root, 'bin')),
                (r'"/usr/bin', '"' + os.path.join(binutils_root, 'bin')),
            ]
            for conf_bzl in ('cc_configure.bzl', 'unix_cc_configure.bzl'):
                bzl_path = os.path.join('tools', 'cpp', conf_bzl)
                if os.path.exists(bzl_path):
                    apply_regex_substitutions(bzl_path, bzl_subs)
        else:
            self.log.info(
                "Not patching Bazel build scripts, installation prefix for binutils/GCC not found"
            )

        # enable building in parallel
        env.setvar('EXTRA_BAZEL_ARGS', '--jobs=%d' % self.cfg['parallel'])
Ejemplo n.º 32
0
    def patch_crosstool_files(self):
        """Patches the CROSSTOOL files to include EasyBuild provided compiler paths.

        Collects GCC (and, when available, CUDA) include and library
        directories, then rewrites every CROSSTOOL* file found under the
        current working directory so that tool and header paths point at the
        EasyBuild-provided installations instead of /usr.

        Raises EasyBuildError when the GCC prefix, its expected include
        directories, or a required binutils tool cannot be located.
        """
        inc_paths, lib_paths = [], []

        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        if gcc_root:
            gcc_lib64 = os.path.join(gcc_root, 'lib64')
            lib_paths.append(gcc_lib64)

            gcc_ver = get_software_version('GCCcore') or get_software_version(
                'GCC')

            # figure out location of GCC include files
            # (expects exactly one match for lib/gcc/<target>/<version>/include)
            res = glob.glob(
                os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if res and len(res) == 1:
                gcc_lib_inc = res[0]
                inc_paths.append(gcc_lib_inc)
            else:
                raise EasyBuildError(
                    "Failed to pinpoint location of GCC include files: %s",
                    res)

            # make sure include-fixed directory is where we expect it to be;
            # missing is tolerated here (only logged), unlike the directories below
            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc),
                                             'include-fixed')
            if os.path.exists(gcc_lib_inc_fixed):
                inc_paths.append(gcc_lib_inc_fixed)
            else:
                self.log.info(
                    "Derived directory %s does not exist, so discarding it",
                    gcc_lib_inc_fixed)

            # also check on location of include/c++/<gcc version> directory
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++',
                                             gcc_ver)
            if os.path.exists(gcc_cplusplus_inc):
                inc_paths.append(gcc_cplusplus_inc)
            else:
                raise EasyBuildError("Derived directory %s does not exist",
                                     gcc_cplusplus_inc)
        else:
            raise EasyBuildError(
                "Failed to determine installation prefix for GCC")

        # CUDA is optional; include it in the search paths when present
        cuda_root = get_software_root('CUDA')
        if cuda_root:
            inc_paths.append(os.path.join(cuda_root, 'include'))
            lib_paths.append(os.path.join(cuda_root, 'lib64'))

        # fix hardcoded locations of compilers & tools
        # NOTE(review): each include dir is listed twice, once passed through
        # resolve_path and once as-is — presumably to cover both resolved and
        # original (symlinked) forms of the path; confirm both are needed
        cxx_inc_dirs = [
            'cxx_builtin_include_directory: "%s"' % resolve_path(p)
            for p in inc_paths
        ]
        cxx_inc_dirs += [
            'cxx_builtin_include_directory: "%s"' % p for p in inc_paths
        ]
        regex_subs = [
            (r'-B/usr/bin/', '-B%s %s' %
             (self.binutils_bin_path, ' '.join('-L%s/' % p
                                               for p in lib_paths))),
            # drop existing cxx_builtin_include_directory entries ...
            (r'(cxx_builtin_include_directory:).*', ''),
            # ... and re-insert ours right after each 'toolchain {' opener
            (r'^toolchain {', 'toolchain {\n' + '\n'.join(cxx_inc_dirs)),
        ]
        # every binutils tool must be resolvable on $PATH
        for tool in [
                'ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy',
                'objdump', 'strip'
        ]:
            path = which(tool)
            if path:
                regex_subs.append((os.path.join('/usr', 'bin', tool), path))
            else:
                raise EasyBuildError("Failed to determine path to '%s'", tool)

        # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurences of -fPIE/-pie if -fPIC is used
        if self.toolchain.options.get('pic', None):
            regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

        # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
        for path, dirnames, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                if filename.startswith('CROSSTOOL'):
                    full_path = os.path.join(path, filename)
                    self.log.info("Patching %s", full_path)
                    apply_regex_substitutions(full_path, regex_subs)
Ejemplo n.º 33
0
    def configure_step(self):
        """Custom configuration procedure for WRF-Fire.

        Configures the bundled WRFV3 part first, then the WPS part. Each part
        is configured by answering the questions of its interactive
        './configure' script (via run_cmd_qa), after which the generated
        configure.wrf / configure.wps file is patched to use the toolchain
        compilers and flags.

        Raises EasyBuildError when required dependencies are missing or the
        compiler family is not supported.
        """

        comp_fam = self.toolchain.comp_family()

        # define $NETCDF for the (required) netCDF-Fortran dependency
        netcdf_fortran = get_software_root('netCDF-Fortran')
        if netcdf_fortran:
            env.setvar('NETCDF', netcdf_fortran)
        else:
            # fixed typo in error message: "dependendy" -> "dependency"
            raise EasyBuildError("Required dependency netCDF-Fortran is missing")

        # define $PHDF5 for parallel HDF5 dependency,
        # but only when the MPI-enabled h5pcc compiler wrapper is available
        hdf5 = get_software_root('HDF5')
        if hdf5 and os.path.exists(os.path.join(hdf5, 'bin', 'h5pcc')):
            env.setvar('PHDF5', hdf5)

        # first, configure WRF part
        change_dir(os.path.join(self.cfg['start_dir'], 'WRFV3'))

        # instruct WRF-Fire to create netCDF v4 output files
        env.setvar('WRFIO_NETCDF4_FILE_SUPPORT', '1')

        # patch arch/Config_new.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join('arch', 'Config_new.pl'))

        # determine build type option to look for in the configure menu
        known_build_type_options = {
            toolchain.INTELCOMP: "Linux x86_64 i486 i586 i686, ifort compiler with icc",
            toolchain.GCC: "x86_64 Linux, gfortran compiler with gcc",
            toolchain.PGI: "Linux x86_64, PGI compiler with pgcc",
        }
        build_type_option = known_build_type_options.get(comp_fam)
        if build_type_option is None:
            raise EasyBuildError("Don't know which WPS configure option to select for compiler family %s", comp_fam)

        # raw string: pattern contains regex escapes like \s (avoids invalid escape sequence warnings)
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(%s\)" % (build_type_option, self.cfg['buildtype'])
        qa = {
            "Compile for nesting? (1=basic, 2=preset moves, 3=vortex following) [default 1]:": '1',
        }
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: '%(nr)s',
        }
        run_cmd_qa('./configure', qa, std_qa=std_qa, log_all=True, simple=True)

        # flag used to indicate that Fortran files were preprocessed; depends on compiler family
        cpp_flag = None
        if comp_fam == toolchain.INTELCOMP:
            cpp_flag = '-fpp'
        elif comp_fam == toolchain.GCC:
            cpp_flag = '-cpp'
        else:
            raise EasyBuildError("Don't know which flag to use to specify that Fortran files were preprocessed")

        # patch configure.wrf to get things right
        comps = {
            'CFLAGS_LOCAL': os.getenv('CFLAGS'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': "%s -DMPI2_SUPPORT" % os.getenv('MPICC'),
            'FCOPTIM': os.getenv('FFLAGS'),
            # specify that Fortran files have been preprocessed with cpp,
            # see http://forum.wrfforum.com/viewtopic.php?f=5&t=6086
            'FORMAT_FIXED': "-FI %s" % cpp_flag,
            'FORMAT_FREE': "-FR %s" % cpp_flag,
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v) for (k, v) in comps.items()]
        apply_regex_substitutions('configure.wrf', regex_subs)

        # also configure WPS part
        change_dir(os.path.join(self.cfg['start_dir'], 'WPS'))

        # patch arch/Config.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join('arch', 'Config.pl'))

        # determine build type option to look for
        known_build_type_options = {
            toolchain.INTELCOMP: "PC Linux x86_64, Intel compiler",
            toolchain.GCC: "PC Linux x86_64, g95 compiler",
            toolchain.PGI: "PC Linux x86_64 (IA64 and Opteron), PGI compiler 5.2 or higher",
        }
        build_type_option = known_build_type_options.get(comp_fam)
        if build_type_option is None:
            raise EasyBuildError("Don't know which WPS configure option to select for compiler family %s", comp_fam)

        known_wps_build_types = {
            'dmpar': 'DM parallel',
            'smpar': 'serial',
        }
        wps_build_type = known_wps_build_types.get(self.cfg['buildtype'])
        if wps_build_type is None:
            # fixed typo: self.cfg['builddtype'] -> self.cfg['buildtype'];
            # the misspelled key would itself fail instead of reporting the offending value
            raise EasyBuildError("Don't know which WPS build type to pick for '%s'", self.cfg['buildtype'])

        # raw string; the negative lookahead skips the 'NO GRIB2' flavour of the build type
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s.*%s(?!NO GRIB2)" % (build_type_option, wps_build_type)
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: '%(nr)s',
        }
        run_cmd_qa('./configure', {}, std_qa=std_qa, log_all=True, simple=True)

        # patch configure.wps to get things right
        comps = {
            'CC': '%s %s' % (os.getenv('MPICC'), os.getenv('CFLAGS')),
            'FC': '%s %s' % (os.getenv('MPIF90'), os.getenv('F90FLAGS'))
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v) for (k, v) in comps.items()]
        # specify that Fortran90 files have been preprocessed with cpp
        regex_subs.extend([
            (r"^(F77FLAGS\s*=\s*)", r"\1 %s " % cpp_flag),
            (r"^(FFLAGS\s*=\s*)", r"\1 %s " % cpp_flag),
        ])
        apply_regex_substitutions('configure.wps', regex_subs)
Ejemplo n.º 34
0
    def configure_step(self):
        """Custom configuration procedure for ALADIN.

        Builds the auxiliary libraries and the gmkpack build tool, then
        generates a gmkpack configuration file by answering the questions of
        the interactive 'gmkfilemaker' assistant via run_cmd_qa.
        """

        # unset $LIBRARY_PATH set by modules of dependencies, because it may screw up linking
        if 'LIBRARY_PATH' in os.environ:
            self.log.debug("Unsetting $LIBRARY_PATH (was: %s)" %
                           os.environ['LIBRARY_PATH'])
            # keep the original value around on the instance so it can be restored later
            self.orig_library_path = os.environ.pop('LIBRARY_PATH')

        # build auxiliary libraries
        auxlibs_dir = None

        # 'my_gnu' selects the compiler family in the auxlibs build script
        my_gnu = None
        if self.toolchain.comp_family() == toolchain.GCC:
            my_gnu = 'y'  # gfortran
            # force 64-bit reals/doubles with gfortran
            for var in ['CFLAGS', 'CXXFLAGS', 'F90FLAGS', 'FFLAGS']:
                flags = os.getenv(var)
                env.setvar(var,
                           "%s -fdefault-real-8 -fdefault-double-8" % flags)
                self.log.info("Updated %s to '%s'" % (var, os.getenv(var)))
        elif self.toolchain.comp_family() == toolchain.INTELCOMP:
            my_gnu = 'i'  # icc/ifort
        else:
            raise EasyBuildError(
                "Don't know how to set 'my_gnu' variable in auxlibs build script."
            )
        self.log.info("my_gnu set to '%s'" % my_gnu)

        # auxiliary libraries are installed into a temporary location first
        tmp_installroot = tempfile.mkdtemp(prefix='aladin_auxlibs_')

        try:
            cwd = os.getcwd()

            os.chdir(self.builddir)
            builddirs = os.listdir(self.builddir)

            # locate the unpacked auxlibs installer directory among the build dirs
            auxlibs_dir = [
                x for x in builddirs if x.startswith('auxlibs_installer')
            ][0]

            os.chdir(auxlibs_dir)

            # patch the driver script in place (fileinput keeps a .orig.eb backup)
            auto_driver = 'driver_automatic'
            for line in fileinput.input(auto_driver,
                                        inplace=1,
                                        backup='.orig.eb'):

                line = re.sub(r"^(my_gnu\s*=\s*).*$", r"\1%s" % my_gnu, line)
                line = re.sub(r"^(my_r32\s*=\s*).*$", r"\1n",
                              line)  # always 64-bit real precision
                line = re.sub(r"^(my_readonly\s*=\s*).*$", r"\1y",
                              line)  # make libs read-only after build
                line = re.sub(r"^(my_installroot\s*=\s*).*$",
                              r"\1%s" % tmp_installroot, line)

                # in inplace mode, stdout is redirected into the edited file
                sys.stdout.write(line)

            run_cmd("./%s" % auto_driver)

            os.chdir(cwd)

        except OSError as err:
            raise EasyBuildError("Failed to build ALADIN: %s", err)

        # build gmkpack, update PATH and set GMKROOT
        # we build gmkpack here because a config file is generated in the gmkpack install path
        try:
            gmkpack_dir = [x for x in builddirs if x.startswith('gmkpack')][0]
            os.chdir(os.path.join(self.builddir, gmkpack_dir))

            # decline running the assistant now; it is run explicitly below
            qa = {
                'Do you want to run the configuration file maker assistant now (y) or later [n] ?':
                'n',
            }

            run_cmd_qa("./build_gmkpack", qa)

            os.chdir(cwd)

            # make gmkpack's utilities available via $PATH
            paths = os.getenv('PATH').split(':')
            paths.append(os.path.join(self.builddir, gmkpack_dir, 'util'))
            env.setvar('PATH', ':'.join(paths))

            env.setvar('GMKROOT', os.path.join(self.builddir, gmkpack_dir))

        except OSError as err:
            raise EasyBuildError("Failed to build gmkpack: %s", err)

        # generate gmkpack configuration file
        self.conf_file = 'ALADIN_%s' % self.version
        self.conf_filepath = os.path.join(self.builddir, 'gmkpack_support',
                                          'arch', '%s.x' % self.conf_file)

        try:
            # start from a clean slate: remove any pre-existing config file
            if os.path.exists(self.conf_filepath):
                os.remove(self.conf_filepath)
                self.log.info("Removed existing gmpack config file %s" %
                              self.conf_filepath)

            archdir = os.path.dirname(self.conf_filepath)
            if not os.path.exists(archdir):
                mkdir(archdir, parents=True)

        except OSError as err:
            raise EasyBuildError("Failed to remove existing file %s: %s",
                                 self.conf_filepath, err)

        # answer 'y' to the MPICH question only for MPICH-like MPI families
        mpich = 'n'
        known_mpi_libs = [
            toolchain.MPICH, toolchain.MPICH2, toolchain.INTELMPI
        ]
        if self.toolchain.options.get(
                'usempi',
                None) and self.toolchain.mpi_family() in known_mpi_libs:
            mpich = 'y'

        # common question prefix/suffixes used by the gmkfilemaker assistant
        qpref = 'Please type the ABSOLUTE name of '
        qsuff = ', or ignore (environment variables allowed) :'
        qsuff2 = ', or ignore : (environment variables allowed) :'

        comp_fam = self.toolchain.comp_family()
        if comp_fam == toolchain.GCC:
            gribdir = 'GNU'
        elif comp_fam == toolchain.INTELCOMP:
            gribdir = 'INTEL'
        else:
            raise EasyBuildError(
                "Don't know which grib lib dir to use for compiler %s",
                comp_fam)

        # paths to the auxiliary/static libraries the assistant asks about
        aux_lib_gribex = os.path.join(tmp_installroot, gribdir, 'lib',
                                      'libgribex.a')
        aux_lib_ibm = os.path.join(tmp_installroot, gribdir, 'lib',
                                   'libibmdummy.a')
        grib_api_lib = os.path.join(get_software_root('grib_api'), 'lib',
                                    'libgrib_api.a')
        grib_api_f90_lib = os.path.join(get_software_root('grib_api'), 'lib',
                                        'libgrib_api_f90.a')
        grib_api_inc = os.path.join(get_software_root('grib_api'), 'include')
        jasperlib = os.path.join(get_software_root('JasPer'), 'lib',
                                 'libjasper.a')
        mpilib = os.path.join(os.getenv('MPI_LIB_DIR'),
                              os.getenv('MPI_LIB_SHARED'))

        # netCDF
        netcdf = get_software_root('netCDF')
        netcdf_fortran = get_software_root('netCDF-Fortran')
        if netcdf:
            netcdfinc = os.path.join(netcdf, 'include')
            # libnetcdff.a lives in netCDF-Fortran when that is a separate installation
            if netcdf_fortran:
                netcdflib = os.path.join(netcdf_fortran,
                                         get_software_libdir('netCDF-Fortran'),
                                         'libnetcdff.a')
            else:
                netcdflib = os.path.join(netcdf, get_software_libdir('netCDF'),
                                         'libnetcdff.a')
            if not os.path.exists(netcdflib):
                raise EasyBuildError("%s does not exist", netcdflib)
        else:
            raise EasyBuildError("netCDF(-Fortran) not available")

        ldpaths = [ldflag[2:] for ldflag in os.getenv('LDFLAGS').split(' ')
                   ]  # LDFLAGS have form '-L/path/to'

        # resolve each static LAPACK/BLAS library name to the first $LDFLAGS path containing it
        lapacklibs = []
        for lib in os.getenv('LAPACK_STATIC_LIBS').split(','):
            libpaths = [os.path.join(ldpath, lib) for ldpath in ldpaths]
            lapacklibs.append([
                libpath for libpath in libpaths if os.path.exists(libpath)
            ][0])
        lapacklib = ' '.join(lapacklibs)
        blaslibs = []
        for lib in os.getenv('BLAS_STATIC_LIBS').split(','):
            libpaths = [os.path.join(ldpath, lib) for ldpath in ldpaths]
            blaslibs.append([
                libpath for libpath in libpaths if os.path.exists(libpath)
            ][0])
        blaslib = ' '.join(blaslibs)

        # exact-match questions of the gmkfilemaker assistant and their answers
        qa = {
            'Do you want to run the configuration file maker assistant now (y) or later [n] ?':
            'y',
            'Do you want to setup your configuration file for MPICH (y/n) [n] ?':
            mpich,
            'Please type the directory name where to find a dummy file mpif.h or ignore :':
            os.getenv('MPI_INC_DIR'),
            '%sthe library gribex or emos%s' % (qpref, qsuff2):
            aux_lib_gribex,
            '%sthe library ibm%s' % (qpref, qsuff):
            aux_lib_ibm,
            '%sthe library grib_api%s' % (qpref, qsuff):
            grib_api_lib,
            '%sthe library grib_api_f90%s' % (qpref, qsuff):
            grib_api_f90_lib,
            '%sthe JPEG auxilary library if enabled by Grib_api%s' % (qpref, qsuff2):
            jasperlib,
            '%sthe library netcdf%s' % (qpref, qsuff):
            netcdflib,
            '%sthe library lapack%s' % (qpref, qsuff):
            lapacklib,
            '%sthe library blas%s' % (qpref, qsuff):
            blaslib,
            '%sthe library mpi%s' % (qpref, qsuff):
            mpilib,
            '%sa MPI dummy library for serial executions, or ignore :' % qpref:
            '',
            'Please type the directory name where to find grib_api headers, or ignore :':
            grib_api_inc,
            'Please type the directory name where to find fortint.h or ignore :':
            '',
            'Please type the directory name where to find netcdf headers, or ignore :':
            netcdfinc,
            'Do you want to define CANARI (y/n) [y] ?':
            'y',
            'Please type the name of the script file used to generate a preprocessed blacklist file, or ignore :':
            '',
            'Please type the name of the script file used to recover local libraries (gget), or ignore :':
            '',
            'Please type the options to tune the gnu compilers, or ignore :':
            os.getenv('F90FLAGS'),
        }

        f90_seq = os.getenv('F90_SEQ')
        if not f90_seq:
            # F90_SEQ is only defined when usempi is enabled
            f90_seq = os.getenv('F90')

        # regex-matched questions; OrderedDict because patterns are tried in order
        stdqa = OrderedDict([
            (r'Confirm library .* is .*',
             'y'),  # this one needs to be tried first!
            (r'.*fortran 90 compiler name .*\s*:\n\(suggestions\s*: .*\)',
             f90_seq),
            (r'.*fortran 90 compiler interfaced with .*\s*:\n\(suggestions\s*: .*\)',
             f90_seq),
            (r'Please type the ABSOLUTE name of .*library.*, or ignore\s*[:]*\s*[\n]*.*',
             ''),
            (r'Please .* to save this draft configuration file :\n.*',
             '%s.x' % self.conf_file),
        ])

        # output patterns that require no answer
        no_qa = [
            ".*ignored.",
        ]

        env.setvar('GMKTMP', self.builddir)
        env.setvar('GMKFILE', self.conf_file)

        run_cmd_qa("gmkfilemaker", qa, std_qa=stdqa, no_qa=no_qa)

        # set environment variables for installation dirs
        env.setvar('ROOTPACK', os.path.join(self.installdir, 'rootpack'))
        env.setvar('ROOTBIN', os.path.join(self.installdir, 'rootpack'))
        env.setvar('HOMEPACK', os.path.join(self.installdir, 'pack'))
        env.setvar('HOMEBIN', os.path.join(self.installdir, 'pack'))

        # patch config file to include right Fortran compiler flags
        regex_subs = [(r"^(FRTFLAGS\s*=.*)$", r"\1 %s" % os.getenv('FFLAGS'))]
        apply_regex_substitutions(self.conf_filepath, regex_subs)
Ejemplo n.º 35
0
    def configure_step(self):
        """Configure Ferret build.

        Patches the site_specific.mk and platform_specific*.mk makefiles
        (plus xgks' CUSTOMIZE file and, for v7.3+, the gksm2ps Makefile) so
        that the toolchain compilers/flags and the installation prefixes of
        the HDF5, netCDF and Java dependencies are used.

        Raises EasyBuildError when a required dependency module is not loaded.
        """

        buildtype = "x86_64-linux"
        # pre-7.3 sources keep everything below a FERRET subdirectory
        if LooseVersion(self.version) < LooseVersion("7.3"):
            change_dir('FERRET')

        deps = ['HDF5', 'netCDF', 'Java']

        for name in deps:
            if not get_software_root(name):
                raise EasyBuildError("%s module not loaded?", name)

        # as of v7.3, site_specific.mk must be created from the shipped *.in templates
        if LooseVersion(self.version) >= LooseVersion("7.3"):
            copy_file('external_functions/ef_utility/site_specific.mk.in',
                      'external_functions/ef_utility/site_specific.mk')
            copy_file('site_specific.mk.in', 'site_specific.mk')
            fns = [
                "site_specific.mk",
                "external_functions/ef_utility/site_specific.mk",
            ]
        else:
            fns = ["site_specific.mk"]

        regex_subs = [
            (r"^BUILDTYPE\s*=.*", "BUILDTYPE = %s" % buildtype),
            (r"^INSTALL_FER_DIR =.*", "INSTALL_FER_DIR = %s" % self.installdir),
        ]

        # point the <DEP>*DIR variables to the dependencies' installation prefixes
        for name in deps:
            regex_subs.append((r"^(%s.*DIR\s*)=.*" % name.upper(), r"\1 = %s" % get_software_root(name)))

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            regex_subs.extend([
                (r"^DIR_PREFIX =.*", "DIR_PREFIX = %s" % self.cfg['start_dir']),
                # bugfix: '$(' must be escaped in the regex pattern; unescaped, '$' anchors at
                # end-of-line and '(...)' starts a capture group, so the pattern could never
                # match the literal make-variable reference '$(FER_DIR)'
                (r"^FER_LOCAL_EXTFCNS = \$\(FER_DIR\)", "FER_LOCAL_EXTFCNS = $(INSTALL_FER_DIR)/libs"),
            ])

        for fn in fns:
            apply_regex_substitutions(fn, regex_subs)

        # makefile variable -> toolchain environment variable providing its value
        comp_vars = {
            'CC': 'CC',
            'CFLAGS': 'CFLAGS',
            'CPPFLAGS': 'CPPFLAGS',
            'FC': 'F77',
        }

        # map of gfortran-specific options to their ifort equivalents,
        # applied as plain substitutions when building with the Intel compilers
        gfort2ifort = {
            '-fno-second-underscore': ' ',
            '-fno-backslash': ' ',
            '-fdollar-ok': ' ',
            '-ffast-math': ' ',
            '-ffixed-line-length-132': '-132',
            '-fno-automatic': ' ',
            '-ffpe-trap=overflow': ' ',
            '-fimplicit-none': '-implicitnone',
            '-fdefault-real-8': '-r8',
            '-fdefault-double-8': ' ',
            '-Wl,-Bstatic -lgfortran -Wl,-Bdynamic': ' ',
            '-v --verbose -m64': ' ',
            '-export-dynamic': ' ',
            '-DG77_SIGNAL': ' ',
        }

        fn = 'xgks/CUSTOMIZE.%s' % buildtype

        regex_subs = [(r"^(FFLAGS\s*=').*-m64 (.*)", r"\1%s \2" % os.getenv('FFLAGS'))]
        for x, y in comp_vars.items():
            regex_subs.append((r"^(%s\s*)=.*" % x, r"\1='%s'" % os.getenv(y)))

        # link against X11 from the X11 module when available, system libs otherwise
        x11_root = get_software_root('X11')
        if x11_root:
            regex_subs.append((r"^(LD_X11\s*)=.*", r"\1='-L%s/lib -lX11'" % x11_root))
        else:
            regex_subs.append((r"^(LD_X11\s*)=.*", r"\1='-L/usr/lib64/X11 -lX11'"))

        if LooseVersion(self.version) >= LooseVersion("7.3") and self.toolchain.comp_family() == toolchain.INTELCOMP:
            # sorted() keeps the order of the substitutions deterministic
            regex_subs.extend(sorted(gfort2ifort.items()))

        apply_regex_substitutions(fn, regex_subs)

        comp_vars = {
            'CC': 'CC',
            'CXX': 'CXX',
            'F77': 'F77',
            'FC': 'F77',
        }

        # platform-specific makefiles to patch; their layout changed in v7.3
        if LooseVersion(self.version) >= LooseVersion("7.3"):
            fns = [
                'platform_specific.mk.%s' % buildtype,
                'external_functions/ef_utility/platform_specific.mk.%s' % buildtype,
            ]
        else:
            fns = [
                'fer/platform_specific_flags.mk.%s' % buildtype,
                'ppl/platform_specific_flags.mk.%s' % buildtype,
                'external_functions/ef_utility/platform_specific_flags.mk.%s' % buildtype,
            ]

        regex_subs = []
        for x, y in comp_vars.items():
            regex_subs.append((r"^(\s*%s\s*)=.*" % x, r"\1 = %s" % os.getenv(y)))

        if LooseVersion(self.version) >= LooseVersion("7.3"):
            regex_subs.extend([
                (r"^(\s*LDFLAGS\s*=).*", r"\1 -fPIC %s -lnetcdff -lnetcdf -lhdf5_hl -lhdf5" % os.getenv("LDFLAGS")),
                (r"^(\s*)CDFLIB", r"\1NONEED"),
            ])

        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            # '-nofor-main': the main program is not a Fortran main
            regex_subs.append((r"^(\s*LD\s*)=.*", r"\1 = %s -nofor-main" % os.getenv("F77")))
            for x in ["CFLAGS", "FFLAGS"]:
                regex_subs.append((r"^(\s*%s\s*=\s*\$\(CPP_FLAGS\)).*\\" % x, r"\1 %s \\" % os.getenv(x)))
            if LooseVersion(self.version) >= LooseVersion("7.3"):
                for x in ["CFLAGS", "FFLAGS"]:
                    regex_subs.append((r"^(\s*%s\s*=).*-m64 (.*)" % x, r"\1%s \2" % os.getenv(x)))
                regex_subs.extend(sorted(gfort2ifort.items()))

                regex_subs.append((r"^(\s*MYDEFINES\s*=.*)\\", r"\1-DF90_SYSTEM_ERROR_CALLS \\"))

        for fn in fns:
            apply_regex_substitutions(fn, regex_subs)

        # v7.3+ also ships a gksm2ps tool with its own Makefile
        if LooseVersion(self.version) >= LooseVersion("7.3"):
            comp_vars = {
                'CC': 'CC',
                'LDFLAGS': 'LDFLAGS',
            }
            fn = 'gksm2ps/Makefile'

            regex_subs = [(r"^(\s*CFLAGS=\")-m64 (.*)", r"\1%s \2" % os.getenv('CFLAGS'))]
            for x, y in comp_vars.items():
                regex_subs.append((r"^(\s*%s)=.*" % x, r"\1='%s' \\" % os.getenv(y)))

            apply_regex_substitutions(fn, regex_subs)
Ejemplo n.º 36
0
    def configure_step(self):
        """Custom configuration procedure for Bazel.

        Patches Bazel's CROSSTOOL and (unix_)cc_configure.bzl scripts so the
        binutils/GCC provided as dependencies are used rather than /usr, and
        sets $EXTRA_BAZEL_ARGS for the build step.
        """

        binutils_root = get_software_root('binutils')
        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # only patch Bazel scripts if binutils & GCC installation prefix could be determined
        if binutils_root and gcc_root:
            binutils_bin = os.path.join(binutils_root, 'bin')

            # GCC's private include directory: lib/gcc/<target>/<version>/include
            matches = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if len(matches) != 1:
                raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", matches)
            gcc_lib_inc = matches[0]

            # prefer the sibling 'include-fixed' directory; fall back to the include dir itself
            gcc_lib_inc_bis = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
            if not os.path.exists(gcc_lib_inc_bis):
                self.log.info("Derived directory %s does not exist, falling back to %s", gcc_lib_inc_bis, gcc_lib_inc)
                gcc_lib_inc_bis = gcc_lib_inc

            # C++ standard library headers must be present
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)

            # replace hardcoded paths in CROSSTOOL; that script is gone as of Bazel 0.24.0
            if LooseVersion(self.version) < LooseVersion('0.24.0'):
                crosstool_subs = [
                    (r'-B/usr/bin', '-B%s' % binutils_bin),
                    (r'(cxx_builtin_include_directory:.*)/usr/lib/gcc', r'\1%s' % gcc_lib_inc),
                    (r'(cxx_builtin_include_directory:.*)/usr/local/include', r'\1%s' % gcc_lib_inc_bis),
                    (r'(cxx_builtin_include_directory:.*)/usr/include', r'\1%s' % gcc_cplusplus_inc),
                ]
                for tool in ('ar', 'cpp', 'dwp', 'gcc', 'ld'):
                    tool_path = which(tool)
                    if not tool_path:
                        raise EasyBuildError("Failed to determine path to '%s'", tool)
                    crosstool_subs.append((os.path.join('/usr', 'bin', tool), tool_path))

                apply_regex_substitutions(os.path.join('tools', 'cpp', 'CROSSTOOL'), crosstool_subs)

            # replace hardcoded paths in (unix_)cc_configure.bzl
            bzl_subs = [
                (r'-B/usr/bin', '-B%s' % binutils_bin),
                (r'"/usr/bin', '"' + binutils_bin),
            ]
            for conf_bzl in ('cc_configure.bzl', 'unix_cc_configure.bzl'):
                bzl_path = os.path.join('tools', 'cpp', conf_bzl)
                if os.path.exists(bzl_path):
                    apply_regex_substitutions(bzl_path, bzl_subs)
        else:
            self.log.info("Not patching Bazel build scripts, installation prefix for binutils/GCC not found")

        # enable building in parallel, and enforce use of the JDK provided via modules;
        # Bazel bundles its own JDK for some architectures but not e.g. POWER,
        # see https://github.com/bazelbuild/bazel/issues/10377
        env.setvar('EXTRA_BAZEL_ARGS', '--jobs=%d --host_javabase=@local_jdk//:jdk' % self.cfg['parallel'])
Ejemplo n.º 37
0
    def build_step(self):
        """Custom build procedure for TensorFlow.

        Patches hardcoded paths to binutils/GCC tools (and CUDA paths, if CUDA
        is a dependency) in all Bazel CROSSTOOL* scripts, then runs
        'bazel build' followed by the generated 'build_pip_package' script to
        produce the TensorFlow Python wheel in the build directory.

        Raises EasyBuildError when required dependencies (binutils, GCC) or
        tools can not be located, or when the GCC installation layout does not
        match expectations.
        """

        # pre-create target installation directory
        mkdir(os.path.join(self.installdir, self.pylibdir), parents=True)

        binutils_root = get_software_root('binutils')
        if binutils_root:
            binutils_bin = os.path.join(binutils_root, 'bin')
        else:
            raise EasyBuildError(
                "Failed to determine installation prefix for binutils")

        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        if gcc_root:
            gcc_lib64 = os.path.join(gcc_root, 'lib64')
            gcc_ver = get_software_version('GCCcore') or get_software_version(
                'GCC')

            # figure out location of GCC include files
            res = glob.glob(
                os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            # exactly one match is expected; anything else indicates an unexpected GCC installation layout
            if res and len(res) == 1:
                gcc_lib_inc = res[0]
            else:
                raise EasyBuildError(
                    "Failed to pinpoint location of GCC include files: %s",
                    res)

            # make sure include-fixed directory is where we expect it to be
            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc),
                                             'include-fixed')
            if not os.path.exists(gcc_lib_inc_fixed):
                raise EasyBuildError("Derived directory %s does not exist",
                                     gcc_lib_inc_fixed)

            # also check on location of include/c++/<gcc version> directory
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++',
                                             gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist",
                                     gcc_cplusplus_inc)
        else:
            raise EasyBuildError(
                "Failed to determine installation prefix for GCC")

        # include/library paths to inject into the Bazel toolchain definition
        inc_paths = [gcc_lib_inc, gcc_lib_inc_fixed, gcc_cplusplus_inc]
        lib_paths = [gcc_lib64]

        cuda_root = get_software_root('CUDA')
        if cuda_root:
            inc_paths.append(os.path.join(cuda_root, 'include'))
            lib_paths.append(os.path.join(cuda_root, 'lib64'))

        # fix hardcoded locations of compilers & tools
        # two sets of cxx_builtin_include_directory lines: with symlinks resolved and as-is
        cxx_inc_dir_lines = '\n'.join(r'cxx_builtin_include_directory: "%s"' %
                                      resolve_path(p) for p in inc_paths)
        cxx_inc_dir_lines_no_resolv_path = '\n'.join(
            r'cxx_builtin_include_directory: "%s"' % p for p in inc_paths)
        regex_subs = [
            (r'-B/usr/bin/',
             '-B%s/ %s' % (binutils_bin, ' '.join('-L%s/' % p
                                                  for p in lib_paths))),
            (r'(cxx_builtin_include_directory:).*', ''),
            (r'^toolchain {', 'toolchain {\n' + cxx_inc_dir_lines + '\n' +
             cxx_inc_dir_lines_no_resolv_path),
        ]
        # replace hardcoded /usr/bin/<tool> paths with the corresponding tools found via $PATH
        for tool in [
                'ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy',
                'objdump', 'strip'
        ]:
            path = which(tool)
            if path:
                regex_subs.append((os.path.join('/usr', 'bin', tool), path))
            else:
                raise EasyBuildError("Failed to determine path to '%s'", tool)

        # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurences of -fPIE/-pie if -fPIC is used
        if self.toolchain.options.get('pic', None):
            regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

        # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
        for path, dirnames, filenames in os.walk(self.start_dir):
            for filename in filenames:
                if filename.startswith('CROSSTOOL'):
                    full_path = os.path.join(path, filename)
                    self.log.info("Patching %s", full_path)
                    apply_regex_substitutions(full_path, regex_subs)

        # use dedicated temporary directories to keep Bazel state out of the build directory
        tmpdir = tempfile.mkdtemp(suffix='-bazel-build')
        user_root_tmpdir = tempfile.mkdtemp(suffix='-user_root')

        # compose "bazel build" command with all its options...
        cmd = [
            self.cfg['prebuildopts'], 'bazel',
            '--output_base=%s' % tmpdir,
            '--install_base=%s' % os.path.join(tmpdir, 'inst_base'),
            '--output_user_root=%s' % user_root_tmpdir, 'build'
        ]

        # build with optimization enabled
        # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode
        cmd.append('--compilation_mode=opt')

        # select 'opt' config section (this is *not* the same as --compilation_mode=opt!)
        # https://docs.bazel.build/versions/master/user-manual.html#flag--config
        cmd.append('--config=opt')

        # make Bazel print full command line + make it verbose on failures
        # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands
        # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures
        cmd.extend(['--subcommands', '--verbose_failures'])

        # limit the number of parallel jobs running simultaneously (useful on KNL)...
        cmd.append('--jobs=%s' % self.cfg['parallel'])

        if self.toolchain.options.get('pic', None):
            cmd.append('--copt="-fPIC"')

        cmd.append(self.cfg['buildopts'])

        if cuda_root:
            cmd.append('--config=cuda')

        # if mkl-dnn is listed as a dependency it is used. Otherwise downloaded if with_mkl_dnn is true
        mkl_root = get_software_root('mkl-dnn')
        if mkl_root:
            cmd.extend(['--config=mkl'])
            # tell TensorFlow to use the mkl-dnn provided as a dependency rather than downloading it
            cmd.insert(0, "export TF_MKL_DOWNLOAD=0 &&")
            cmd.insert(0, "export TF_MKL_ROOT=%s &&" % mkl_root)
        elif self.cfg['with_mkl_dnn']:
            # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn)
            cmd.extend(['--config=mkl'])
            cmd.insert(0, "export TF_MKL_DOWNLOAD=1 && ")

        # specify target of the build command as last argument
        cmd.append('//tensorflow/tools/pip_package:build_pip_package')

        run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True)

        # run generated 'build_pip_package' script to build the .whl
        cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir
        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Ejemplo n.º 38
0
    def configure_step(self):
        """Custom configure step for NAMD, we build charm++ first (if required)."""

        # pick the compiler suffix for the Charm++ architecture string,
        # based on the toolchain compiler family (or the MPI wrapper if MPI is enabled)
        comp_fam = self.toolchain.comp_family()

        if self.toolchain.options.get('usempi', False):
            charm_arch_comp = 'mpicxx'
        elif comp_fam == toolchain.GCC:  # @UndefinedVariable
            charm_arch_comp = 'gcc'
        elif comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            charm_arch_comp = 'icc'
        else:
            charm_arch_comp = None

        # likewise for the compiler suffix of the NAMD architecture string
        if comp_fam == toolchain.GCC:  # @UndefinedVariable
            namd_comp = 'g++'
        elif comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            namd_comp = 'icc'
        else:
            namd_comp = None

        if charm_arch_comp is None or namd_comp is None:
            raise EasyBuildError("Unknown compiler family, can't complete Charm++/NAMD target architecture.")

        # NOTE: 'smp' must be appended BEFORE the compiler suffix;
        # the charm arch string looks like: mpi-linux-x86_64-smp-mpicxx,
        # otherwise the value derived for namd_charm_arch below ends up in the wrong order
        if self.toolchain.options.get('openmp', False):
            self.cfg.update('charm_arch', 'smp')
        self.cfg.update('charm_arch', charm_arch_comp)
        self.log.info("Updated 'charm_arch': %s", self.cfg['charm_arch'])

        self.namd_arch = '%s-%s' % (self.cfg['namd_basearch'], namd_comp)
        self.log.info("Completed NAMD target architecture: %s", self.namd_arch)

        # build Charm++ inside the directory extracted from the Charm++ tarball
        charm_cxxflags = os.environ['CXXFLAGS'] + ' -DMPICH_IGNORE_CXX_SEEK ' + self.cfg['charm_extra_cxxflags']
        cmd = "./build charm++ %(arch)s %(opts)s --with-numa -j%(parallel)s '%(cxxflags)s'" % {
            'arch': self.cfg['charm_arch'],
            'opts': self.cfg['charm_opts'],
            'parallel': self.cfg['parallel'],
            'cxxflags': charm_cxxflags,
        }
        charm_subdir = '.'.join(os.path.basename(self.charm_tarballs[0]).split('.')[:-1])
        self.log.debug("Building Charm++ using cmd '%s' in '%s'" % (cmd, charm_subdir))
        run_cmd(cmd, path=charm_subdir)

        # pass compilers and compiler flags down to NAMD's config script
        self.cfg.update('namd_cfg_opts', '--cc "%s" --cc-opts "%s"' % (os.environ['CC'], os.environ['CFLAGS']))
        cxxflags = os.environ['CXXFLAGS']
        if LooseVersion(self.version) >= LooseVersion('2.12'):
            # NAMD 2.12 and newer require C++11 support
            cxxflags += ' --std=c++11'
        self.cfg.update('namd_cfg_opts', '--cxx "%s" --cxx-opts "%s"' % (os.environ['CXX'], cxxflags))

        # NAMD dependencies: CUDA, Tcl, FFTW
        cuda = get_software_root('CUDA')
        if cuda and (self.cfg['cuda'] is None or self.cfg['cuda']):
            self.cfg.update('namd_cfg_opts', "--with-cuda --cuda-prefix %s" % cuda)
        elif not self.cfg['cuda']:
            self.log.warning("CUDA is disabled")
        elif not cuda and self.cfg['cuda']:
            raise EasyBuildError("CUDA is not a dependency, but support for CUDA is enabled.")

        tcl = get_software_root('Tcl')
        if tcl:
            self.cfg.update('namd_cfg_opts', '--with-tcl --tcl-prefix %s' % tcl)
            # make sure the versioned Tcl library (e.g. -ltcl8.6) matching the Tcl dependency is linked
            tclversion = '.'.join(get_software_version('Tcl').split('.')[0:2])
            tclv_subs = [(r'-ltcl[\d.]*\s', '-ltcl%s ' % tclversion)]

            apply_regex_substitutions(os.path.join('arch', '%s.tcl' % self.cfg['namd_basearch']), tclv_subs)

        fftw = get_software_root('FFTW')
        if fftw:
            if LooseVersion(get_software_version('FFTW')) >= LooseVersion('3.0'):
                if LooseVersion(self.version) >= LooseVersion('2.9'):
                    self.cfg.update('namd_cfg_opts', "--with-fftw3")
                else:
                    raise EasyBuildError("Using FFTW v3.x only supported in NAMD v2.9 and up.")
            else:
                self.cfg.update('namd_cfg_opts', "--with-fftw")
            self.cfg.update('namd_cfg_opts', "--fftw-prefix %s" % fftw)

        # run NAMD's config script with the completed set of options
        namd_charm_arch = "--charm-arch %s" % '-'.join(self.cfg['charm_arch'].strip().split())
        cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch, self.cfg["namd_cfg_opts"])
        run_cmd(cmd)
Ejemplo n.º 39
0
    def configure_step(self):
        """Dedicated configure step for Mono: install Mono from RPM (if provided), then run configure.

        Mono is required to build Mono, so when RPMs are provided they are first
        rebuilt to be relocatable, installed into a temporary RPM repository,
        and used to bootstrap a temporary Mono installation which is then used
        to build the real one from source.
        """

        # install Mono from RPMs if provided (because we need Mono to build Mono)
        if self.rpms:

            # prepare path for installing RPMs in
            monorpms_path = os.path.join(self.builddir, "monorpms")
            try:
                os.makedirs(os.path.join(monorpms_path, 'rpm'))
            except OSError as err:
                raise EasyBuildError("Failed to create directories for installing Mono RPMs in: %s", err)

            self.src = self.rpms
            self.rebuildRPM = True

            # rebuild RPMs to make them relocatable
            Rpm.configure_step(self)

            # prepare to install RPMs
            self.log.debug("Initializing temporary RPM repository to install to...")
            cmd = "rpm --initdb --dbpath /rpm --root %s" % monorpms_path
            run_cmd(cmd, log_all=True, simple=True)

            # install RPMs one by one
            for rpm in self.src:
                self.log.debug("Installing RPM %s ..." % rpm['name'])
                if not os.path.exists(rpm['path']):
                    raise EasyBuildError("RPM file %s not found", rpm['path'])
                cmd = ' '.join([
                    "rpm -i",
                    "--dbpath %(inst)s/rpm",
                    "--force",
                    "--relocate /=%(inst)s",
                    "--badreloc",
                    "--nodeps --nopost",
                    "%(rpm)s",
                ]) % {
                    'inst': monorpms_path,
                    'rpm': rpm['path'],
                }
                run_cmd(cmd, log_all=True, simple=True)

            # create patched version of gmcs command
            self.log.debug("Making our own copy of gmcs (one that works).")

            mygmcs_path = os.path.join(monorpms_path, 'usr', 'bin', 'mygmcs')
            try:
                shutil.copy(os.path.join(monorpms_path, 'usr', 'bin', 'gmcs'), mygmcs_path)
            except OSError as err:
                raise EasyBuildError("Failed to copy gmcs to %s: %s", mygmcs_path, err)

            # point the copied gmcs at the mono runtime from the temporary RPM install
            rpls = [
                ("exec /usr/bin/mono", "exec %s/usr/bin/mono" % monorpms_path),
                ("`/usr/bin/monodir`", "%s/usr/lib64/mono" % monorpms_path),
            ]
            apply_regex_substitutions(mygmcs_path, rpls)

            self.log.debug("Patched version of gmcs (%s): %s" % (mygmcs_path, read_file(mygmcs_path)))

            # initiate bootstrap: build/install Mono with installed RPMs to temporary path
            tmp_mono_path = os.path.join(self.builddir, "tmp_mono")
            self.log.debug("Build/install temporary Mono version in %s using installed RPMs..." % tmp_mono_path)

            par = ''
            if self.cfg['parallel']:
                par = "-j %s" % self.cfg['parallel']

            config_cmd = "%s ./configure --prefix=%s %s" % (self.cfg['preconfigopts'], tmp_mono_path, self.cfg['configopts'])
            # note: the comma after "%(prebuildopts)s" is required;
            # without it, implicit string concatenation glues prebuildopts directly onto 'make'
            build_cmd = ' '.join([
                "%(prebuildopts)s",
                "make %(par)s",
                "EXTERNAL_MCS=%(path)s/usr/bin/mygmcs",
                "EXTERNAL_RUNTIME=%(path)s/usr/bin/mono",
                "%(buildopts)s",
            ]) % {
                'prebuildopts': self.cfg['prebuildopts'],
                'par': par,
                'path': monorpms_path,
                'buildopts': self.cfg['buildopts'],
            }
            install_cmd = "make install"

            for cmd in [config_cmd, build_cmd, install_cmd]:
                run_cmd(cmd, log_all=True, simple=True)

            # use the temporary Mono installation for the real build from source
            more_buildopts = ' '.join([
                "EXTERNAL_MCS=%(path)s/bin/gmcs",
                "EXTERNAL_RUNTIME=%(path)s/bin/mono",
            ]) % {'path': tmp_mono_path}
            self.cfg.update('buildopts', more_buildopts)

            self.src = self.mono_srcs

        # continue with normal configure, and subsequent make, make install
        ConfigureMake.configure_step(self)
Ejemplo n.º 40
0
    def configure_step(self):
        """Configure build:
        - set required environment variables (for netCDF, JasPer)
        - patch compile script and ungrib Makefile for non-default install paths of WRF and JasPer
        - run configure script and figure how to select desired build option
        - patch configure.wps file afterwards to fix 'serial compiler' setting

        Raises EasyBuildError when required dependencies (WRF, JasPer) are not
        available, or when the requested build type is unknown.
        """

        # netCDF dependency check + setting env vars (NETCDF, NETCDFF)
        set_netcdf_env_vars(self.log)

        # WRF dependency check
        wrf = get_software_root('WRF')
        if wrf:
            wrfdir = os.path.join(wrf,
                                  det_wrf_subdir(get_software_version('WRF')))
        else:
            raise EasyBuildError("WRF module not loaded?")

        # name of WPS compile script (patched below for older WPS versions)
        self.compile_script = 'compile'

        if LooseVersion(self.version) >= LooseVersion('4.0.3'):
            # specify install location of WRF via $WRF_DIR (supported since WPS 4.0.3)
            # see https://github.com/wrf-model/WPS/pull/102
            env.setvar('WRF_DIR', wrfdir)
        else:
            # patch compile script so that WRF is found
            regex_subs = [(r"^(\s*set\s*WRF_DIR_PRE\s*=\s*)\${DEV_TOP}(.*)$",
                           r"\1%s\2" % wrfdir)]
            apply_regex_substitutions(self.compile_script, regex_subs)

        # libpng dependency check
        libpng = get_software_root('libpng')
        zlib = get_software_root('zlib')
        if libpng:
            paths = [libpng]
            if zlib:
                paths.insert(0, zlib)
            # -I/-L flags for libpng (and zlib, if available)
            libpnginc = ' '.join(
                ['-I%s' % os.path.join(path, 'include') for path in paths])
            libpnglib = ' '.join(
                ['-L%s' % os.path.join(path, 'lib') for path in paths])
        else:
            # define these as empty, assume that libpng will be available via OS (e.g. due to --filter-deps=libpng)
            libpnglib = ""
            libpnginc = ""

        # JasPer dependency check + setting env vars
        jasper = get_software_root('JasPer')
        if jasper:
            env.setvar('JASPERINC', os.path.join(jasper, "include"))
            jasperlibdir = os.path.join(jasper, "lib")
            env.setvar('JASPERLIB', jasperlibdir)
            jasperlib = "-L%s" % jasperlibdir
        else:
            raise EasyBuildError("JasPer module not loaded?")

        # patch ungrib Makefile so that JasPer is found
        jasperlibs = "%s -ljasper %s -lpng" % (jasperlib, libpnglib)
        regex_subs = [
            (r"^(\s*-L\.\s*-l\$\(LIBTARGET\))(\s*;.*)$",
             r"\1 %s\2" % jasperlibs),
            (r"^(\s*\$\(COMPRESSION_LIBS\))(\s*;.*)$",
             r"\1 %s\2" % jasperlibs),
        ]
        apply_regex_substitutions(os.path.join('ungrib', 'src', 'Makefile'),
                                  regex_subs)

        # patch arch/Config.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join("arch", "Config.pl"))

        # configure

        # determine build type option to look for
        # (the labels in the interactive configure menu differ across WPS versions)
        self.comp_fam = self.toolchain.comp_family()
        build_type_option = None

        if LooseVersion(self.version) >= LooseVersion("3.4"):

            knownbuildtypes = {'smpar': 'serial', 'dmpar': 'dmpar'}

            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                build_type_option = " Linux x86_64, Intel compiler"

            elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
                if LooseVersion(self.version) >= LooseVersion("3.6"):
                    build_type_option = "Linux x86_64, gfortran"
                else:
                    build_type_option = "Linux x86_64 g95"

            else:
                raise EasyBuildError(
                    "Don't know how to figure out build type to select.")

        else:

            knownbuildtypes = {'smpar': 'serial', 'dmpar': 'DM parallel'}

            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                build_type_option = "PC Linux x86_64, Intel compiler"

            elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
                build_type_option = "PC Linux x86_64, gfortran compiler,"
                knownbuildtypes['dmpar'] = knownbuildtypes['dmpar'].upper()

            else:
                raise EasyBuildError(
                    "Don't know how to figure out build type to select.")

        # check and fetch selected build type
        bt = self.cfg['buildtype']

        if bt not in knownbuildtypes.keys():
            raise EasyBuildError(
                "Unknown build type: '%s'. Supported build types: %s", bt,
                knownbuildtypes.keys())

        # fetch option number based on build type option and selected build type
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(?%s\)?\s*\n" % (
            build_type_option, knownbuildtypes[bt])

        cmd = ' '.join(
            [self.cfg['preconfigopts'], './configure', self.cfg['configopts']])
        qa = {}
        no_qa = [".*compiler is.*"]
        std_qa = {
            # named group in match will be used to construct answer
            r"%s(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question:
            "%(nr)s",
        }

        # run interactive configure script, answering the build type selection question automatically
        run_cmd_qa(cmd,
                   qa,
                   no_qa=no_qa,
                   std_qa=std_qa,
                   log_all=True,
                   simple=True)

        # make sure correct compilers and compiler flags are being used
        comps = {
            'SCC': "%s -I$(JASPERINC) %s" % (os.getenv('CC'), libpnginc),
            'SFC': os.getenv('F90'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': os.getenv('MPICC'),
            'FC': os.getenv('MPIF90'),
            'CC': os.getenv('MPICC'),
        }
        if self.toolchain.options.get('openmp', None):
            comps.update({
                'LDFLAGS':
                '%s %s' %
                (self.toolchain.get_flag('openmp'), os.environ['LDFLAGS'])
            })

        regex_subs = [(r"^(%s\s*=\s*).*$" % key, r"\1 %s" % val)
                      for (key, val) in comps.items()]
        apply_regex_substitutions('configure.wps', regex_subs)
Ejemplo n.º 41
0
class EB_Molpro(ConfigureMake, Binary):
    """Support for building and installing Molpro."""

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters for Molpro."""
        # Combine extra variables from Binary and ConfigureMake easyblocks as
        # well as those needed for Molpro specifically
        extra_vars = Binary.extra_options()
        extra_vars = ConfigureMake.extra_options(extra_vars)
        extra_vars.update({
            'precompiled_binaries':
            [False, "Are we installing precompiled binaries?", CUSTOM],
        })
        return EasyBlock.extra_options(extra_vars)

    def __init__(self, *args, **kwargs):
        """Easyblock constructor, initialize class variables specific to Molpro and check on license token."""
        super(EB_Molpro, self).__init__(*args, **kwargs)

        # empty string rather than None, to keep easyblock compatible with --module-only
        self.full_prefix = ''
        # original LAUNCHER value from the generated CONFIG file
        self.orig_launcher = None

        # whether the symlink to the license file should be cleaned up afterwards
        self.cleanup_token_symlink = False
        self.license_token = os.path.join(os.path.expanduser('~'), '.molpro',
                                          'token')

    def extract_step(self):
        """Extract Molpro source files, or just copy in case of binary install."""
        if self.cfg['precompiled_binaries']:
            Binary.extract_step(self)
        else:
            ConfigureMake.extract_step(self)

    def configure_step(self):
        """Custom configuration procedure for Molpro: use 'configure -batch'."""

        if not os.path.isfile(self.license_token):
            if self.cfg['license_file'] is not None and os.path.isfile(
                    self.cfg['license_file']):
                # put symlink in place to specified license file in $HOME/.molpro/token
                # other approaches (like defining $MOLPRO_KEY) don't seem to work
                self.cleanup_token_symlink = True
                mkdir(os.path.dirname(self.license_token))
                try:
                    os.symlink(self.cfg['license_file'], self.license_token)
                    self.log.debug("Symlinked %s to %s",
                                   self.cfg['license_file'],
                                   self.license_token)
                # fixed: was Python 2-only 'except OSError, err' syntax;
                # also include the actual error in the message
                except OSError as err:
                    raise EasyBuildError(
                        "Failed to create symlink for license token at %s: %s",
                        self.license_token, err)

            else:
                self.log.warning(
                    "No licence token found at either {0} or via 'license_file'"
                    .format(self.license_token))

        # Only do the rest of the configuration if we're building from source
        if not self.cfg['precompiled_binaries']:
            # installation prefix
            self.cfg.update('configopts', "-prefix %s" % self.installdir)

            # compilers

            # compilers & MPI
            if self.toolchain.options.get('usempi', None):
                self.cfg.update(
                    'configopts',
                    "-%s -%s" % (os.environ['CC_SEQ'], os.environ['F90_SEQ']))
                if 'MPI_INC_DIR' in os.environ:
                    self.cfg.update(
                        'configopts',
                        "-mpp -mppbase %s" % os.environ['MPI_INC_DIR'])
                else:
                    raise EasyBuildError("$MPI_INC_DIR not defined")
            else:
                self.cfg.update(
                    'configopts',
                    "-%s -%s" % (os.environ['CC'], os.environ['F90']))

            # BLAS/LAPACK
            if 'BLAS_LIB_DIR' in os.environ:
                self.cfg.update(
                    'configopts',
                    "-blas -blaspath %s" % os.environ['BLAS_LIB_DIR'])
            else:
                raise EasyBuildError("$BLAS_LIB_DIR not defined")

            if 'LAPACK_LIB_DIR' in os.environ:
                self.cfg.update(
                    'configopts',
                    "-lapack -lapackpath %s" % os.environ['LAPACK_LIB_DIR'])
            else:
                raise EasyBuildError("$LAPACK_LIB_DIR not defined")

            # 32 vs 64 bit
            if self.toolchain.options.get('32bit', None):
                self.cfg.update('configopts', '-i4')
            else:
                self.cfg.update('configopts', '-i8')

            run_cmd("./configure -batch %s" % self.cfg['configopts'])

            cfgfile = os.path.join(self.cfg['start_dir'], 'CONFIG')
            cfgtxt = read_file(cfgfile)

            # determine original LAUNCHER value
            launcher_regex = re.compile('^LAUNCHER=(.*)$', re.M)
            res = launcher_regex.search(cfgtxt)
            if res:
                self.orig_launcher = res.group(1)
                self.log.debug("Found original value for LAUNCHER: %s",
                               self.orig_launcher)
            else:
                raise EasyBuildError("Failed to determine LAUNCHER value")

            # determine full installation prefix
            prefix_regex = re.compile('^PREFIX=(.*)$', re.M)
            res = prefix_regex.search(cfgtxt)
            if res:
                self.full_prefix = res.group(1)
                self.log.debug("Found full installation prefix: %s",
                               self.full_prefix)
            else:
                raise EasyBuildError(
                    "Failed to determine full installation prefix")

            # determine MPI launcher command that can be used during build/test
            # obtain command with specific number of cores (required by mpi_cmd_for), then replace that number with '%n'
            launcher = self.toolchain.mpi_cmd_for('%x', self.cfg['parallel'])
            launcher = launcher.replace(' %s' % self.cfg['parallel'], ' %n')

            # patch CONFIG file to change LAUNCHER definition, in order to avoid having to start mpd
            apply_regex_substitutions(
                cfgfile, [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % launcher)])

            # reread CONFIG and log contents
            cfgtxt = read_file(cfgfile)
            self.log.info("Contents of CONFIG file:\n%s", cfgtxt)
Ejemplo n.º 42
0
    def test_step(self):
        """Run WPS test (requires large dataset to be downloaded).

        Downloads and unpacks the configured test data into a temporary
        directory, then exercises the geogrid.exe, ungrib.exe and metgrid.exe
        executables in sequence, checking each for successful completion.
        """

        wpsdir = None

        def run_wps_cmd(cmdname, mpi_cmd=True):
            """Run a WPS command, and check for success."""

            cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

            if mpi_cmd:
                if build_option('mpi_tests'):
                    cmd = self.toolchain.mpi_cmd_for(cmd, 1)
                else:
                    self.log.info(
                        "Skipping MPI test for %s, since MPI tests are disabled",
                        cmd)
                    return

            (out, _) = run_cmd(cmd, log_all=True, simple=False)

            # each WPS tool reports success with a "Successful completion of <tool>" message
            re_success = re.compile("Successful completion of %s" % cmdname)
            if not re_success.search(out):
                raise EasyBuildError("%s.exe failed (pattern '%s' not found)?",
                                     cmdname, re_success.pattern)

        if self.cfg['runtest']:
            if not self.cfg['testdata']:
                raise EasyBuildError("List of URLs for testdata not provided.")

            wpsdir = os.path.join(self.builddir, self.wps_subdir)

            try:
                # create temporary directory
                tmpdir = tempfile.mkdtemp()
                change_dir(tmpdir)

                # download data
                testdata_paths = []
                for testdata in self.cfg['testdata']:
                    path = self.obtain_file(testdata)
                    if not path:
                        raise EasyBuildError(
                            "Downloading file from %s failed?", testdata)
                    testdata_paths.append(path)

                # unpack data
                for path in testdata_paths:
                    srcdir = extract_file(path, tmpdir, change_into_dir=False)
                    change_dir(srcdir)

                namelist_file = os.path.join(tmpdir, 'namelist.wps')

                # GEOGRID

                # setup directories and files
                # (name of geography data directory changed in WPS 4.0)
                if LooseVersion(self.version) < LooseVersion("4.0"):
                    geog_data_dir = "geog"
                else:
                    geog_data_dir = "WPS_GEOG"
                for dir_name in os.listdir(os.path.join(tmpdir,
                                                        geog_data_dir)):
                    symlink(os.path.join(tmpdir, geog_data_dir, dir_name),
                            os.path.join(tmpdir, dir_name))

                # copy namelist.wps file and patch it for geogrid
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$",
                               r"\1 '%s'" % tmpdir)]
                apply_regex_substitutions(namelist_file, regex_subs)

                # GEOGRID.TBL
                geogrid_dir = os.path.join(tmpdir, 'geogrid')
                mkdir(geogrid_dir)
                symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                        os.path.join(geogrid_dir, 'GEOGRID.TBL'))

                # run geogrid.exe
                run_wps_cmd("geogrid")

                # UNGRIB

                # determine start and end time stamps of grib files
                # (derived from the sorted names of the downloaded fnl_* files)
                grib_file_prefix = "fnl_"
                k = len(grib_file_prefix)
                fs = [
                    f for f in sorted(os.listdir('.'))
                    if f.startswith(grib_file_prefix)
                ]
                start = "%s:00:00" % fs[0][k:]
                end = "%s:00:00" % fs[-1][k:]

                # copy namelist.wps file and patch it for ungrib
                copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
                regex_subs = [
                    (r"^(\s*start_date\s*=\s*).*$",
                     r"\1 '%s','%s'," % (start, start)),
                    (r"^(\s*end_date\s*=\s*).*$",
                     r"\1 '%s','%s'," % (end, end)),
                ]
                apply_regex_substitutions(namelist_file, regex_subs)

                # copy correct Vtable
                # (name of Vtable file differs across WPS versions)
                vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
                if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW'),
                              os.path.join(tmpdir, 'Vtable'))
                elif os.path.exists(os.path.join(vtable_dir,
                                                 'Vtable.ARW.UPP')):
                    copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'),
                              os.path.join(tmpdir, 'Vtable'))
                else:
                    raise EasyBuildError(
                        "Could not find Vtable file to use for testing ungrib")

                # run link_grib.csh script
                cmd = "%s %s*" % (os.path.join(
                    wpsdir, "link_grib.csh"), grib_file_prefix)
                run_cmd(cmd, log_all=True, simple=True)

                # run ungrib.exe
                run_wps_cmd("ungrib", mpi_cmd=False)

                # METGRID.TBL

                metgrid_dir = os.path.join(tmpdir, 'metgrid')
                mkdir(metgrid_dir)
                symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                        os.path.join(metgrid_dir, 'METGRID.TBL'))

                # run metgrid.exe
                run_wps_cmd('metgrid')

                # clean up
                change_dir(self.builddir)
                remove_dir(tmpdir)

            except OSError as err:
                raise EasyBuildError("Failed to run WPS test: %s", err)
Ejemplo n.º 43
0
                    os.path.join(self.installdir, 'lib'),
                    r"directory .* does not exist, try to create [Y]/n\n":
                    '',
                }
                run_cmd_qa(cmd, qa=qa, std_qa=stdqa, log_all=True, simple=True)
        else:
            if os.path.isfile(self.license_token):
                run_cmd("make tuning")

            super(EB_Molpro, self).install_step()

            # put original LAUNCHER definition back in place in bin/molpro that got installed,
            # since the value used during installation point to temporary files
            molpro_path = os.path.join(self.full_prefix, 'bin', 'molpro')
            apply_regex_substitutions(
                molpro_path,
                [(r"^(LAUNCHER\s*=\s*).*$", r"\1 %s" % self.orig_launcher)])

        if self.cleanup_token_symlink:
            try:
                os.remove(self.license_token)
                self.log.debug("Symlink to license token %s removed",
                               self.license_token)
            except OSError, err:
                raise EasyBuildError("Failed to remove %s: %s",
                                     self.license_token, err)

    def make_module_req_guess(self):
        """Customize $PATH guesses for Molpro module."""
        guesses = super(EB_Molpro, self).make_module_req_guess()
        guesses.update({
Ejemplo n.º 44
0
    def configure_step(self):
        """Custom configure step for NAMD, we build charm++ first (if required)."""

        # complete Charm++ and NAMD architecture string with compiler family:
        # MPI builds use the 'mpicxx' wrapper, otherwise map the toolchain compiler
        # family to the matching Charm++ compiler name
        comp_fam = self.toolchain.comp_family()
        if self.toolchain.options.get('usempi', False):
            charm_arch_comp = 'mpicxx'
        else:
            charm_arch_comps = {
                toolchain.GCC: 'gcc',
                toolchain.INTELCOMP: 'icc',
            }
            charm_arch_comp = charm_arch_comps.get(comp_fam, None)
        # compiler suffix used in the NAMD target name (<namd_basearch>-<comp>)
        namd_comps = {
            toolchain.GCC: 'g++',
            toolchain.INTELCOMP: 'icc',
        }
        namd_comp = namd_comps.get(comp_fam, None)
        if charm_arch_comp is None or namd_comp is None:
            raise EasyBuildError("Unknown compiler family, can't complete Charm++/NAMD target architecture.")

        # NOTE: important to add smp BEFORE the compiler
        # charm arch style is: mpi-linux-x86_64-smp-mpicxx
        # otherwise the setting of name_charm_arch below will get things
        # in the wrong order
        if self.toolchain.options.get('openmp', False):
            self.cfg.update('charm_arch', 'smp')
        self.cfg.update('charm_arch', charm_arch_comp)

        self.log.info("Updated 'charm_arch': %s" % self.cfg['charm_arch'])
        self.namd_arch = '%s-%s' % (self.cfg['namd_basearch'], namd_comp)
        self.log.info("Completed NAMD target architecture: %s" % self.namd_arch)


        # build Charm++ inside its unpacked source dir (tarball name minus extension)
        tup = (self.cfg['charm_arch'], self.cfg['charm_opts'], self.cfg['parallel'], os.environ['CXXFLAGS'])
        cmd = "./build charm++ %s %s --with-numa -j%s %s -DMPICH_IGNORE_CXX_SEEK" % tup
        charm_subdir = '.'.join(os.path.basename(self.charm_tarballs[0]).split('.')[:-1])
        self.log.debug("Building Charm++ using cmd '%s' in '%s'" % (cmd, charm_subdir))
        run_cmd(cmd, path=charm_subdir)

        # compiler (options); NAMD >= 2.12 requires C++11 support
        self.cfg.update('namd_cfg_opts', '--cc "%s" --cc-opts "%s"' % (os.environ['CC'], os.environ['CFLAGS']))
        cxxflags = os.environ['CXXFLAGS']
        if LooseVersion(self.version) >= LooseVersion('2.12'):
            cxxflags += ' --std=c++11'
        self.cfg.update('namd_cfg_opts', '--cxx "%s" --cxx-opts "%s"' % (os.environ['CXX'], cxxflags))

        # NAMD dependencies: CUDA, TCL, FFTW
        cuda = get_software_root('CUDA')
        if cuda:
            self.cfg.update('namd_cfg_opts', "--with-cuda --cuda-prefix %s" % cuda)

        tcl = get_software_root('Tcl')
        if tcl:
            self.cfg.update('namd_cfg_opts', '--with-tcl --tcl-prefix %s' % tcl)
            # patch arch file so the Tcl library name matches the loaded Tcl version (e.g. -ltcl8.6)
            tclversion = '.'.join(get_software_version('Tcl').split('.')[0:2])
            tclv_subs = [(r'-ltcl[\d.]*\s', '-ltcl%s ' % tclversion)]
            apply_regex_substitutions(os.path.join('arch', '%s.tcl' % self.cfg['namd_basearch']), tclv_subs)

        fftw = get_software_root('FFTW')
        if fftw:
            if LooseVersion(get_software_version('FFTW')) >= LooseVersion('3.0'):
                if LooseVersion(self.version) >= LooseVersion('2.9'):
                    self.cfg.update('namd_cfg_opts', "--with-fftw3")
                else:
                    raise EasyBuildError("Using FFTW v3.x only supported in NAMD v2.9 and up.")
            else:
                self.cfg.update('namd_cfg_opts', "--with-fftw")
            self.cfg.update('namd_cfg_opts', "--fftw-prefix %s" % fftw)

        # run NAMD's config script with the completed architecture and collected options
        namd_charm_arch = "--charm-arch %s" % '-'.join(self.cfg['charm_arch'].strip().split())
        cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch, self.cfg["namd_cfg_opts"])
        run_cmd(cmd)
Ejemplo n.º 45
0
    def configure_step(self):
        """Configure build:
        - set required environment variables (for netCDF, JasPer)
        - patch compile script and ungrib Makefile for non-default install paths of WRF and JasPer
        - run configure script and figure how to select desired build option
        - patch configure.wps file afterwards to fix 'serial compiler' setting
        """

        # netCDF dependency check + setting env vars (NETCDF, NETCDFF)
        set_netcdf_env_vars(self.log)

        # WRF dependency check
        wrf = get_software_root('WRF')
        if wrf:
            wrfdir = os.path.join(wrf, det_wrf_subdir(get_software_version('WRF')))
        else:
            raise EasyBuildError("WRF module not loaded?")

        # patch compile script so that WRF is found
        self.compile_script = "compile"
        regex_subs = [(r"^(\s*set\s*WRF_DIR_PRE\s*=\s*)\${DEV_TOP}(.*)$", r"\1%s\2" % wrfdir)]
        apply_regex_substitutions(self.compile_script, regex_subs)

        # libpng dependency check
        libpng = get_software_root('libpng')
        zlib = get_software_root('zlib')
        if libpng:
            paths = [libpng]
            if zlib:
                # zlib paths must come first, since libpng depends on zlib
                paths.insert(0, zlib)
            libpnginc = ' '.join(['-I%s' % os.path.join(path, 'include') for path in paths])
            libpnglib = ' '.join(['-L%s' % os.path.join(path, 'lib') for path in paths])
        else:
            # define these as empty, assume that libpng will be available via OS (e.g. due to --filter-deps=libpng)
            libpnglib = ""
            libpnginc = ""

        # JasPer dependency check + setting env vars
        jasper = get_software_root('JasPer')
        if jasper:
            env.setvar('JASPERINC', os.path.join(jasper, "include"))
            jasperlibdir = os.path.join(jasper, "lib")
            env.setvar('JASPERLIB', jasperlibdir)
            jasperlib = "-L%s" % jasperlibdir
        else:
            raise EasyBuildError("JasPer module not loaded?")

        # patch ungrib Makefile so that JasPer is found
        jasperlibs = "%s -ljasper %s -lpng" % (jasperlib, libpnglib)
        regex_subs = [
            (r"^(\s*-L\.\s*-l\$\(LIBTARGET\))(\s*;.*)$", r"\1 %s\2" % jasperlibs),
            (r"^(\s*\$\(COMPRESSION_LIBS\))(\s*;.*)$", r"\1 %s\2" % jasperlibs),
        ]
        apply_regex_substitutions(os.path.join('ungrib', 'src', 'Makefile'), regex_subs)

        # patch arch/Config.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join("arch", "Config.pl"))

        # configure

        # determine build type option to look for in the configure output
        self.comp_fam = self.toolchain.comp_family()
        build_type_option = None

        if LooseVersion(self.version) >= LooseVersion("3.4"):

            # NOTE(review): 'smpar' mapping to 'serial' looks deliberate (WPS offers no
            # smpar build), but should be confirmed against the actual configure output
            knownbuildtypes = {
                'smpar': 'serial',
                'dmpar': 'dmpar'
            }

            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                build_type_option = " Linux x86_64, Intel compiler"

            elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
                build_type_option = "Linux x86_64 g95 compiler"

            else:
                raise EasyBuildError("Don't know how to figure out build type to select.")

        else:

            knownbuildtypes = {
                'smpar': 'serial',
                'dmpar': 'DM parallel'
            }

            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
                build_type_option = "PC Linux x86_64, Intel compiler"

            elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
                build_type_option = "PC Linux x86_64, gfortran compiler,"
                knownbuildtypes['dmpar'] = knownbuildtypes['dmpar'].upper()

            else:
                raise EasyBuildError("Don't know how to figure out build type to select.")

        # check and fetch selected build type
        bt = self.cfg['buildtype']

        # membership test directly on the dict, no need for .keys()
        if bt not in knownbuildtypes:
            raise EasyBuildError("Unknown build type: '%s'. Supported build types: %s", bt, knownbuildtypes.keys())

        # fetch option number based on build type option and selected build type;
        # use a raw string, since this is a regex pattern (\s, \( are regex escapes)
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(?%s\)?\s*\n" % (build_type_option, knownbuildtypes[bt])

        cmd = "./configure"
        qa = {}
        no_qa = [".*compiler is.*"]
        std_qa = {
            # named group in match will be used to construct answer
            r"%s(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: "%(nr)s",
        }

        run_cmd_qa(cmd, qa, no_qa=no_qa, std_qa=std_qa, log_all=True, simple=True)

        # make sure correct compilers and compiler flags are being used
        comps = {
            'SCC': "%s -I$(JASPERINC) %s" % (os.getenv('CC'), libpnginc),
            'SFC': os.getenv('F90'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': os.getenv('MPICC'),
            'FC': os.getenv('MPIF90'),
            'CC': os.getenv('MPICC'),
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % key, r"\1 %s" % val) for (key, val) in comps.items()]
        apply_regex_substitutions('configure.wps', regex_subs)
Ejemplo n.º 46
0
    def configure_step(self):
        """Configure build:
            - set some magic environment variables
            - run configure script
            - adjust configure.wrf file if needed
        """
        # define $NETCDF* for netCDF dependency (used when creating WRF module file)
        set_netcdf_env_vars(self.log)

        # HDF5 (optional) dependency
        hdf5 = get_software_root('HDF5')
        if hdf5:
            # check if this is parallel HDF5 by verifying the parallel-only tools are installed
            phdf5_bins = ['h5pcc', 'ph5diff']
            parallel_hdf5 = all(os.path.exists(os.path.join(hdf5, 'bin', f)) for f in phdf5_bins)
            # bug fix: original check 'not (hdf5 or parallel_hdf5)' could never trigger here,
            # since hdf5 is always truthy in this branch; check parallel_hdf5 directly instead
            if not parallel_hdf5:
                raise EasyBuildError("Parallel HDF5 module not loaded?")
            env.setvar('PHDF5', hdf5)
        else:
            self.log.info("HDF5 module not loaded, assuming that's OK...")

        # JasPer dependency check + setting env vars
        jasper = get_software_root('JasPer')
        if jasper:
            jasperlibdir = os.path.join(jasper, "lib")
            env.setvar('JASPERINC', os.path.join(jasper, "include"))
            env.setvar('JASPERLIB', jasperlibdir)

        else:
            if os.getenv('JASPERINC') or os.getenv('JASPERLIB'):
                raise EasyBuildError("JasPer module not loaded, but JASPERINC and/or JASPERLIB still set?")
            else:
                self.log.info("JasPer module not loaded, assuming that's OK...")

        # enable support for large file support in netCDF
        env.setvar('WRFIO_NCD_LARGE_FILE_SUPPORT', '1')

        # patch arch/Config_new.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join("arch", "Config_new.pl"))

        # determine build type option to look for;
        # raw strings, since these are regex fragments with escaped parentheses/slashes
        build_type_option = None
        self.comp_fam = self.toolchain.comp_family()
        if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            if LooseVersion(self.version) >= LooseVersion('3.7'):
                build_type_option = r"INTEL\ \(ifort\/icc\)"
            else:
                build_type_option = "Linux x86_64 i486 i586 i686, ifort compiler with icc"

        elif self.comp_fam == toolchain.GCC:  # @UndefinedVariable
            if LooseVersion(self.version) >= LooseVersion('3.7'):
                build_type_option = r"GNU\ \(gfortran\/gcc\)"
            else:
                build_type_option = "x86_64 Linux, gfortran compiler with gcc"

        else:
            raise EasyBuildError("Don't know how to figure out build type to select.")

        # fetch selected build type (and make sure it makes sense)
        known_build_types = ['serial', 'smpar', 'dmpar', 'dm+sm']
        self.parallel_build_types = ["dmpar", "smpar", "dm+sm"]
        bt = self.cfg['buildtype']

        if bt not in known_build_types:
            raise EasyBuildError("Unknown build type: '%s'. Supported build types: %s", bt, known_build_types)

        # fetch option number based on build type option and selected build type
        if LooseVersion(self.version) >= LooseVersion('3.7'):
            # the two relevant lines in the configure output for WRF 3.8 are:
            #  13. (serial)  14. (smpar)  15. (dmpar)  16. (dm+sm)   INTEL (ifort/icc)
            #  32. (serial)  33. (smpar)  34. (dmpar)  35. (dm+sm)   GNU (gfortran/gcc)
            build_type_question = r"\s*(?P<nr>[0-9]+)\.\ \(%s\).*%s" % (bt, build_type_option)
        else:
            # the relevant lines in the configure output for WRF 3.6 are:
            #  13.  Linux x86_64 i486 i586 i686, ifort compiler with icc  (serial)
            #  14.  Linux x86_64 i486 i586 i686, ifort compiler with icc  (smpar)
            #  15.  Linux x86_64 i486 i586 i686, ifort compiler with icc  (dmpar)
            #  16.  Linux x86_64 i486 i586 i686, ifort compiler with icc  (dm+sm)
            #  32.  x86_64 Linux, gfortran compiler with gcc   (serial)
            #  33.  x86_64 Linux, gfortran compiler with gcc   (smpar)
            #  34.  x86_64 Linux, gfortran compiler with gcc   (dmpar)
            #  35.  x86_64 Linux, gfortran compiler with gcc   (dm+sm)
            build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(%s\)" % (build_type_option, bt)

        # run configure script
        cmd = "./configure"
        qa = {
            # named group in match will be used to construct answer
            "Compile for nesting? (1=basic, 2=preset moves, 3=vortex following) [default 1]:": "1",
            "Compile for nesting? (0=no nesting, 1=basic, 2=preset moves, 3=vortex following) [default 0]:": "0",
        }
        no_qa = [
            "testing for fseeko and fseeko64",
            r"If you wish to change the default options, edit the file:[\s\n]*arch/configure_new.defaults",
        ]
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: "%(nr)s",
        }

        run_cmd_qa(cmd, qa, no_qa=no_qa, std_qa=std_qa, log_all=True, simple=True)

        cfgfile = 'configure.wrf'

        # make sure correct compilers are being used
        comps = {
            'SCC': os.getenv('CC'),
            'SFC': os.getenv('F90'),
            'CCOMP': os.getenv('CC'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': "%s -DMPI2_SUPPORT" % os.getenv('MPICC'),
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v) for (k, v) in comps.items()]
        apply_regex_substitutions(cfgfile, regex_subs)

        # rewrite optimization options if desired
        if self.cfg['rewriteopts']:

            # replace default -O3 option in configure.wrf with CFLAGS/FFLAGS from environment
            self.log.info("Rewriting optimization options in %s" % cfgfile)

            # set extra flags for Intel compilers
            # see http://software.intel.com/en-us/forums/showthread.php?t=72109&p=1#146748
            if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable

                # -O3 -heap-arrays is required to resolve compilation error
                for envvar in ['CFLAGS', 'FFLAGS']:
                    val = os.getenv(envvar)
                    # guard against unset env var: os.getenv may return None
                    if val and '-O3' in val:
                        env.setvar(envvar, '%s -heap-arrays' % val)
                        self.log.info("Updated %s to '%s'" % (envvar, os.getenv(envvar)))

            # replace -O3 with desired optimization options
            regex_subs = [
                (r"^(FCOPTIM.*)(\s-O3)(\s.*)$", r"\1 %s \3" % os.getenv('FFLAGS')),
                (r"^(CFLAGS_LOCAL.*)(\s-O3)(\s.*)$", r"\1 %s \3" % os.getenv('CFLAGS')),
            ]
            apply_regex_substitutions(cfgfile, regex_subs)
Ejemplo n.º 47
0
        ]

        env.setvar('GMKTMP', self.builddir)
        env.setvar('GMKFILE', self.conf_file)

        run_cmd_qa("gmkfilemaker", qa, std_qa=stdqa, no_qa=no_qa)

        # set environment variables for installation dirs
        env.setvar('ROOTPACK', os.path.join(self.installdir, 'rootpack'))
        env.setvar('ROOTBIN', os.path.join(self.installdir, 'rootpack'))
        env.setvar('HOMEPACK', os.path.join(self.installdir, 'pack'))
        env.setvar('HOMEBIN', os.path.join(self.installdir, 'pack'))

        # patch config file to include right Fortran compiler flags
        regex_subs = [(r"^(FRTFLAGS\s*=.*)$", r"\1 %s" % os.getenv('FFLAGS'))]
        apply_regex_substitutions(self.conf_filepath, regex_subs)

    def build_step(self):
        """Skip the build step: ALADIN is fully built as part of install_step."""

    def test_step(self):
        """Custom built-in test procedure for ALADIN."""
        # only run the test command when 'runtest' is enabled in the easyconfig
        if not self.cfg['runtest']:
            return
        run_cmd("test-command", simple=True, log_all=True, log_output=True)

    def install_step(self):
        """Custom install procedure for ALADIN."""
Ejemplo n.º 48
0
    def configure_step(self):
        """Custom configuration procedure for binutils: statically link to zlib, configure options."""

        # take sysroot configuration option into account;
        # make sure we don't use None going forward since the value is used in os.path.join expressions
        sysroot = build_option('sysroot') or os.path.sep

        libs = ''

        if self.toolchain.is_system_toolchain():
            # determine list of 'lib' directories to use rpath for;
            # this should 'harden' the resulting binutils to bootstrap GCC
            # (no trouble when other libstdc++ is build etc)

            # The installed lib dir must come first though to avoid taking system libs over installed ones, see:
            # https://github.com/easybuilders/easybuild-easyconfigs/issues/10056
            # Escaping: Double $$ for Make, \$ for shell to get literal $ORIGIN in the file
            libdirs = [r'\$\$ORIGIN/../lib']
            for subdir in ['lib', 'lib64', os.path.join('lib', 'x86_64-linux-gnu')]:

                libdir = os.path.join(sysroot, 'usr', subdir)

                # also consider /lib, /lib64 (without /usr/);
                # bug fix: join with the relative subdir — the previous code joined sysroot with
                # the already-absolute libdir, which made alt_libdir always equal to libdir
                # (os.path.join drops earlier components when a later one is absolute)
                alt_libdir = os.path.join(sysroot, subdir)

                if os.path.exists(libdir):
                    libdirs.append(libdir)
                    if os.path.exists(alt_libdir) and not os.path.samefile(libdir, alt_libdir):
                        libdirs.append(alt_libdir)

                elif os.path.exists(alt_libdir):
                    libdirs.append(alt_libdir)

            # Mind the single quotes
            libs += ' '.join("-Wl,-rpath='%s'" % libdir for libdir in libdirs)

        # configure using `--with-system-zlib` if zlib is a (build) dependency
        zlibroot = get_software_root('zlib')
        if zlibroot:
            self.cfg.update('configopts', '--with-system-zlib')

            # statically link to zlib only if it is a build dependency
            # see https://github.com/easybuilders/easybuild-easyblocks/issues/1350
            build_deps = self.cfg.dependencies(build_only=True)
            if any(dep['name'] == 'zlib' for dep in build_deps):
                libz_path = os.path.join(zlibroot, get_software_libdir('zlib'), 'libz.a')

                # for recent binutils versions, we need to override ZLIB in Makefile.in of components
                if LooseVersion(self.version) >= LooseVersion('2.26'):
                    regex_subs = [
                        (r"^(ZLIB\s*=\s*).*$", r"\1%s" % libz_path),
                        (r"^(ZLIBINC\s*=\s*).*$", r"\1-I%s" % os.path.join(zlibroot, 'include')),
                    ]
                    for makefile in glob.glob(os.path.join(self.cfg['start_dir'], '*', 'Makefile.in')):
                        apply_regex_substitutions(makefile, regex_subs)

                # for older versions, injecting the path to the static libz library into $LIBS works
                else:
                    libs += ' ' + libz_path

        # Using double quotes for LIBS to allow single quotes in libs
        self.cfg.update('preconfigopts', 'LIBS="%s"' % libs)
        self.cfg.update('prebuildopts', 'LIBS="%s"' % libs)

        # explicitly configure binutils to use / as sysroot
        # this is required to ensure the binutils installation works correctly with a (system)
        # GCC compiler that was explicitly configured with --with-sysroot=/;
        # we should *not* use the value of the EasyBuild --sysroot configuration option here,
        # since that leads to weird errors where the sysroot path is duplicated, like:
        #   /bin/ld.gold: error: cannot open /<sysroot>/<sysroot>/lib64/libc.so.6: No such file or directory
        self.cfg.update('configopts', '--with-sysroot=/')

        if LooseVersion(self.version) > LooseVersion('2.24'):
            # build both static and shared libraries for recent binutils versions (default is only static)
            self.cfg.update('configopts', "--enable-shared --enable-static")
            # enable gold linker with plugin support, use ld as default linker
            # (merged with the shared/static update above: both used the identical version check)
            self.cfg.update('configopts', "--enable-gold --enable-plugins --enable-ld=default")

        if LooseVersion(self.version) >= LooseVersion('2.34'):
            if self.cfg['use_debuginfod']:
                self.cfg.update('configopts', '--with-debuginfod')
            else:
                self.cfg.update('configopts', '--without-debuginfod')

        # complete configuration with configure_method of parent
        super(EB_binutils, self).configure_step()

        if self.cfg['install_libiberty']:
            cflags = os.getenv('CFLAGS')
            if cflags:
                self.cfg.update('buildopts', 'CFLAGS="$CFLAGS -fPIC"')
            else:
                # if $CFLAGS is not defined, make sure we retain "-g -O2",
                # since not specifying any optimization level implies -O0...
                self.cfg.update('buildopts', 'CFLAGS="-g -O2 -fPIC"')
Ejemplo n.º 49
0
Archivo: imkl.py Proyecto: surak/JSC
class EB_imkl(IntelBase):
    """
    Class that can be used to install mkl
    - tested with 10.2.1.017
    -- will fail for all older versions (due to newer silent installer)
    """
    @staticmethod
    def extra_options():
        """Define easyconfig parameters that are specific to imkl (e.g. interfaces)."""
        return IntelBase.extra_options({
            'interfaces': [True, "Indicates whether interfaces should be built", CUSTOM],
        })

    def __init__(self, *args, **kwargs):
        """Constructor: initialize parent and blacklist problematic environment variables."""
        super(EB_imkl, self).__init__(*args, **kwargs)
        # make sure $MKLROOT isn't set, it's known to cause problems with the installation
        self.cfg.update('unwanted_env_vars', ['MKLROOT'])

    def install_step(self):
        """
        Actual installation:
        - create silent cfg file
        - execute installation command
        """
        loose_ver = LooseVersion(self.version)

        # since imkl v11.1, silent.cfg has been slightly changed to be 'more standard';
        # older versions need the 2012-style activation/license entry names
        names_map = None
        if loose_ver < LooseVersion('11.1'):
            names_map = {
                'activation_name': ACTIVATION_NAME_2012,
                'license_file_name': LICENSE_FILE_NAME_2012,
            }

        # install all components by default for recent versions, unless a selection was made
        extras = None
        if loose_ver >= LooseVersion('11.1') and self.install_components is None:
            extras = {
                'COMPONENTS': 'ALL',
            }

        super(EB_imkl, self).install_step(silent_cfg_names_map=names_map, silent_cfg_extras=extras)

    def make_module_req_guess(self):
        """
        A dictionary of possible directories to look for
        """
        ver = LooseVersion(self.version)

        if ver >= LooseVersion('10.3'):
            # layout changed with imkl 10.3: everything lives under the mkl/ subdirectory
            if self.cfg['m32']:
                raise EasyBuildError(
                    "32-bit not supported yet for IMKL v%s (>= 10.3)",
                    self.version)

            guesses = {
                'PATH': [
                    'bin', 'mkl/bin', 'mkl/bin/intel64',
                    'composerxe-2011/bin'
                ],
                'LD_LIBRARY_PATH': ['lib/intel64', 'mkl/lib/intel64'],
                'LIBRARY_PATH': ['lib/intel64', 'mkl/lib/intel64'],
                'MANPATH': ['man', 'man/en_US'],
                'CPATH': ['mkl/include', 'mkl/include/fftw'],
                'FPATH': ['mkl/include', 'mkl/include/fftw'],
            }
            # MIC library locations moved around across 11.x releases
            if ver >= LooseVersion('11.0'):
                if ver >= LooseVersion('11.3'):
                    mic_dirs = ['lib/intel64_lin_mic', 'mkl/lib/mic']
                elif ver >= LooseVersion('11.1'):
                    mic_dirs = ['lib/mic', 'mkl/lib/mic']
                else:
                    mic_dirs = ['compiler/lib/mic', 'mkl/lib/mic']
                guesses['MIC_LD_LIBRARY_PATH'] = mic_dirs
            return guesses

        # pre-10.3 layout: 32-bit and 64-bit installs use different bin/lib subdirectories
        if self.cfg['m32']:
            bin_dirs = ['bin', 'bin/ia32', 'tbb/bin/ia32']
            lib_dirs = ['lib', 'lib/32']
        else:
            bin_dirs = ['bin', 'bin/intel64', 'tbb/bin/em64t']
            lib_dirs = ['lib', 'lib/em64t']

        return {
            'PATH': bin_dirs,
            'LD_LIBRARY_PATH': lib_dirs,
            'LIBRARY_PATH': lib_dirs,
            'MANPATH': ['man', 'share/man', 'man/en_US'],
            'CPATH': ['include'],
            'FPATH': ['include'],
        }

    def make_module_extra(self):
        """Overwritten from Application to add extra txt"""
        mklroot = os.path.join(self.installdir, 'mkl')
        txt = super(EB_imkl, self).make_module_extra()
        # also define $MKLROOT, pointing to the mkl/ subdirectory of the install
        return txt + self.module_generator.set_environment('MKLROOT', mklroot)

    def post_install_step(self):
        """
        Install group libraries and interfaces (if desired).

        Writes GROUP linker scripts so generic names like libmkl resolve to the
        right set of actual MKL libraries, and optionally builds the FFTW/cdft
        interface wrapper libraries shipped with imkl (when the 'interfaces'
        easyconfig parameter is enabled).
        """
        super(EB_imkl, self).post_install_step()

        shlib_ext = get_shared_lib_ext()

        # reload the dependencies
        self.load_dependency_modules()

        # GROUP linker scripts to create; contents differ for 32-bit vs 64-bit builds
        if self.cfg['m32']:
            extra = {
                'libmkl.%s' % shlib_ext: 'GROUP (-lmkl_intel -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_core.a)',
                'libmkl_lapack.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }
        else:
            extra = {
                'libmkl.%s' % shlib_ext: 'GROUP (-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver_lp64.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_lp64.a)',
                'libmkl_lapack.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }

        # library subdirectory changed location as of imkl 10.3
        if LooseVersion(self.version) >= LooseVersion('10.3'):
            libsubdir = os.path.join('mkl', 'lib', 'intel64')
        else:
            if self.cfg['m32']:
                libsubdir = os.path.join('lib', '32')
            else:
                libsubdir = os.path.join('lib', 'em64t')

        for fil, txt in extra.items():
            dest = os.path.join(self.installdir, libsubdir, fil)
            if not os.path.exists(dest):
                try:
                    # context manager ensures the handle is closed even if write() fails
                    with open(dest, 'w') as f:
                        f.write(txt)
                    self.log.info("File %s written" % dest)
                except IOError as err:
                    raise EasyBuildError("Can't write file %s: %s", dest, err)

        # build the mkl interfaces, if desired
        if self.cfg['interfaces']:

            if LooseVersion(self.version) >= LooseVersion('10.3'):
                intsubdir = os.path.join('mkl', 'interfaces')
                inttarget = 'libintel64'
            else:
                intsubdir = 'interfaces'
                if self.cfg['m32']:
                    inttarget = 'lib32'
                else:
                    inttarget = 'libem64t'

            cmd = "make -f makefile %s" % inttarget

            # blas95 and lapack95 need more work, ignore for now
            # blas95 and lapack also need include/.mod to be processed
            fftw2libs = ['fftw2xc', 'fftw2xf']
            fftw3libs = ['fftw3xc', 'fftw3xf']
            cdftlibs = ['fftw2x_cdft']
            if LooseVersion(self.version) >= LooseVersion('10.3'):
                cdftlibs.append('fftw3x_cdft')

            interfacedir = os.path.join(self.installdir, intsubdir)
            try:
                os.chdir(interfacedir)
                self.log.info("Changed to interfaces directory %s" % interfacedir)
            except OSError as err:
                # include the underlying OS error in the message (it used to be dropped)
                raise EasyBuildError("Can't change to interfaces directory %s: %s", interfacedir, err)

            compopt = None
            # determine whether we're using a non-Intel GCC-based or PGI-based toolchain
            # can't use toolchain.comp_family, because of dummy toolchain used when installing imkl
            if get_software_root('icc') is None:
                # check for PGI first, since there's a GCC underneath PGI too...
                if get_software_root('PGI'):
                    compopt = 'compiler=pgi'
                elif get_software_root('GCC'):
                    compopt = 'compiler=gnu'
                else:
                    raise EasyBuildError("Not using Intel/GCC/PGI compilers, don't know how to build wrapper libs")
            else:
                compopt = 'compiler=intel'

            # patch makefiles for cdft wrappers when PGI is used as compiler
            if get_software_root('PGI'):
                regex_subs = [
                    # pgi should be considered as a valid compiler
                    ("intel gnu", "intel gnu pgi"),
                    # transform 'gnu' case to 'pgi' case
                    (r"ifeq \(\$\(compiler\),gnu\)", "ifeq ($(compiler),pgi)"),
                    ('=gcc', '=pgcc'),
                    # correct flag to use C99 standard
                    ('-std=c99', '-c99'),
                    # -Wall and -Werror are not valid options for pgcc, no close equivalent
                    ('-Wall', ''),
                    ('-Werror', ''),
                ]
                for lib in cdftlibs:
                    apply_regex_substitutions(os.path.join(interfacedir, lib, 'makefile'), regex_subs)

            for lib in fftw2libs + fftw3libs + cdftlibs:
                buildopts = [compopt]
                if lib in fftw3libs:
                    buildopts.append('install_to=$INSTALL_DIR')
                elif lib in cdftlibs:
                    mpi_spec = None
                    # check whether MPI_FAMILY constant is defined, so mpi_family() can be used
                    if hasattr(self.toolchain, 'MPI_FAMILY') and self.toolchain.MPI_FAMILY is not None:
                        mpi_spec_by_fam = {
                            toolchain.MPICH: 'mpich2',  # MPICH is MPICH v3.x, which is MPICH2 compatible
                            toolchain.MPICH2: 'mpich2',
                            toolchain.MVAPICH2: 'mpich2',
                            toolchain.OPENMPI: 'openmpi',
                        }
                        mpi_fam = self.toolchain.mpi_family()
                        mpi_spec = mpi_spec_by_fam.get(mpi_fam)
                        self.log.debug("Determined MPI specification based on MPI toolchain component: %s" % mpi_spec)
                    else:
                        # can't use toolchain.mpi_family, because of dummy toolchain
                        if get_software_root('MPICH2') or get_software_root('MVAPICH2'):
                            mpi_spec = 'mpich2'
                        elif get_software_root('OpenMPI'):
                            mpi_spec = 'openmpi'
                        self.log.debug("Determined MPI specification based on loaded MPI module: %s" % mpi_spec)

                    if mpi_spec is not None:
                        buildopts.append('mpi=%s' % mpi_spec)

                precflags = ['']
                if lib.startswith('fftw2x') and not self.cfg['m32']:
                    # build both single and double precision variants
                    precflags = ['PRECISION=MKL_DOUBLE', 'PRECISION=MKL_SINGLE']

                intflags = ['']
                if lib in cdftlibs and not self.cfg['m32']:
                    # build both 32-bit and 64-bit interfaces
                    intflags = ['interface=lp64', 'interface=ilp64']

                allopts = [list(opts) for opts in itertools.product(intflags, precflags)]

                for flags, extraopts in itertools.product(['', '-fPIC'], allopts):
                    tup = (lib, flags, buildopts, extraopts)
                    self.log.debug("Building lib %s with: flags %s, buildopts %s, extraopts %s" % tup)

                    tmpbuild = tempfile.mkdtemp(dir=self.builddir)
                    self.log.debug("Created temporary directory %s" % tmpbuild)

                    # always set INSTALL_DIR, SPEC_OPT, COPTS and CFLAGS
                    # fftw2x(c|f): use $INSTALL_DIR, $CFLAGS and $COPTS
                    # fftw3x(c|f): use $CFLAGS
                    # fftw*cdft: use $INSTALL_DIR and $SPEC_OPT
                    env.setvar('INSTALL_DIR', tmpbuild)
                    env.setvar('SPEC_OPT', flags)
                    env.setvar('COPTS', flags)
                    env.setvar('CFLAGS', flags)

                    try:
                        intdir = os.path.join(interfacedir, lib)
                        os.chdir(intdir)
                        self.log.info("Changed to interface %s directory %s" % (lib, intdir))
                    except OSError as err:
                        raise EasyBuildError("Can't change to interface %s directory %s: %s", lib, intdir, err)

                    fullcmd = "%s %s" % (cmd, ' '.join(buildopts + extraopts))
                    res = run_cmd(fullcmd, log_all=True, simple=True)
                    if not res:
                        raise EasyBuildError("Building %s (flags: %s, fullcmd: %s) failed", lib, flags, fullcmd)

                    # move the wrapper libs to the install dir, tagging -fPIC builds with a _pic suffix
                    for fn in os.listdir(tmpbuild):
                        src = os.path.join(tmpbuild, fn)
                        if flags == '-fPIC':
                            # add _pic to filename
                            ff = fn.split('.')
                            fn = '.'.join(ff[:-1]) + '_pic.' + ff[-1]
                        dest = os.path.join(self.installdir, libsubdir, fn)
                        try:
                            if os.path.isfile(src):
                                shutil.move(src, dest)
                                self.log.info("Moved %s to %s" % (src, dest))
                        except OSError as err:
                            raise EasyBuildError("Failed to move %s to %s: %s", src, dest, err)

                    rmtree2(tmpbuild)
Ejemplo n.º 50
0
    def install_step(self):
        """
        Custom install procedure for Trinity.

        Old (mid-2012) releases are built component by component via the
        dedicated helper methods; newer releases are built through the
        top-level Makefile after patching in the right compiler/library flags.
        """

        version = LooseVersion(self.version)
        if version > LooseVersion('2012') and version < LooseVersion('2012-10-05'):
            # old-style build: each component has its own helper
            self.inchworm()
            self.chrysalis()
            self.kmer()
            self.butterfly()

            bwapluginver = self.cfg['bwapluginver']
            if bwapluginver:
                self.trinityplugin('bwa-%s-patched_multi_map' % bwapluginver)

            if self.cfg['RSEMmod']:
                self.trinityplugin('RSEM-mod', cc=os.getenv('CXX'))

        else:
            self.jellyfish()

            # collect (but don't run) the configure/make flags for the components
            # that are built via the top-level Makefile
            inchworm_flags = self.inchworm(run=False)
            chrysalis_flags = self.chrysalis(run=False)

            cc = os.getenv('CC')
            cxx = os.getenv('CXX')

            # extra -L flags for libraries provided as dependencies
            lib_flags = ""
            for lib in ['ncurses', 'zlib']:
                libroot = get_software_root(lib)
                if libroot:
                    lib_flags += " -L%s/lib" % libroot

            if version >= LooseVersion('2.0') and version < LooseVersion('3.0'):
                # Trinity 2.x Makefiles indent these variables with a leading space
                regex_subs = [
                    (r'^( INCHWORM_CONFIGURE_FLAGS\s*=\s*).*$', r'\1%s' % inchworm_flags),
                    (r'^( CHRYSALIS_MAKE_FLAGS\s*=\s*).*$', r'\1%s' % chrysalis_flags),
                ]
            else:
                regex_subs = [
                    (r'^(INCHWORM_CONFIGURE_FLAGS\s*=\s*).*$', r'\1%s' % inchworm_flags),
                    (r'^(CHRYSALIS_MAKE_FLAGS\s*=\s*).*$', r'\1%s' % chrysalis_flags),
                    (r'(/rsem && \$\(MAKE\))\s*$',
                     r'\1 CC=%s CXX="%s %s" CFLAGS_EXTRA="%s"\n' % (cc, cxx, lib_flags, lib_flags)),
                    (r'(/fastool && \$\(MAKE\))\s*$',
                     r'\1 CC="%s -std=c99" CFLAGS="%s ${CFLAGS}"\n' % (cc, lib_flags)),
                ]
            apply_regex_substitutions('Makefile', regex_subs)

            # map toolchain compiler family onto Trinity's TRINITY_COMPILER setting
            trinity_compiler = None
            comp_fam = self.toolchain.comp_family()
            if comp_fam in [toolchain.INTELCOMP]:
                trinity_compiler = "intel"
            elif comp_fam in [toolchain.GCC]:
                trinity_compiler = "gcc"
            else:
                raise EasyBuildError("Don't know how to set TRINITY_COMPILER for %s compiler", comp_fam)

            explicit_make_args = ''
            if version >= LooseVersion('2.0') and version < LooseVersion('3.0'):
                explicit_make_args = 'all plugins'

            cmd = "make TRINITY_COMPILER=%s %s" % (trinity_compiler, explicit_make_args)
            run_cmd(cmd)

            # butterfly is not included in standard build
            self.butterfly()

        # remove sample data if desired
        if not self.cfg['withsampledata']:
            try:
                shutil.rmtree(os.path.join(self.cfg['start_dir'], 'sample_data'))
            except OSError as err:
                raise EasyBuildError("Failed to remove sample data: %s", err)
Ejemplo n.º 51
0
    def fixup_hardcoded_paths(self):
        """Patch out hard coded paths to compiler and binutils tools"""
        binutils_root = get_software_root('binutils')
        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # only patch Bazel scripts if binutils & GCC installation prefix could be determined
        if not (binutils_root and gcc_root):
            self.log.info("Not patching Bazel build scripts, installation prefix for binutils/GCC not found")
            return

        binutils_bin = os.path.join(binutils_root, 'bin')
        bazel_version = LooseVersion(self.version)

        # replace hardcoded paths in (unix_)cc_configure.bzl;
        # those hard-coded paths were removed as of Bazel 0.19.0
        if bazel_version < LooseVersion('0.19.0'):
            substitutions = [
                (r'-B/usr/bin', '-B%s' % binutils_bin),
                (r'"/usr/bin', '"' + binutils_bin),
            ]
            for bzl_name in ('cc_configure.bzl', 'unix_cc_configure.bzl'):
                bzl_path = os.path.join('tools', 'cpp', bzl_name)
                if os.path.exists(bzl_path):
                    apply_regex_substitutions(bzl_path, substitutions)

        # replace hardcoded paths in CROSSTOOL;
        # the CROSSTOOL script is no longer there as of Bazel 0.24.0
        if bazel_version < LooseVersion('0.24.0'):
            # pinpoint GCC's own include directory (there must be exactly one match)
            matches = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if len(matches) == 1:
                gcc_lib_inc = matches[0]
            else:
                raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", matches)

            # sibling 'include-fixed' directory, with fallback to the plain include dir
            gcc_lib_inc_bis = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
            if not os.path.exists(gcc_lib_inc_bis):
                self.log.info("Derived directory %s does not exist, falling back to %s",
                              gcc_lib_inc_bis, gcc_lib_inc)
                gcc_lib_inc_bis = gcc_lib_inc

            # C++ headers must be present, no fallback possible here
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
            if not os.path.exists(gcc_cplusplus_inc):
                raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)

            substitutions = [
                (r'-B/usr/bin', '-B%s' % binutils_bin),
                (r'(cxx_builtin_include_directory:.*)/usr/lib/gcc', r'\1' + gcc_lib_inc),
                (r'(cxx_builtin_include_directory:.*)/usr/local/include', r'\1' + gcc_lib_inc_bis),
                (r'(cxx_builtin_include_directory:.*)/usr/include', r'\1' + gcc_cplusplus_inc),
            ]
            # redirect hardcoded /usr/bin/<tool> references to the tools actually in $PATH
            for tool in ('ar', 'cpp', 'dwp', 'gcc', 'ld'):
                tool_path = which(tool)
                if not tool_path:
                    raise EasyBuildError("Failed to determine path to '%s'", tool)
                substitutions.append((os.path.join('/usr', 'bin', tool), tool_path))

            apply_regex_substitutions(os.path.join('tools', 'cpp', 'CROSSTOOL'), substitutions)
Ejemplo n.º 52
0
    def post_install_step(self):
        """
        Install group libraries and interfaces (if desired).

        Writes GROUP linker scripts so generic names like libmkl resolve to the
        right set of actual MKL libraries, and optionally builds the FFTW/cdft
        interface wrapper libraries shipped with imkl (when the 'interfaces'
        easyconfig parameter is enabled).
        """
        super(EB_imkl, self).post_install_step()

        shlib_ext = get_shared_lib_ext()

        # reload the dependencies
        self.load_dependency_modules()

        # GROUP linker scripts to create; contents differ for 32-bit vs 64-bit builds
        if self.cfg['m32']:
            extra = {
                'libmkl.%s' % shlib_ext : 'GROUP (-lmkl_intel -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_core.a)',
                'libmkl_lapack.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }
        else:
            extra = {
                'libmkl.%s' % shlib_ext: 'GROUP (-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core)',
                'libmkl_em64t.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_solver.a': 'GROUP (libmkl_solver_lp64.a)',
                'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_lp64.a)',
                'libmkl_lapack.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)',
                'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)'
            }

        # library subdirectory changed location as of imkl 10.3
        if LooseVersion(self.version) >= LooseVersion('10.3'):
            libsubdir = os.path.join('mkl', 'lib', 'intel64')
        else:
            if self.cfg['m32']:
                libsubdir = os.path.join('lib', '32')
            else:
                libsubdir = os.path.join('lib', 'em64t')

        for fil, txt in extra.items():
            dest = os.path.join(self.installdir, libsubdir, fil)
            if not os.path.exists(dest):
                try:
                    # context manager ensures the handle is closed even if write() fails
                    with open(dest, 'w') as f:
                        f.write(txt)
                    self.log.info("File %s written" % dest)
                except IOError as err:
                    raise EasyBuildError("Can't write file %s: %s", dest, err)

        # build the mkl interfaces, if desired
        if self.cfg['interfaces']:

            if LooseVersion(self.version) >= LooseVersion('10.3'):
                intsubdir = os.path.join('mkl', 'interfaces')
                inttarget = 'libintel64'
            else:
                intsubdir = 'interfaces'
                if self.cfg['m32']:
                    inttarget = 'lib32'
                else:
                    inttarget = 'libem64t'

            cmd = "make -f makefile %s" % inttarget

            # blas95 and lapack95 need more work, ignore for now
            # blas95 and lapack also need include/.mod to be processed
            fftw2libs = ['fftw2xc', 'fftw2xf']
            fftw3libs = ['fftw3xc', 'fftw3xf']

            interfacedir = os.path.join(self.installdir, intsubdir)
            change_dir(interfacedir)
            self.log.info("Changed to interfaces directory %s", interfacedir)

            compopt = None
            # determine whether we're using a non-Intel GCC-based or PGI-based toolchain
            # can't use toolchain.comp_family, because of system toolchain used when installing imkl
            if get_software_root('icc') is None:
                # check for PGI first, since there's a GCC underneath PGI too...
                if get_software_root('PGI'):
                    compopt = 'compiler=pgi'
                elif get_software_root('GCC'):
                    compopt = 'compiler=gnu'
                else:
                    raise EasyBuildError("Not using Intel/GCC/PGI compilers, don't know how to build wrapper libs")
            else:
                compopt = 'compiler=intel'

            # patch makefiles for cdft wrappers when PGI is used as compiler
            if get_software_root('PGI'):
                regex_subs = [
                    # pgi should be considered as a valid compiler
                    ("intel gnu", "intel gnu pgi"),
                    # transform 'gnu' case to 'pgi' case
                    (r"ifeq \(\$\(compiler\),gnu\)", "ifeq ($(compiler),pgi)"),
                    ('=gcc', '=pgcc'),
                    # correct flag to use C99 standard
                    ('-std=c99', '-c99'),
                    # -Wall and -Werror are not valid options for pgcc, no close equivalent
                    ('-Wall', ''),
                    ('-Werror', ''),
                ]
                for lib in self.cdftlibs:
                    apply_regex_substitutions(os.path.join(interfacedir, lib, 'makefile'), regex_subs)

            for lib in fftw2libs + fftw3libs + self.cdftlibs:
                buildopts = [compopt]
                if lib in fftw3libs:
                    buildopts.append('install_to=$INSTALL_DIR')
                elif lib in self.cdftlibs:
                    if self.mpi_spec is not None:
                        buildopts.append('mpi=%s' % self.mpi_spec)

                precflags = ['']
                if lib.startswith('fftw2x') and not self.cfg['m32']:
                    # build both single and double precision variants
                    precflags = ['PRECISION=MKL_DOUBLE', 'PRECISION=MKL_SINGLE']

                intflags = ['']
                if lib in self.cdftlibs and not self.cfg['m32']:
                    # build both 32-bit and 64-bit interfaces
                    intflags = ['interface=lp64', 'interface=ilp64']

                allopts = [list(opts) for opts in itertools.product(intflags, precflags)]

                for flags, extraopts in itertools.product(['', '-fPIC'], allopts):
                    tup = (lib, flags, buildopts, extraopts)
                    self.log.debug("Building lib %s with: flags %s, buildopts %s, extraopts %s" % tup)

                    tmpbuild = tempfile.mkdtemp(dir=self.builddir)
                    self.log.debug("Created temporary directory %s" % tmpbuild)

                    # always set INSTALL_DIR, SPEC_OPT, COPTS and CFLAGS
                    # fftw2x(c|f): use $INSTALL_DIR, $CFLAGS and $COPTS
                    # fftw3x(c|f): use $CFLAGS
                    # fftw*cdft: use $INSTALL_DIR and $SPEC_OPT
                    env.setvar('INSTALL_DIR', tmpbuild)
                    env.setvar('SPEC_OPT', flags)
                    env.setvar('COPTS', flags)
                    env.setvar('CFLAGS', flags)

                    try:
                        intdir = os.path.join(interfacedir, lib)
                        os.chdir(intdir)
                        self.log.info("Changed to interface %s directory %s" % (lib, intdir))
                    except OSError as err:
                        raise EasyBuildError("Can't change to interface %s directory %s: %s", lib, intdir, err)

                    fullcmd = "%s %s" % (cmd, ' '.join(buildopts + extraopts))
                    res = run_cmd(fullcmd, log_all=True, simple=True)
                    if not res:
                        raise EasyBuildError("Building %s (flags: %s, fullcmd: %s) failed", lib, flags, fullcmd)

                    # move the wrapper libs to the install dir, tagging -fPIC builds with a _pic suffix
                    for fn in os.listdir(tmpbuild):
                        src = os.path.join(tmpbuild, fn)
                        if flags == '-fPIC':
                            # add _pic to filename
                            ff = fn.split('.')
                            fn = '.'.join(ff[:-1]) + '_pic.' + ff[-1]
                        dest = os.path.join(self.installdir, libsubdir, fn)
                        try:
                            if os.path.isfile(src):
                                shutil.move(src, dest)
                                self.log.info("Moved %s to %s" % (src, dest))
                        except OSError as err:
                            raise EasyBuildError("Failed to move %s to %s: %s", src, dest, err)

                    remove_dir(tmpbuild)
Ejemplo n.º 53
0
    def configure_step(self):
        """
        Custom configure and build procedure for Siesta.
        - There are two main builds to do, siesta and transiesta
        - In addition there are multiple support tools to build
        """

        start_dir = self.cfg['start_dir']
        obj_dir = os.path.join(start_dir, 'Obj')
        arch_make = os.path.join(obj_dir, 'arch.make')
        bindir = os.path.join(start_dir, 'bin')

        # parallel make is only used for Siesta >= 4.1
        par = ''
        if LooseVersion(self.version) >= LooseVersion('4.1'):
            par = '-j %s' % self.cfg['parallel']

        # enable OpenMP support if desired
        env_var_suff = ''
        if self.toolchain.options.get('openmp', None):
            env_var_suff = '_MT'

        # BLACS deliberately reuses the ScaLAPACK libraries
        scalapack = os.environ['LIBSCALAPACK' + env_var_suff]
        blacs = os.environ['LIBSCALAPACK' + env_var_suff]
        lapack = os.environ['LIBLAPACK' + env_var_suff]
        blas = os.environ['LIBBLAS' + env_var_suff]
        if get_software_root('imkl') or get_software_root('FFTW'):
            fftw = os.environ['LIBFFT' + env_var_suff]
        else:
            fftw = None

        # regex_subs are applied in one pass below; regex_newlines insert new
        # lines into arch.make and are applied one at a time at the end
        regex_newlines = []
        regex_subs = [
            ('dc_lapack.a', ''),
            (r'^NETCDF_INTERFACE\s*=.*$', ''),
            ('libsiestaBLAS.a', ''),
            ('libsiestaLAPACK.a', ''),
            # Needed here to allow 4.1-b1 to be built with openmp
            (r"^(LDFLAGS\s*=).*$", r"\1 %s %s" % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
        ]

        netcdff_loc = get_software_root('netCDF-Fortran')
        if netcdff_loc:
            # Needed for gfortran at least
            regex_newlines.append((r"^(ARFLAGS_EXTRA\s*=.*)$", r"\1\nNETCDF_INCFLAGS = -I%s/include" % netcdff_loc))

        if fftw:
            fft_inc, fft_lib = os.environ['FFT_INC_DIR'], os.environ['FFT_LIB_DIR']
            fppflags = r"\1\nFFTW_INCFLAGS = -I%s\nFFTW_LIBS = -L%s %s" % (fft_inc, fft_lib, fftw)
            regex_newlines.append((r'(FPPFLAGS\s*=.*)$', fppflags))

        # Make a temp installdir during the build of the various parts
        mkdir(bindir)

        # change to actual build dir
        change_dir(obj_dir)

        # Populate start_dir with makefiles
        run_cmd(os.path.join(start_dir, 'Src', 'obj_setup.sh'), log_all=True, simple=True, log_output=True)

        # versions before 4.1-b2 ship a configure script; newer ones only have template arch.make files
        if LooseVersion(self.version) < LooseVersion('4.1-b2'):
            # MPI?
            if self.toolchain.options.get('usempi', None):
                self.cfg.update('configopts', '--enable-mpi')

            # BLAS and LAPACK
            self.cfg.update('configopts', '--with-blas="%s"' % blas)
            self.cfg.update('configopts', '--with-lapack="%s"' % lapack)

            # ScaLAPACK (and BLACS)
            self.cfg.update('configopts', '--with-scalapack="%s"' % scalapack)
            self.cfg.update('configopts', '--with-blacs="%s"' % blacs)

            # NetCDF-Fortran
            if netcdff_loc:
                self.cfg.update('configopts', '--with-netcdf=-lnetcdff')

            # Configure is run in obj_dir, configure script is in ../Src
            super(EB_Siesta, self).configure_step(cmd_prefix='../Src/')

            if LooseVersion(self.version) > LooseVersion('4.0'):
                # fix missing space between $(CFLAGS) and -c in generated Makefile
                regex_subs_Makefile = [
                    (r'CFLAGS\)-c', r'CFLAGS) -c'),
                ]
                apply_regex_substitutions('Makefile', regex_subs_Makefile)

        else: # there's no configure on newer versions

            # start from the template arch.make matching the compiler family
            if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
                copy_file(os.path.join(obj_dir, 'intel.make'), arch_make)
            elif self.toolchain.comp_family() in [toolchain.GCC]:
                copy_file(os.path.join(obj_dir, 'gfortran.make'), arch_make)
            else:
                raise EasyBuildError("There is currently no support for compiler: %s", self.toolchain.comp_family())

            if self.toolchain.options.get('usempi', None):
                # switch to MPI compiler wrappers and enable the MPI interface
                regex_subs.extend([
                    (r"^(CC\s*=\s*).*$", r"\1%s" % os.environ['MPICC']),
                    (r"^(FC\s*=\s*).*$", r"\1%s" % os.environ['MPIF90']),
                    (r"^(FPPFLAGS\s*=.*)$", r"\1 -DMPI"),
                ])
                regex_newlines.append((r"^(FPPFLAGS\s*=.*)$", r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = ."))
                complibs = scalapack
            else:
                complibs = lapack

            regex_subs.extend([
                (r"^(LIBS\s*=\s).*$", r"\1 %s" % complibs),
                # Needed for a couple of the utils
                (r"^(FFLAGS\s*=\s*).*$", r"\1 -fPIC %s" % os.environ['FCFLAGS']),
            ])
            regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\nWXML = libwxml.a"))

            if netcdff_loc:
                # enable NetCDF output support
                regex_subs.extend([
                    (r"^(LIBS\s*=.*)$", r"\1 $(NETCDF_LIBS)"),
                    (r"^(FPPFLAGS\s*=.*)$", r"\1 -DCDF"),
                ])
                regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\nNETCDF_LIBS = -lnetcdff"))

        apply_regex_substitutions(arch_make, regex_subs)

        # individually apply substitutions that add lines
        for regex_nl in regex_newlines:
            apply_regex_substitutions(arch_make, [regex_nl])

        run_cmd('make %s' % par, log_all=True, simple=True, log_output=True)

        # Put binary in temporary install dir
        copy_file(os.path.join(obj_dir, 'siesta'), bindir)

        if self.cfg['with_utils']:
            # Make the utils
            change_dir(os.path.join(start_dir, 'Util'))

            # clean_all.sh might be missing executable bit...
            adjust_permissions('./clean_all.sh', stat.S_IXUSR, recursive=False, relative=True)
            run_cmd('./clean_all.sh', log_all=True, simple=True, log_output=True)

            if LooseVersion(self.version) >= LooseVersion('4.1'):
                # give the tshs2tshs util an explicit EXE/default target and access to Obj headers
                regex_subs_TS = [
                    (r"^default:.*$", r""),
                    (r"^EXE\s*=.*$", r""),
                    (r"^(include\s*..ARCH_MAKE.*)$", r"EXE=tshs2tshs\ndefault: $(EXE)\n\1"),
                    (r"^(INCFLAGS.*)$", r"\1 -I%s" % obj_dir),
                ]

                makefile = os.path.join(start_dir, 'Util', 'TS', 'tshs2tshs', 'Makefile')
                apply_regex_substitutions(makefile, regex_subs_TS)

            # SUFFIX rules in wrong place
            regex_subs_suffix = [
                (r'^(\.SUFFIXES:.*)$', r''),
                (r'^(include\s*\$\(ARCH_MAKE\).*)$', r'\1\n.SUFFIXES:\n.SUFFIXES: .c .f .F .o .a .f90 .F90'),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Sockets', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)
            makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'SimpleTest', 'Src', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_suffix)

            # inject FCFLAGS/LDFLAGS into util link commands
            regex_subs_UtilLDFLAGS = [
                (r'(\$\(FC\)\s*-o\s)', r'$(FC) %s %s -o ' % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])),
            ]
            makefile = os.path.join(start_dir, 'Util', 'Optimizer', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)
            makefile = os.path.join(start_dir, 'Util', 'JobList', 'Src', 'Makefile')
            apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS)

            run_cmd('./build_all.sh', log_all=True, simple=True, log_output=True)

            # Now move all the built utils to the temp installdir
            expected_utils = [
                'Bands/eigfat2plot',
                'CMLComp/ccViz',
                'Contrib/APostnikov/eig2bxsf', 'Contrib/APostnikov/rho2xsf',
                'Contrib/APostnikov/vib2xsf', 'Contrib/APostnikov/fmpdos',
                'Contrib/APostnikov/xv2xsf', 'Contrib/APostnikov/md2axsf',
                'COOP/mprop', 'COOP/fat',
                'Denchar/Src/denchar',
                'DensityMatrix/dm2cdf', 'DensityMatrix/cdf2dm',
                'Eig2DOS/Eig2DOS',
                'Gen-basis/ioncat', 'Gen-basis/gen-basis',
                'Grid/cdf2grid', 'Grid/cdf_laplacian', 'Grid/cdf2xsf',
                'Grid/grid2cube',
                'Grid/grid_rotate', 'Grid/g2c_ng', 'Grid/grid2cdf', 'Grid/grid2val',
                'Helpers/get_chem_labels',
                'HSX/hs2hsx', 'HSX/hsx2hs',
                'JobList/Src/getResults', 'JobList/Src/countJobs',
                'JobList/Src/runJobs', 'JobList/Src/horizontal',
                'Macroave/Src/macroave',
                'ON/lwf2cdf',
                'Optimizer/simplex', 'Optimizer/swarm',
                'pdosxml/pdosxml',
                'Projections/orbmol_proj',
                'SiestaSubroutine/FmixMD/Src/driver',
                'SiestaSubroutine/FmixMD/Src/para',
                'SiestaSubroutine/FmixMD/Src/simple',
                'STM/simple-stm/plstm', 'STM/ol-stm/Src/stm',
                'VCA/mixps', 'VCA/fractional',
                'Vibra/Src/vibra', 'Vibra/Src/fcbuild',
                'WFS/info_wfsx', 'WFS/wfsx2wfs',
                'WFS/readwfx', 'WFS/wfsnc2wfsx', 'WFS/readwf', 'WFS/wfs2wfsx',
            ]

            # version-specific utils
            if LooseVersion(self.version) <= LooseVersion('4.0'):
                expected_utils.extend([
                    'Bands/new.gnubands',
                    'TBTrans/tbtrans',
                ])

            if LooseVersion(self.version) >= LooseVersion('4.0'):
                expected_utils.extend([
                    'SiestaSubroutine/ProtoNEB/Src/protoNEB',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_parallel',
                    'SiestaSubroutine/SimpleTest/Src/simple_pipes_serial',
                    'Sockets/f2fmaster', 'Sockets/f2fslave',
                ])

            if LooseVersion(self.version) >= LooseVersion('4.1'):
                expected_utils.extend([
                    'Bands/gnubands',
                    'Grimme/fdf2grimme',
                    'SpPivot/pvtsp',
                    'TS/ts2ts/ts2ts', 'TS/tshs2tshs/tshs2tshs', 'TS/TBtrans/tbtrans',
                ])

            for util in expected_utils:
                copy_file(os.path.join(start_dir, 'Util', util), bindir)

        if self.cfg['with_transiesta']:
            # Build transiesta
            change_dir(obj_dir)

            # transiesta requires a clean tree, then its own make target
            run_cmd('make clean', log_all=True, simple=True, log_output=True)
            run_cmd('make %s transiesta' % par, log_all=True, simple=True, log_output=True)

            copy_file(os.path.join(obj_dir, 'transiesta'), bindir)
Ejemplo n.º 54
0
    def configure_step(self):
        """Custom configuration procedure for TensorFlow.

        Sets up compiler wrappers where needed (Intel compilers / Intel MPI),
        exports all TF_* / CUDA configuration variables that TensorFlow's
        interactive ``./configure`` script reads from the environment, patches
        ``configure.py`` so Bazel does not abuse $HOME/.cache/bazel, and then
        runs the configure script.
        """

        binutils_root = get_software_root('binutils')
        if not binutils_root:
            raise EasyBuildError(
                "Failed to determine installation prefix for binutils")
        self.binutils_bin_path = os.path.join(binutils_root, 'bin')

        tmpdir = tempfile.mkdtemp(suffix='-bazel-configure')

        # filter out paths from CPATH and LIBRARY_PATH. This is needed since bazel will pull some dependencies that
        # might conflict with dependencies on the system and/or installed with EB. For example: protobuf
        path_filter = self.cfg['path_filter']
        if path_filter:
            self.log.info(
                "Filtering $CPATH and $LIBRARY_PATH with path filter %s",
                path_filter)
            for var in ['CPATH', 'LIBRARY_PATH']:
                # default '' avoids an AttributeError when the variable is unset
                path = os.getenv(var, '').split(os.pathsep)
                self.log.info("$%s old value was %s" % (var, path))
                # keep an entry only if *no* filter pattern occurs in it;
                # the previous double-loop comprehension duplicated entries
                # (once per non-matching filter) and kept paths that matched
                # only one of several filters
                filtered_path = os.pathsep.join(
                    [p for p in path if not any(fil in p for fil in path_filter)])
                env.setvar(var, filtered_path)

        wrapper_dir = os.path.join(tmpdir, 'bin')
        use_wrapper = False

        if self.toolchain.comp_family() == toolchain.INTELCOMP:
            # put wrappers for Intel C/C++ compilers in place (required to make sure license server is found)
            # cfr. https://github.com/bazelbuild/bazel/issues/663
            for compiler in ('icc', 'icpc'):
                self.write_wrapper(wrapper_dir, compiler, 'NOT-USED-WITH-ICC')
            use_wrapper = True

        use_mpi = self.toolchain.options.get('usempi', False)
        mpi_home = ''
        if use_mpi:
            impi_root = get_software_root('impi')
            if impi_root:
                # put wrappers for Intel MPI compiler wrappers in place
                # (required to make sure license server and I_MPI_ROOT are found)
                for compiler in (os.getenv('MPICC'), os.getenv('MPICXX')):
                    self.write_wrapper(wrapper_dir, compiler,
                                       os.getenv('I_MPI_ROOT'))
                use_wrapper = True
                # set correct value for MPI_HOME
                mpi_home = os.path.join(impi_root, 'intel64')
            else:
                self.log.debug("MPI module name: %s",
                               self.toolchain.MPI_MODULE_NAME[0])
                mpi_home = get_software_root(self.toolchain.MPI_MODULE_NAME[0])

            self.log.debug("Derived value for MPI_HOME: %s", mpi_home)

        if use_wrapper:
            # put wrapper dir first in $PATH so our wrappers shadow the real compilers
            env.setvar('PATH',
                       os.pathsep.join([wrapper_dir,
                                        os.getenv('PATH')]))

        self.prepare_python()
        self.handle_jemalloc()

        self.verify_system_libs_info()
        self.system_libs_info = self.get_system_libs()

        cuda_root = get_software_root('CUDA')
        cudnn_root = get_software_root('cuDNN')
        opencl_root = get_software_root('OpenCL')
        tensorrt_root = get_software_root('TensorRT')
        nccl_root = get_software_root('NCCL')

        # environment variables consumed by TensorFlow's ./configure script
        config_env_vars = {
            'CC_OPT_FLAGS': os.getenv('CXXFLAGS'),
            'MPI_HOME': mpi_home,
            'PYTHON_BIN_PATH': self.python_cmd,
            'PYTHON_LIB_PATH': os.path.join(self.installdir, self.pylibdir),
            'TF_CUDA_CLANG': '0',
            'TF_ENABLE_XLA': '0',  # XLA JIT support
            'TF_NEED_CUDA': ('0', '1')[bool(cuda_root)],
            'TF_NEED_GCP': '0',  # Google Cloud Platform
            'TF_NEED_GDR': '0',
            'TF_NEED_HDFS': '0',  # Hadoop File System
            'TF_NEED_JEMALLOC': ('0', '1')[self.cfg['with_jemalloc']],
            'TF_NEED_MPI': ('0', '1')[bool(use_mpi)],
            'TF_NEED_OPENCL': ('0', '1')[bool(opencl_root)],
            'TF_NEED_OPENCL_SYCL': '0',
            'TF_NEED_ROCM': '0',
            'TF_NEED_S3': '0',  # Amazon S3 File System
            'TF_NEED_TENSORRT': '0',
            'TF_NEED_VERBS': '0',
            'TF_NEED_AWS': '0',  # Amazon AWS Platform
            'TF_NEED_KAFKA': '0',  # Amazon Kafka Platform
            'TF_SET_ANDROID_WORKSPACE': '0',
            'TF_DOWNLOAD_CLANG': '0',  # Still experimental in TF 2.1.0
            'TF_SYSTEM_LIBS': ','.join(self.system_libs_info[0]),
        }
        if cuda_root:
            cuda_version = get_software_version('CUDA')
            cuda_maj_min_ver = '.'.join(cuda_version.split('.')[:2])

            # $GCC_HOST_COMPILER_PATH should be set to path of the actual compiler (not the MPI compiler wrapper)
            if use_mpi:
                compiler_path = which(os.getenv('CC_SEQ'))
            else:
                compiler_path = which(os.getenv('CC'))

            # list of CUDA compute capabilities to use can be specifed in two ways (where (2) overrules (1)):
            # (1) in the easyconfig file, via the custom cuda_compute_capabilities;
            # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option;
            ec_cuda_cc = self.cfg['cuda_compute_capabilities']
            cfg_cuda_cc = build_option('cuda_compute_capabilities')
            cuda_cc = cfg_cuda_cc or ec_cuda_cc or []

            if cfg_cuda_cc and ec_cuda_cc:
                warning_msg = "cuda_compute_capabilities specified in easyconfig (%s) are overruled by " % ec_cuda_cc
                warning_msg += "--cuda-compute-capabilities configuration option (%s)" % cfg_cuda_cc
                print_warning(warning_msg)
            elif not cuda_cc:
                warning_msg = "No CUDA compute capabilities specified, so using TensorFlow default "
                warning_msg += "(which may not be optimal for your system).\nYou should use "
                warning_msg += "the --cuda-compute-capabilities configuration option or the cuda_compute_capabilities "
                warning_msg += "easyconfig parameter to specify a list of CUDA compute capabilities to compile with."
                print_warning(warning_msg)

            # TensorFlow 1.12.1 requires compute capability >= 3.5
            # see https://github.com/tensorflow/tensorflow/pull/25767
            if LooseVersion(self.version) >= LooseVersion('1.12.1'):
                faulty_comp_caps = [
                    x for x in cuda_cc if LooseVersion(x) < LooseVersion('3.5')
                ]
                if faulty_comp_caps:
                    error_msg = "TensorFlow >= 1.12.1 requires CUDA compute capabilities >= 3.5, "
                    error_msg += "found one or more older ones: %s"
                    raise EasyBuildError(error_msg,
                                         ', '.join(faulty_comp_caps))

            if cuda_cc:
                self.log.info(
                    "Compiling with specified list of CUDA compute capabilities: %s",
                    ', '.join(cuda_cc))

            config_env_vars.update({
                'CUDA_TOOLKIT_PATH':
                cuda_root,
                'GCC_HOST_COMPILER_PATH':
                compiler_path,
                # This is the binutils bin folder: https://github.com/tensorflow/tensorflow/issues/39263
                'GCC_HOST_COMPILER_PREFIX':
                self.binutils_bin_path,
                'TF_CUDA_COMPUTE_CAPABILITIES':
                ','.join(cuda_cc),
                'TF_CUDA_VERSION':
                cuda_maj_min_ver,
            })

            # for recent TensorFlow versions, $TF_CUDA_PATHS and $TF_CUBLAS_VERSION must also be set
            if LooseVersion(self.version) >= LooseVersion('1.14'):

                # figure out correct major/minor version for CUBLAS from cublas_api.h
                cublas_api_header_glob_pattern = os.path.join(
                    cuda_root, 'targets', '*', 'include', 'cublas_api.h')
                matches = glob.glob(cublas_api_header_glob_pattern)
                if len(matches) == 1:
                    cublas_api_header_path = matches[0]
                    cublas_api_header_txt = read_file(cublas_api_header_path)
                else:
                    raise EasyBuildError(
                        "Failed to isolate path to cublas_api.h: %s", matches)

                cublas_ver_parts = []
                for key in [
                        'CUBLAS_VER_MAJOR', 'CUBLAS_VER_MINOR',
                        'CUBLAS_VER_PATCH'
                ]:
                    regex = re.compile("^#define %s ([0-9]+)" % key, re.M)
                    res = regex.search(cublas_api_header_txt)
                    if res:
                        cublas_ver_parts.append(res.group(1))
                    else:
                        raise EasyBuildError(
                            "Failed to find pattern '%s' in %s", regex.pattern,
                            cublas_api_header_path)

                config_env_vars.update({
                    'TF_CUDA_PATHS':
                    cuda_root,
                    'TF_CUBLAS_VERSION':
                    '.'.join(cublas_ver_parts),
                })

            if cudnn_root:
                cudnn_version = get_software_version('cuDNN')
                cudnn_maj_min_patch_ver = '.'.join(
                    cudnn_version.split('.')[:3])

                config_env_vars.update({
                    'CUDNN_INSTALL_PATH':
                    cudnn_root,
                    'TF_CUDNN_VERSION':
                    cudnn_maj_min_patch_ver,
                })
            else:
                raise EasyBuildError(
                    "TensorFlow has a strict dependency on cuDNN if CUDA is enabled"
                )
            if nccl_root:
                nccl_version = get_software_version('NCCL')
                # Ignore the PKG_REVISION identifier if it exists (i.e., report 2.4.6 for 2.4.6-1 or 2.4.6-2)
                nccl_version = nccl_version.split('-')[0]
                config_env_vars.update({
                    'NCCL_INSTALL_PATH': nccl_root,
                })
            else:
                nccl_version = '1.3'  # Use simple downloadable version
            config_env_vars.update({
                'TF_NCCL_VERSION': nccl_version,
            })
            if tensorrt_root:
                tensorrt_version = get_software_version('TensorRT')
                config_env_vars.update({
                    'TF_NEED_TENSORRT': '1',
                    'TENSORRT_INSTALL_PATH': tensorrt_root,
                    'TF_TENSORRT_VERSION': tensorrt_version,
                })

        # sorted for deterministic, reproducible log output
        for (key, val) in sorted(config_env_vars.items()):
            env.setvar(key, val)

        # patch configure.py (called by configure script) to avoid that Bazel abuses $HOME/.cache/bazel
        regex_subs = [(r"(run_shell\(\['bazel')",
                       r"\1, '--output_base=%s', '--install_base=%s'" %
                       (tmpdir, os.path.join(tmpdir, 'inst_base')))]
        apply_regex_substitutions('configure.py', regex_subs)

        # Tell Bazel to not use $HOME/.cache/bazel at all
        # See https://docs.bazel.build/versions/master/output_directories.html
        env.setvar('TEST_TMPDIR', os.path.join(tmpdir, 'output_root'))
        cmd = self.cfg['preconfigopts'] + './configure ' + self.cfg[
            'configopts']
        run_cmd(cmd, log_all=True, simple=True)
Ejemplo n.º 55
0
    def build_step(self):
        """Custom build procedure for TensorFlow.

        Patches the generated CROSSTOOL* files so Bazel uses the EasyBuild-provided
        binutils/GCC (instead of hardcoded /usr/bin locations), then composes and
        runs the 'bazel build' command, and finally runs the generated
        'build_pip_package' script to produce the .whl in self.builddir.
        """

        # pre-create target installation directory
        mkdir(os.path.join(self.installdir, self.pylibdir), parents=True)

        # header/library search paths to be injected into the CROSSTOOL files below
        inc_paths, lib_paths = [], []

        gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
        if gcc_root:
            gcc_lib64 = os.path.join(gcc_root, 'lib64')
            lib_paths.append(gcc_lib64)

            gcc_ver = get_software_version('GCCcore') or get_software_version(
                'GCC')

            # figure out location of GCC include files
            res = glob.glob(
                os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
            if res and len(res) == 1:
                gcc_lib_inc = res[0]
                inc_paths.append(gcc_lib_inc)
            else:
                raise EasyBuildError(
                    "Failed to pinpoint location of GCC include files: %s",
                    res)

            # make sure include-fixed directory is where we expect it to be
            gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc),
                                             'include-fixed')
            if os.path.exists(gcc_lib_inc_fixed):
                inc_paths.append(gcc_lib_inc_fixed)
            else:
                self.log.info(
                    "Derived directory %s does not exist, so discarding it",
                    gcc_lib_inc_fixed)

            # also check on location of include/c++/<gcc version> directory
            gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++',
                                             gcc_ver)
            if os.path.exists(gcc_cplusplus_inc):
                inc_paths.append(gcc_cplusplus_inc)
            else:
                raise EasyBuildError("Derived directory %s does not exist",
                                     gcc_cplusplus_inc)
        else:
            raise EasyBuildError(
                "Failed to determine installation prefix for GCC")

        cuda_root = get_software_root('CUDA')
        if cuda_root:
            inc_paths.append(os.path.join(cuda_root, 'include'))
            lib_paths.append(os.path.join(cuda_root, 'lib64'))

        # fix hardcoded locations of compilers & tools
        # register each include path twice: once resolved (symlink-free) and once as-is,
        # so the CROSSTOOL entries cover both forms of the path
        cxx_inc_dirs = [
            'cxx_builtin_include_directory: "%s"' % resolve_path(p)
            for p in inc_paths
        ]
        cxx_inc_dirs += [
            'cxx_builtin_include_directory: "%s"' % p for p in inc_paths
        ]
        regex_subs = [
            (r'-B/usr/bin/', '-B%s %s' %
             (self.binutils_bin_path, ' '.join('-L%s/' % p
                                               for p in lib_paths))),
            # drop existing cxx_builtin_include_directory entries, then re-add ours
            # right after the 'toolchain {' opener
            (r'(cxx_builtin_include_directory:).*', ''),
            (r'^toolchain {', 'toolchain {\n' + '\n'.join(cxx_inc_dirs)),
        ]
        # replace hardcoded /usr/bin/<tool> paths with the tools found on $PATH
        for tool in [
                'ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy',
                'objdump', 'strip'
        ]:
            path = which(tool)
            if path:
                regex_subs.append((os.path.join('/usr', 'bin', tool), path))
            else:
                raise EasyBuildError("Failed to determine path to '%s'", tool)

        # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurences of -fPIE/-pie if -fPIC is used
        if self.toolchain.options.get('pic', None):
            regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

        # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
        for path, dirnames, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                if filename.startswith('CROSSTOOL'):
                    full_path = os.path.join(path, filename)
                    self.log.info("Patching %s", full_path)
                    apply_regex_substitutions(full_path, regex_subs)

        tmpdir = tempfile.mkdtemp(suffix='-bazel-build')
        user_root_tmpdir = tempfile.mkdtemp(suffix='-user_root')

        # compose "bazel build" command with all its options...
        cmd = [
            self.cfg['prebuildopts'], 'bazel',
            '--output_base=%s' % tmpdir,
            '--install_base=%s' % os.path.join(tmpdir, 'inst_base'),
            '--output_user_root=%s' % user_root_tmpdir, 'build'
        ]

        # build with optimization enabled
        # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode
        cmd.append('--compilation_mode=opt')

        # select 'opt' config section (this is *not* the same as --compilation_mode=opt!)
        # https://docs.bazel.build/versions/master/user-manual.html#flag--config
        cmd.append('--config=opt')

        # make Bazel print full command line + make it verbose on failures
        # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands
        # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures
        cmd.extend(['--subcommands', '--verbose_failures'])

        # Disable support of AWS platform via config switch introduced in 1.12.1
        if LooseVersion(self.version) >= LooseVersion('1.12.1'):
            cmd.append('--config=noaws')

        # Bazel seems to not be able to handle a large amount of parallel jobs, e.g. 176 on some Power machines,
        # and will hang forever building the TensorFlow package.
        # So limit to something high but still reasonable while allowing ECs to overwrite it
        parallel = self.cfg['parallel']
        if self.cfg['maxparallel'] is None:
            parallel = min(parallel, 64)
        cmd.append('--jobs=%s' % parallel)

        if self.toolchain.options.get('pic', None):
            cmd.append('--copt="-fPIC"')

        # include install location of Python packages in $PYTHONPATH,
        # and specify that value of $PYTHONPATH should be passed down into Bazel build environment;
        # this is required to make sure that Python packages included as extensions are found at build time;
        # see also https://github.com/tensorflow/tensorflow/issues/22395
        pythonpath = os.getenv('PYTHONPATH', '')
        env.setvar(
            'PYTHONPATH',
            os.pathsep.join(
                [os.path.join(self.installdir, self.pylibdir), pythonpath]))

        # Make TF find our modules. LD_LIBRARY_PATH gets automatically added by configure.py
        cpaths, libpaths = self.system_libs_info[1:]
        if cpaths:
            cmd.append("--action_env=CPATH='%s'" % ':'.join(cpaths))
        if libpaths:
            cmd.append("--action_env=LIBRARY_PATH='%s'" % ':'.join(libpaths))
        cmd.append('--action_env=PYTHONPATH')
        # Also export $EBPYTHONPREFIXES to handle the multi-deps python setup
        # See https://github.com/easybuilders/easybuild-easyblocks/pull/1664
        if 'EBPYTHONPREFIXES' in os.environ:
            cmd.append('--action_env=EBPYTHONPREFIXES')

        # Ignore user environment for Python
        cmd.append('--action_env=PYTHONNOUSERSITE=1')

        # use same configuration for both host and target programs, which can speed up the build
        # only done when optarch is enabled, since this implicitely assumes that host and target platform are the same
        # see https://docs.bazel.build/versions/master/guide.html#configurations
        if self.toolchain.options.get('optarch'):
            cmd.append('--distinct_host_configuration=false')

        cmd.append(self.cfg['buildopts'])

        # TF 2 (final) sets this in configure
        if LooseVersion(self.version) < LooseVersion('2.0'):
            if cuda_root:
                cmd.append('--config=cuda')

        # if mkl-dnn is listed as a dependency it is used. Otherwise downloaded if with_mkl_dnn is true
        mkl_root = get_software_root('mkl-dnn')
        if mkl_root:
            cmd.extend(['--config=mkl'])
            # prepend env var exports so they take effect before 'bazel' runs
            cmd.insert(0, "export TF_MKL_DOWNLOAD=0 &&")
            cmd.insert(0, "export TF_MKL_ROOT=%s &&" % mkl_root)
        elif self.cfg['with_mkl_dnn']:
            # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn)
            cmd.extend(['--config=mkl'])
            cmd.insert(0, "export TF_MKL_DOWNLOAD=1 && ")

        # specify target of the build command as last argument
        cmd.append('//tensorflow/tools/pip_package:build_pip_package')

        run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True)

        # run generated 'build_pip_package' script to build the .whl
        cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir
        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
Ejemplo n.º 56
0
    def install_step(self):
        """Custom install procedure for TensorFlow.

        Installs the wheel built in build_step via 'pip install', fixes up the
        google/__init__.py protobuf quirk depending on the Python version, and
        patches CUDA header include paths for TensorFlow < 1.14.
        """

        # keep pip's cache out of $HOME/.cache/pip
        # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching
        env.setvar('XDG_CACHE_HOME', tempfile.gettempdir())
        self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME'])

        # wheel filenames use e.g. '1.13.1rc1' where the EasyBuild version is '1.13.1-rc1'
        whl_version = self.version.replace("-rc", "rc") if "-rc" in self.version else self.version

        whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-%s-*.whl' % whl_version))
        if not whl_paths:
            # fall back to any TensorFlow wheel found in the build directory
            whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-*.whl'))

        # exactly one wheel must be found, otherwise we can't tell which one to install
        if len(whl_paths) != 1:
            raise EasyBuildError("Failed to isolate built .whl in %s: %s",
                                 whl_paths, self.builddir)

        # --ignore-installed is required to ensure *this* wheel is installed
        pip_cmd = "pip install --ignore-installed --prefix=%s %s" % (
            self.installdir, whl_paths[0])

        # if extensions are listed, assume they will provide all required dependencies,
        # so use --no-deps to prevent pip from downloading & installing them
        if self.cfg['exts_list']:
            pip_cmd += ' --no-deps'

        run_cmd(pip_cmd, log_all=True, simple=True, log_ok=True)

        # Fix for https://github.com/tensorflow/tensorflow/issues/6341 on Python < 3.3
        # If the site-packages/google/__init__.py file is missing, make it an empty file.
        # This fixes the "No module named google.protobuf" error that sometimes shows up during sanity_check
        # For Python >= 3.3 the logic is reversed: The __init__.py must not exist.
        # See e.g. http://python-notes.curiousefficiency.org/en/latest/python_concepts/import_traps.html
        google_pkg_dir = os.path.join(self.installdir, self.pylibdir, 'google')
        protobuf_dir = os.path.join(google_pkg_dir, 'protobuf')
        init_py = os.path.join(google_pkg_dir, '__init__.py')
        python_ver = det_python_version(self.python_cmd)
        if LooseVersion(python_ver) < LooseVersion('3.3'):
            if os.path.isdir(protobuf_dir) and not is_readable(init_py):
                self.log.debug("Creating (empty) missing %s", init_py)
                write_file(init_py, '')
        elif os.path.exists(init_py):
            self.log.debug("Removing %s for Python >= 3.3", init_py)
            remove_file(init_py)

        # Fix cuda header paths
        # This is needed for building custom TensorFlow ops
        if LooseVersion(self.version) < LooseVersion('1.14'):
            pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
            tf_include = os.path.join(self.installdir, 'lib',
                                      'python%s' % pyshortver, 'site-packages',
                                      'tensorflow', 'include', 'tensorflow')
            header_globs = [
                os.path.join(tf_include, 'stream_executor', 'cuda', 'cuda*.h'),
                os.path.join(tf_include, 'core', 'util', 'cuda*.h'),
            ]
            for pattern in header_globs:
                for header in glob.glob(pattern):
                    apply_regex_substitutions(header, [(r'#include "cuda/include/', r'#include "')])
Ejemplo n.º 57
0
    def configure_step(self):
        """Configure OpenFOAM build by setting appropriate environment variables.

        Picks the WM_COMPILER matching the toolchain, patches the hardcoded
        WM_* exports out of etc/bashrc and etc/cshrc, injects the EasyBuild
        compiler commands into the wmake/rules files, and exports the WM_* /
        dependency environment variables the OpenFOAM build system reads.
        """
        # compiler & compiler flags
        comp_fam = self.toolchain.comp_family()

        extra_flags = ''
        if comp_fam == toolchain.GCC:  # @UndefinedVariable
            self.wm_compiler = 'Gcc'
            # compare as versions, not strings: e.g. '10.2' < '4.8' lexicographically
            if LooseVersion(get_software_version('GCC')) >= LooseVersion('4.8'):
                # make sure non-gold version of ld is used, since OpenFOAM requires it
                # see http://www.openfoam.org/mantisbt/view.php?id=685
                extra_flags = '-fuse-ld=bfd'

            # older versions of OpenFOAM-Extend require -fpermissive
            if 'extend' in self.name.lower() and LooseVersion(self.version) < LooseVersion('2.0'):
                extra_flags += ' -fpermissive'

        elif comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            self.wm_compiler = 'Icc'

            # make sure -no-prec-div is used with Intel compilers
            extra_flags = '-no-prec-div'

        else:
            raise EasyBuildError("Unknown compiler family, don't know how to set WM_COMPILER")

        for env_var in ['CFLAGS', 'CXXFLAGS']:
            env.setvar(env_var, "%s %s" % (os.environ.get(env_var, ''), extra_flags))

        # patch out hardcoding of WM_* environment variables
        # for example, replace 'export WM_COMPILER=Gcc' with ': ${WM_COMPILER:=Gcc}; export WM_COMPILER'
        for script in [os.path.join(self.builddir, self.openfoamdir, x) for x in ['etc/bashrc', 'etc/cshrc']]:
            self.log.debug("Patching out hardcoded $WM_* env vars in %s", script)
            # disable any third party stuff, we use EB controlled builds
            regex_subs = [(r"^(setenv|export) WM_THIRD_PARTY_USE_.*[ =].*$", r"# \g<0>")]
            WM_env_var = ['WM_COMPILER', 'WM_MPLIB', 'WM_THIRD_PARTY_DIR']
            # OpenFOAM >= 3.0.0 can use 64 bit integers
            if 'extend' not in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0'):
                WM_env_var.append('WM_LABEL_SIZE')
            for env_var in WM_env_var:
                regex_subs.append((r"^(setenv|export) (?P<var>%s)[ =](?P<val>.*)$" % env_var,
                                   r": ${\g<var>:=\g<val>}; export \g<var>"))

            apply_regex_substitutions(script, regex_subs)

        # inject compiler variables into wmake/rules files
        ldirs = glob.glob(os.path.join(self.builddir, self.openfoamdir, 'wmake', 'rules', 'linux*'))
        langs = ['c', 'c++']
        suffixes = ['', 'Opt']
        wmake_rules_files = [os.path.join(ldir, lang + suff) for ldir in ldirs for lang in langs for suff in suffixes]

        mpicc = os.environ['MPICC']
        mpicxx = os.environ['MPICXX']
        cc_seq = os.environ.get('CC_SEQ', os.environ['CC'])
        cxx_seq = os.environ.get('CXX_SEQ', os.environ['CXX'])

        if self.toolchain.mpi_family() == toolchain.OPENMPI:
            # no -cc/-cxx flags supported in OpenMPI compiler wrappers
            c_comp_cmd = 'OMPI_CC="%s" %s' % (cc_seq, mpicc)
            cxx_comp_cmd = 'OMPI_CXX="%s" %s' % (cxx_seq, mpicxx)
        else:
            # -cc/-cxx should work for all MPICH-based MPIs (including Intel MPI)
            c_comp_cmd = '%s -cc="%s"' % (mpicc, cc_seq)
            cxx_comp_cmd = '%s -cxx="%s"' % (mpicxx, cxx_seq)

        comp_vars = {
            # specify MPI compiler wrappers and compiler commands + sequential compiler that should be used by them
            'cc': c_comp_cmd,
            'CC': cxx_comp_cmd,
            'cOPT': os.environ['CFLAGS'],
            'c++OPT': os.environ['CXXFLAGS'],
        }
        for wmake_rules_file in wmake_rules_files:
            fullpath = os.path.join(self.builddir, self.openfoamdir, wmake_rules_file)
            self.log.debug("Patching compiler variables in %s", fullpath)
            regex_subs = []
            for comp_var, newval in comp_vars.items():
                regex_subs.append((r"^(%s\s*=\s*).*$" % re.escape(comp_var), r"\1%s" % newval))
            apply_regex_substitutions(fullpath, regex_subs)

        # enable verbose build for debug purposes
        # starting with openfoam-extend 3.2, PS1 also needs to be set
        env.setvar("FOAM_VERBOSE", '1')

        # installation directory
        env.setvar("FOAM_INST_DIR", self.installdir)

        # third party directory
        self.thrdpartydir = "ThirdParty-%s" % self.version
        # only if third party stuff is actually installed
        if os.path.exists(self.thrdpartydir):
            os.symlink(os.path.join("..", self.thrdpartydir), self.thrdpartydir)
            env.setvar("WM_THIRD_PARTY_DIR", os.path.join(self.installdir, self.thrdpartydir))

        env.setvar("WM_COMPILER", self.wm_compiler)

        # set to an MPI unknown by OpenFOAM, since we're handling the MPI settings ourselves (via mpicc, etc.)
        # Note: this name must contain 'MPI' so the MPI version of the Pstream library is built (cf src/Pstream/Allwmake)
        self.wm_mplib = "EASYBUILDMPI"
        env.setvar("WM_MPLIB", self.wm_mplib)

        # parallel build spec
        env.setvar("WM_NCOMPPROCS", str(self.cfg['parallel']))

        # OpenFOAM >= 3.0.0 can use 64 bit integers
        if 'extend' not in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0'):
            if self.toolchain.options['i8']:
                env.setvar("WM_LABEL_SIZE", '64')
            else:
                env.setvar("WM_LABEL_SIZE", '32')

        # make sure lib/include dirs for dependencies are found
        openfoam_extend_v3 = 'extend' in self.name.lower() and LooseVersion(self.version) >= LooseVersion('3.0')
        if LooseVersion(self.version) < LooseVersion("2") or openfoam_extend_v3:
            self.log.debug("List of deps: %s" % self.cfg.dependencies())
            for dep in self.cfg.dependencies():
                # no trailing comma here: dep_name must be a plain string, not a 1-tuple
                dep_name = dep['name'].upper()
                dep_root = get_software_root(dep['name'])
                env.setvar("%s_SYSTEM" % dep_name, "1")
                dep_vars = {
                    "%s_DIR": "%s",
                    "%s_BIN_DIR": "%s/bin",
                    "%s_LIB_DIR": "%s/lib",
                    "%s_INCLUDE_DIR": "%s/include",
                }
                # dict.items() instead of Python 2-only iteritems()
                for var, val in dep_vars.items():
                    env.setvar(var % dep_name, val % dep_root)
        else:
            for depend in ['SCOTCH', 'METIS', 'CGAL', 'Paraview']:
                dependloc = get_software_root(depend)
                if dependloc:
                    if depend == 'CGAL' and get_software_root('Boost'):
                        env.setvar("CGAL_ROOT", dependloc)
                        env.setvar("BOOST_ROOT", get_software_root('Boost'))
                    else:
                        env.setvar("%s_ROOT" % depend.upper(), dependloc)
Ejemplo n.º 58
0
    def configure_step(self):
        """Custom configuration procedure for WRF-Fire.

        WRF-Fire bundles WRF (in WRFV3/) and WPS (in WPS/). Both parts are
        configured by driving their interactive './configure' scripts via
        run_cmd_qa, after which the generated configure.wrf and configure.wps
        files are patched so the EasyBuild compiler/MPI wrappers and flags
        are used.

        Raises EasyBuildError if netCDF-Fortran is missing, or if the
        compiler family is not covered by the known configure options.
        """

        comp_fam = self.toolchain.comp_family()

        # define $NETCDF* for netCDF dependency (required)
        netcdf_fortran = get_software_root('netCDF-Fortran')
        if netcdf_fortran:
            env.setvar('NETCDF', netcdf_fortran)
        else:
            # fixed typo in error message ("dependendy")
            raise EasyBuildError("Required dependency netCDF-Fortran is missing")

        # define $PHDF5 for parallel HDF5 dependency, but only if the parallel compiler wrapper h5pcc is there
        hdf5 = get_software_root('HDF5')
        if hdf5 and os.path.exists(os.path.join(hdf5, 'bin', 'h5pcc')):
            env.setvar('PHDF5', hdf5)

        # first, configure WRF part
        change_dir(os.path.join(self.cfg['start_dir'], 'WRFV3'))

        # instruct WRF-Fire to create netCDF v4 output files
        env.setvar('WRFIO_NETCDF4_FILE_SUPPORT', '1')

        # patch arch/Config_new.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join('arch', 'Config_new.pl'))

        # determine build type option to look for in WRF's configure menu
        known_build_type_options = {
            toolchain.INTELCOMP: "Linux x86_64 i486 i586 i686, ifort compiler with icc",
            toolchain.GCC: "x86_64 Linux, gfortran compiler with gcc",
            toolchain.PGI: "Linux x86_64, PGI compiler with pgcc",
        }
        build_type_option = known_build_type_options.get(comp_fam)
        if build_type_option is None:
            # fixed copy-paste error: this is the WRF (not WPS) configure step
            raise EasyBuildError("Don't know which WRF configure option to select for compiler family %s", comp_fam)

        # named group 'nr' captures the menu entry number matching our build type
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s\s*\(%s\)" % (build_type_option, self.cfg['buildtype'])
        qa = {
            "Compile for nesting? (1=basic, 2=preset moves, 3=vortex following) [default 1]:": '1',
        }
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: '%(nr)s',
        }
        run_cmd_qa('./configure', qa, std_qa=std_qa, log_all=True, simple=True)

        # compiler flag that marks Fortran sources as preprocessed, per compiler family
        cpp_flag = None
        if comp_fam == toolchain.INTELCOMP:
            cpp_flag = '-fpp'
        elif comp_fam == toolchain.GCC:
            cpp_flag = '-cpp'
        else:
            raise EasyBuildError("Don't know which flag to use to specify that Fortran files were preprocessed")

        # patch configure.wrf to get things right
        comps = {
            'CFLAGS_LOCAL': os.getenv('CFLAGS'),
            'DM_FC': os.getenv('MPIF90'),
            'DM_CC': "%s -DMPI2_SUPPORT" % os.getenv('MPICC'),
            'FCOPTIM': os.getenv('FFLAGS'),
            # specify that Fortran files have been preprocessed with cpp,
            # see http://forum.wrfforum.com/viewtopic.php?f=5&t=6086
            'FORMAT_FIXED': "-FI %s" % cpp_flag,
            'FORMAT_FREE': "-FR %s" % cpp_flag,
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v) for (k, v) in comps.items()]
        apply_regex_substitutions('configure.wrf', regex_subs)

        # also configure WPS part
        change_dir(os.path.join(self.cfg['start_dir'], 'WPS'))

        # patch arch/Config.pl script, so that run_cmd_qa receives all output to answer questions
        patch_perl_script_autoflush(os.path.join('arch', 'Config.pl'))

        # determine build type option to look for in WPS' configure menu
        known_build_type_options = {
            toolchain.INTELCOMP: "PC Linux x86_64, Intel compiler",
            toolchain.GCC: "PC Linux x86_64, g95 compiler",
            toolchain.PGI: "PC Linux x86_64 (IA64 and Opteron), PGI compiler 5.2 or higher",
        }
        build_type_option = known_build_type_options.get(comp_fam)
        if build_type_option is None:
            raise EasyBuildError("Don't know which WPS configure option to select for compiler family %s", comp_fam)

        # NOTE(review): mapping 'smpar' to 'serial' looks deliberate (WPS build) — confirm intent
        known_wps_build_types = {
            'dmpar': 'DM parallel',
            'smpar': 'serial',
        }
        wps_build_type = known_wps_build_types.get(self.cfg['buildtype'])
        if wps_build_type is None:
            # fixed typo: was self.cfg['builddtype'], which raised a KeyError instead of the intended error
            raise EasyBuildError("Don't know which WPS build type to pick for '%s'", self.cfg['buildtype'])

        # negative lookahead excludes the 'NO GRIB2' variant of the matching menu entry
        build_type_question = r"\s*(?P<nr>[0-9]+).\s*%s.*%s(?!NO GRIB2)" % (build_type_option, wps_build_type)
        std_qa = {
            # named group in match will be used to construct answer
            r"%s.*\n(.*\n)*Enter selection\s*\[[0-9]+-[0-9]+\]\s*:" % build_type_question: '%(nr)s',
        }
        run_cmd_qa('./configure', {}, std_qa=std_qa, log_all=True, simple=True)

        # patch configure.wps to use the MPI compiler wrappers
        comps = {
            'CC': '%s %s' % (os.getenv('MPICC'), os.getenv('CFLAGS')),
            'FC': '%s %s' % (os.getenv('MPIF90'), os.getenv('F90FLAGS'))
        }
        regex_subs = [(r"^(%s\s*=\s*).*$" % k, r"\1 %s" % v) for (k, v) in comps.items()]
        # specify that Fortran90 files have been preprocessed with cpp
        regex_subs.extend([
            (r"^(F77FLAGS\s*=\s*)", r"\1 %s " % cpp_flag),
            (r"^(FFLAGS\s*=\s*)", r"\1 %s " % cpp_flag),
        ])
        apply_regex_substitutions('configure.wps', regex_subs)
    def configure_step(self):
        """Custom configuration procedure for Quantum ESPRESSO.

        Adjusts configure options based on toolchain settings (OpenMP, MPI,
        ScaLAPACK), runs the parent configure step, then patches the generated
        make.inc (QE >= 6) or make.sys file in place with compiler, DFLAGS and
        library settings taken from the EasyBuild environment.
        """

        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            self.cfg.update('configopts', '--enable-openmp')

        if not self.toolchain.options.get('usempi', None):
            self.cfg.update('configopts', '--disable-parallel')

        if not self.cfg['with_scalapack']:
            self.cfg.update('configopts', '--without-scalapack')

        # replacements to apply to the make file, as (variable, value, keep-existing-value) tuples
        repls = []

        if self.toolchain.comp_family() in [toolchain.INTELCOMP]:
            # set preprocessor command (-E to stop after preprocessing, -C to preserve comments)
            cpp = "%s -E -C" % os.getenv('CC')
            repls.append(('CPP', cpp, False))
            env.setvar('CPP', cpp)

            # also define $FCCPP, but do *not* include -C (comments should not be preserved when preprocessing Fortran)
            env.setvar('FCCPP', "%s -E" % os.getenv('CC'))

        super(EB_QuantumESPRESSO, self).configure_step()

        # compose list of DFLAGS (flag, value, keep_stuff)
        # for guidelines, see include/defs.h.README in sources
        dflags = []

        comp_fam_dflags = {
            toolchain.INTELCOMP: '-D__INTEL',
            toolchain.GCC: '-D__GFORTRAN -D__STD_F95',
        }
        # NOTE(review): raises KeyError for compiler families other than Intel/GCC — presumably only these are supported
        dflags.append(comp_fam_dflags[self.toolchain.comp_family()])

        # pick multithreaded FFT library when OpenMP is enabled
        if self.toolchain.options.get('openmp', False):
            libfft = os.getenv('LIBFFT_MT')
        else:
            libfft = os.getenv('LIBFFT')
        if libfft:
            if "fftw3" in libfft:
                dflags.append('-D__FFTW3')
            else:
                dflags.append('-D__FFTW')
            env.setvar('FFTW_LIBS', libfft)

        if get_software_root('ACML'):
            dflags.append('-D__ACML')

        if self.toolchain.options.get('usempi', None):
            dflags.append('-D__MPI -D__PARA')

        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            dflags.append(" -D__OPENMP")

        if self.cfg['with_scalapack']:
            dflags.append(" -D__SCALAPACK")

        # always include -w to suppress warnings
        dflags.append('-w')

        repls.append(('DFLAGS', ' '.join(dflags), False))

        # complete C/Fortran compiler and LD flags (keep=True: append to existing values)
        if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']:
            repls.append(('LDFLAGS', self.toolchain.get_flag('openmp'), True))
            repls.append(('(?:C|F90|F)FLAGS', self.toolchain.get_flag('openmp'), True))

        # obtain library settings
        # NOTE(review): if a LIB* env var is unset, val is None and the literal string 'None'
        # ends up in the make file via the %s substitution below — confirm this is acceptable
        libs = []
        for lib in ['BLAS', 'LAPACK', 'FFT', 'SCALAPACK']:
            if self.toolchain.options.get('openmp', False):
                val = os.getenv('LIB%s_MT' % lib)
            else:
                val = os.getenv('LIB%s' % lib)
            repls.append(('%s_LIBS' % lib, val, False))
            libs.append(val)
        libs = ' '.join(libs)

        repls.append(('BLAS_LIBS_SWITCH', 'external', False))
        repls.append(('LAPACK_LIBS_SWITCH', 'external', False))
        repls.append(('LD_LIBS', os.getenv('LIBS'), False))

        # check for external FoX
        if get_software_root('FoX'):
            self.log.debug("Found FoX external module, disabling libfox target in Makefile")
            regex_subs = [(r"(libfox: touch-dummy)\n.*", r"\1\n\techo 'libfox: external module used' #")]
            apply_regex_substitutions('Makefile', regex_subs)
            # Make configure and make look in the correct directory
            regex_subs = [(r"\(TOPDIR\)/FoX", r"(EBROOTFOX)")]
            apply_regex_substitutions('install/configure', regex_subs)
            apply_regex_substitutions('make.inc', regex_subs)

        self.log.debug("List of replacements to perform: %s" % repls)

        # make file extension changed from .sys to .inc in QE 6.0
        if LooseVersion(self.version) >= LooseVersion("6"):
            make_ext = '.inc'
        else:
            make_ext = '.sys'

        # patch make.sys file in place (original kept as .orig.eb backup)
        fn = os.path.join(self.cfg['start_dir'], 'make' + make_ext)
        try:
            for line in fileinput.input(fn, inplace=1, backup='.orig.eb'):
                for (k, v, keep) in repls:
                    # need to use [ \t]* instead of \s*, because vars may be undefined as empty,
                    # and we don't want to include newlines
                    if keep:
                        line = re.sub(r"^(%s\s*=[ \t]*)(.*)$" % k, r"\1\2 %s" % v, line)
                    else:
                        line = re.sub(r"^(%s\s*=[ \t]*).*$" % k, r"\1%s" % v, line)

                # fix preprocessing directives for .f90 files in make.sys if required
                if self.toolchain.comp_family() in [toolchain.GCC]:
                    line = re.sub(r"\$\(MPIF90\) \$\(F90FLAGS\) -c \$<",
                                  "$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" +
                                  "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o",
                                  line)

                # with inplace=1, stdout is redirected into the file being patched
                sys.stdout.write(line)
        except IOError, err:
            raise EasyBuildError("Failed to patch %s: %s", fn, err)
Ejemplo n.º 60
0
    def configure_step(self):
        """Custom configuration procedure for Xmipp.

        Runs the bundled xmipp 'config' command to generate an initial config
        file, then patches that file (self.cfgfile) so compilers, MPI wrappers,
        CUDA, OpenCV, Java and include paths all come from the EasyBuild
        environment, and finally calls the parent configure step.

        Raises EasyBuildError when a required dependency is missing or the
        cuFFTAdvisor sources cannot be located.
        """

        # Tell xmipp config that there is no Scipion.
        env.setvar('XMIPP_NOSCIPION', 'True')
        # Initialize the config file and then patch it with the correct values
        # NOTE(review): self.xmipp_exe is assumed to be set elsewhere (e.g. in __init__) — confirm
        cmd = ' '.join([
            self.cfg['preconfigopts'],
            self.xmipp_exe,
            'config',
            self.cfg['configopts'],
        ])
        run_cmd(cmd, log_all=True, simple=True)

        # Parameters to be set in the config file
        params = {
            'CC': os.environ['CC'],
            'CXX': os.environ['CXX'],
            'LINKERFORPROGRAMS': os.environ['CXX'],
            'LIBDIRFLAGS': '',
            # I don't think Xmipp can actually build without MPI.
            'MPI_CC': os.environ.get('MPICC', 'UNKNOWN'),
            'MPI_CXX': os.environ.get('MPICXX', 'UNKNOWN'),
            'MPI_LINKERFORPROGRAMS': os.environ.get('MPICXX', 'UNKNOWN'),
        }

        if LooseVersion(self.version) >= LooseVersion('3.20.07'):
            # Define include dirs with INCDIRFLAGS (Xmipp does not use CPPPATH directly)
            # NOTE(review): if $INCDIRFLAGS is non-empty there is no separating space
            # before the appended '-I../' — possible bug, confirm against generated config
            incdirflags = os.getenv('INCDIRFLAGS', '')
            incdirflags += '-I../ -I ' + ' -I'.join(
                os.environ['CPATH'].split(':'))
            opencv_root = get_software_root('OpenCV')
            if opencv_root:
                incdirflags += ' -I' + opencv_root + '/include/opencv4'
            params['INCDIRFLAGS'] = incdirflags

        # (dependency name, env-style config variable to set, required?)
        deps = [
            # Name of dependency, name of env var to set, required or not
            ('HDF5', None, True),
            ('SQLite', None, True),
            ('LibTIFF', None, True),
            ('libjpeg-turbo', None, True),
            ('Java', 'JAVA_HOME', True),
            ('MATLAB', 'MATLAB_DIR', False),
        ]

        cuda_root = get_software_root('CUDA')
        if cuda_root:
            params.update({'CUDA_BIN': os.path.join(cuda_root, 'bin')})
            params.update({'CUDA_LIB': os.path.join(cuda_root, 'lib64')})
            params.update({'NVCC': os.environ['CUDA_CXX']})
            # Their default for NVCC is to use g++-5, fix that
            nvcc_flags = '-v --x cu -D_FORCE_INLINES -Xcompiler -fPIC -Wno-deprecated-gpu-targets -std=c++11'
            if LooseVersion(self.version) >= LooseVersion('3.20.07'):
                nvcc_flags += ' --extended-lambda'
            params.update({'NVCC_CXXFLAGS': nvcc_flags})
            # NOTE(review): self.use_cuda is read below even when CUDA is absent,
            # so it is presumably initialized to False elsewhere — confirm
            self.use_cuda = True

            # Make sure cuFFTAdvisor is available even if unpacked under
            # a different name
            if not os.path.isdir('cuFFTAdvisor'):
                matches = glob.glob(os.path.join(self.srcdir,
                                                 'cuFFTAdvisor-*'))
                if len(matches) == 1:
                    cufft = os.path.basename(matches[0])
                    symlink(cufft,
                            os.path.join(self.srcdir, 'cuFFTAdvisor'),
                            use_abspath_source=False)
                    if LooseVersion(self.version) >= LooseVersion('3.20.07'):
                        symlink(
                            os.path.join(self.srcdir, cufft),
                            os.path.join(self.srcdir, 'xmipp', 'external',
                                         'cuFFTAdvisor'))
                else:
                    raise EasyBuildError(
                        "Failed to isolate path to cuFFTAdvisor-*: %s",
                        matches)

        # enable/disable optional features based on dependency presence
        for dep in ['CUDA', 'MATLAB']:
            use_dep = bool(get_software_root(dep))
            params.update({dep: use_dep})

        if get_software_root('OpenCV'):
            params.update({'OPENCV': True})
            if self.use_cuda:
                params.update({'OPENCVSUPPORTSCUDA': True})
            if LooseVersion(
                    get_software_version('OpenCV')) >= LooseVersion('3'):
                params.update({'OPENCV3': True})

        # verify required dependencies, collecting all missing ones before failing
        missing_deps = []
        for dep, var, required in deps:
            root = get_software_root(dep)
            if root:
                if var:
                    params.update({var: root})
            elif required:
                missing_deps.append(dep)

        if missing_deps:
            raise EasyBuildError(
                "One or more required dependencies not available: %s",
                ', '.join(missing_deps))

        # Here we know that Java exists since it is a required dependency
        jnipath = os.path.join(get_software_root('Java'), 'include')
        jnipath = os.pathsep.join([jnipath, os.path.join(jnipath, 'linux')])
        params.update({'JNI_CPPPATH': jnipath})

        regex_subs = []

        # Set the variables in the config file to match the environment from EasyBuild
        for (key, val) in params.items():
            regex_subs.extend([(r'^%s\s*=.*$' % key, r'%s = %s' % (key, val))])

        apply_regex_substitutions(self.cfgfile, regex_subs)

        self.setup_xmipp_env()

        py_ver = get_software_version('Python')
        py_maj_ver = LooseVersion(py_ver).version[0]
        py_min_ver = LooseVersion(py_ver).version[1]
        pyver = 'python%s.%s' % (py_maj_ver, py_min_ver)
        pyincpath = os.path.join(get_software_root('Python'), 'include', pyver)
        # Temp workaround for missing include/pythonx.y in CPATH
        env.setvar('CPATH', os.pathsep.join([os.environ['CPATH'], pyincpath]))

        super(EB_Xmipp, self).configure_step()