def set_as_default(self, module_folder_path, module_version):
    """
    Create a symlink named 'default' inside the package's module folder in order to set the default module version

    :param module_folder_path: module folder path, e.g. $HOME/easybuild/modules/all/Bison
    :param module_version: module version, e.g. 3.0.4
    """
    default_filepath = os.path.join(module_folder_path, 'default')

    if os.path.islink(default_filepath):
        link_target = resolve_path(default_filepath)
        remove_file(default_filepath)
        self.log.info("Removed default version marking from %s.", link_target)
    elif os.path.exists(default_filepath):
        raise EasyBuildError('Found an unexpected file named default in dir %s' % module_folder_path)

    symlink(module_version + self.MODULE_FILE_EXTENSION, default_filepath, use_abspath_source=False)
    self.log.info("Module default version file written to point to %s", default_filepath)
def get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR, robot_path=None):
    """
    Return a list of absolute paths where the specified subdir can be found, determined by the PYTHONPATH
    """
    paths = []

    # primary search path is robot path
    path_list = []
    if isinstance(robot_path, list):
        path_list = robot_path[:]
    elif robot_path is not None:
        path_list = [robot_path]
    # consider Python search path, e.g. setuptools install path for easyconfigs
    path_list.extend(sys.path)

    # figure out installation prefix, e.g. distutils install path for easyconfigs
    eb_path = which('eb')
    if eb_path is None:
        _log.warning("'eb' not found in $PATH, failed to determine installation prefix")
    else:
        # real location to 'eb' should be <install_prefix>/bin/eb
        eb_path = resolve_path(eb_path)
        install_prefix = os.path.dirname(os.path.dirname(eb_path))
        path_list.append(install_prefix)
        _log.debug("Also considering installation prefix %s..." % install_prefix)

    # look for desired subdirs
    for path in path_list:
        path = os.path.join(path, "easybuild", subdir)
        _log.debug("Checking for easybuild/%s at %s" % (subdir, path))
        try:
            if os.path.exists(path):
                paths.append(os.path.abspath(path))
                _log.debug("Added %s to list of paths for easybuild/%s" % (path, subdir))
        except OSError as err:
            raise EasyBuildError(str(err))

    return paths
def extract_compiler_version(compiler_name):
    """Extract compiler version for provided compiler_name."""
    # look for 3-4 digit version number, surrounded by spaces
    # examples:
    # gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
    # Intel(R) C Intel(R) 64 Compiler XE for applications running on Intel(R) 64, Version 15.0.1.133 Build 20141023
    version_regex = re.compile(r'\s([0-9]+(?:\.[0-9]+){1,3})\s', re.M)
    if compiler_name == 'gcc':
        out, _ = run_cmd("gcc --version", simple=False)
        res = version_regex.search(out)
        if res is None:
            raise EasyBuildError("Could not extract GCC version from %s", out)
        compiler_version = res.group(1)
    elif compiler_name in ['icc', 'ifort']:
        # A fully resolved icc/ifort (without symlinks) includes the release version in the path
        # e.g. .../composer_xe_2015.3.187/bin/intel64/icc
        # Match the last incidence of _ since we don't know what might be in the path, then split it up on /
        compiler_path = which(compiler_name)
        if compiler_path:
            compiler_version = resolve_path(compiler_path).split('_')[-1].split('/')[0]
        else:
            raise EasyBuildError("Compiler command '%s' not found", compiler_name)
        # Check what we have looks like a version number (the regex we use requires spaces around the version number)
        if version_regex.search(' ' + compiler_version + ' ') is None:
            error_msg = "Derived Intel compiler version '%s' doesn't look correct, " % compiler_version
            error_msg += "is compiler installed in a path like '.../composer_xe_2015.3.187/bin/intel64/icc'?"
            raise EasyBuildError(error_msg)
    else:
        raise EasyBuildError("Unknown compiler %s", compiler_name)

    if compiler_version:
        _log.debug("Extracted compiler version '%s' for %s", compiler_version, compiler_name)
    else:
        raise EasyBuildError("Failed to extract compiler version for %s using regex pattern '%s' from: %s",
                             compiler_name, version_regex.pattern, out)

    return compiler_version
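# Illustrative aside (not part of the EasyBuild sources): a minimal, standard-library-only sketch of how the
# version regex used above picks a dotted version number out of `gcc --version` output. The sample output
# string below is made up for demonstration purposes.
import re

version_regex = re.compile(r'\s([0-9]+(?:\.[0-9]+){1,3})\s', re.M)
sample_output = "gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)\n"
match = version_regex.search(sample_output)
if match:
    print(match.group(1))  # prints '4.4.7'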
def build_step(self):
    """Custom build procedure for TensorFlow."""
    # pre-create target installation directory
    mkdir(os.path.join(self.installdir, self.pylibdir), parents=True)

    binutils_root = get_software_root('binutils')
    if binutils_root:
        binutils_bin = os.path.join(binutils_root, 'bin')
    else:
        raise EasyBuildError("Failed to determine installation prefix for binutils")

    gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
    if gcc_root:
        gcc_lib64 = os.path.join(gcc_root, 'lib64')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # figure out location of GCC include files
        res = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
        if res and len(res) == 1:
            gcc_lib_inc = res[0]
        else:
            raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", res)

        # make sure include-fixed directory is where we expect it to be
        gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
        if not os.path.exists(gcc_lib_inc_fixed):
            raise EasyBuildError("Derived directory %s does not exist", gcc_lib_inc_fixed)

        # also check on location of include/c++/<gcc version> directory
        gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
        if not os.path.exists(gcc_cplusplus_inc):
            raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)
    else:
        raise EasyBuildError("Failed to determine installation prefix for GCC")

    inc_paths = [gcc_lib_inc, gcc_lib_inc_fixed, gcc_cplusplus_inc]
    lib_paths = [gcc_lib64]

    cuda_root = get_software_root('CUDA')
    if cuda_root:
        inc_paths.append(os.path.join(cuda_root, 'include'))
        lib_paths.append(os.path.join(cuda_root, 'lib64'))

    # fix hardcoded locations of compilers & tools
    cxx_inc_dir_lines = '\n'.join(r'cxx_builtin_include_directory: "%s"' % resolve_path(p) for p in inc_paths)
    cxx_inc_dir_lines_no_resolv_path = '\n'.join(r'cxx_builtin_include_directory: "%s"' % p for p in inc_paths)
    regex_subs = [
        (r'-B/usr/bin/', '-B%s/ %s' % (binutils_bin, ' '.join('-L%s/' % p for p in lib_paths))),
        (r'(cxx_builtin_include_directory:).*', ''),
        (r'^toolchain {', 'toolchain {\n' + cxx_inc_dir_lines + '\n' + cxx_inc_dir_lines_no_resolv_path),
    ]
    for tool in ['ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy', 'objdump', 'strip']:
        path = which(tool)
        if path:
            regex_subs.append((os.path.join('/usr', 'bin', tool), path))
        else:
            raise EasyBuildError("Failed to determine path to '%s'", tool)

    # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurrences of -fPIE/-pie if -fPIC is used
    if self.toolchain.options.get('pic', None):
        regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

    # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
    for path, dirnames, filenames in os.walk(os.getcwd()):
        for filename in filenames:
            if filename.startswith('CROSSTOOL'):
                full_path = os.path.join(path, filename)
                self.log.info("Patching %s", full_path)
                apply_regex_substitutions(full_path, regex_subs)

    tmpdir = tempfile.mkdtemp(suffix='-bazel-build')
    user_root_tmpdir = tempfile.mkdtemp(suffix='-user_root')

    # compose "bazel build" command with all its options...
    cmd = [
        self.cfg['prebuildopts'],
        'bazel',
        '--output_base=%s' % tmpdir,
        '--install_base=%s' % os.path.join(tmpdir, 'inst_base'),
        '--output_user_root=%s' % user_root_tmpdir,
        'build'
    ]

    # build with optimization enabled
    # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode
    cmd.append('--compilation_mode=opt')

    # select 'opt' config section (this is *not* the same as --compilation_mode=opt!)
    # https://docs.bazel.build/versions/master/user-manual.html#flag--config
    cmd.append('--config=opt')

    # make Bazel print full command line + make it verbose on failures
    # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands
    # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures
    cmd.extend(['--subcommands', '--verbose_failures'])

    # limit the number of parallel jobs running simultaneously (useful on KNL)...
    cmd.append('--jobs=%s' % self.cfg['parallel'])

    if self.toolchain.options.get('pic', None):
        cmd.append('--copt="-fPIC"')

    # include install location of Python packages in $PYTHONPATH,
    # and specify that value of $PYTHONPATH should be passed down into Bazel build environment;
    # this is required to make sure that Python packages included as extensions are found at build time;
    # see also https://github.com/tensorflow/tensorflow/issues/22395
    pythonpath = os.getenv('PYTHONPATH', '')
    env.setvar('PYTHONPATH', os.pathsep.join([os.path.join(self.installdir, self.pylibdir), pythonpath]))

    cmd.append('--action_env=PYTHONPATH')

    # Also export $EBPYTHONPREFIXES to handle the multi-deps python setup
    # See https://github.com/easybuilders/easybuild-easyblocks/pull/1664
    if 'EBPYTHONPREFIXES' in os.environ:
        cmd.append('--action_env=EBPYTHONPREFIXES')

    # use same configuration for both host and target programs, which can speed up the build
    # only done when optarch is enabled, since this implicitly assumes that host and target platform are the same
    # see https://docs.bazel.build/versions/master/guide.html#configurations
    if self.toolchain.options.get('optarch'):
        cmd.append('--distinct_host_configuration=false')

    cmd.append(self.cfg['buildopts'])

    # TF 2 (final) sets this in configure
    if LooseVersion(self.version) < LooseVersion('2.0'):
        if cuda_root:
            cmd.append('--config=cuda')

    # if mkl-dnn is listed as a dependency it is used. Otherwise downloaded if with_mkl_dnn is true
    mkl_root = get_software_root('mkl-dnn')
    if mkl_root:
        cmd.extend(['--config=mkl'])
        cmd.insert(0, "export TF_MKL_DOWNLOAD=0 &&")
        cmd.insert(0, "export TF_MKL_ROOT=%s &&" % mkl_root)
    elif self.cfg['with_mkl_dnn']:
        # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn)
        cmd.extend(['--config=mkl'])
        cmd.insert(0, "export TF_MKL_DOWNLOAD=1 && ")

    # specify target of the build command as last argument
    cmd.append('//tensorflow/tools/pip_package:build_pip_package')

    run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True)

    # run generated 'build_pip_package' script to build the .whl
    cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir
    run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR, robot_path=None):
    """
    Return a list of absolute paths where the specified subdir can be found, determined by the PYTHONPATH
    """
    paths = []

    # primary search path is robot path
    path_list = []
    if isinstance(robot_path, list):
        path_list = robot_path[:]
    elif robot_path is not None:
        path_list = [robot_path]
    # consider Python search path, e.g. setuptools install path for easyconfigs
    path_list.extend(sys.path)

    # figure out installation prefix, e.g. distutils install path for easyconfigs
    # prefer using path specified in $EB_SCRIPT_PATH (if defined), which is set by 'eb' wrapper script
    eb_path = os.getenv('EB_SCRIPT_PATH')
    if eb_path is None:
        # try to determine location of 'eb' script via $PATH, as fallback mechanism
        eb_path = which('eb')
        _log.info("Location to 'eb' script (found via $PATH): %s", eb_path)
    else:
        _log.info("Found location to 'eb' script via $EB_SCRIPT_PATH: %s", eb_path)

    if eb_path is None:
        warning_msg = "'eb' not found in $PATH, failed to determine installation prefix!"
        _log.warning(warning_msg)
        print_warning(warning_msg)
    else:
        # eb_path is location to 'eb' wrapper script, e.g. <install_prefix>/bin/eb
        # so installation prefix is usually two levels up
        install_prefix = os.path.dirname(os.path.dirname(eb_path))

        # only consider resolved path to 'eb' script if desired subdir is not found relative to 'eb' script location
        if os.path.exists(os.path.join(install_prefix, 'easybuild', subdir)):
            path_list.append(install_prefix)
            _log.info("Also considering installation prefix %s (determined via path to 'eb' script)...",
                      install_prefix)
        else:
            _log.info("Not considering %s (no easybuild/%s subdir found)", install_prefix, subdir)

            # also consider fully resolved location to 'eb' wrapper
            # see https://github.com/easybuilders/easybuild-framework/pull/2248
            resolved_eb_path = resolve_path(eb_path)
            if eb_path != resolved_eb_path:
                install_prefix = os.path.dirname(os.path.dirname(resolved_eb_path))
                path_list.append(install_prefix)
                _log.info("Also considering installation prefix %s (via resolved path to 'eb')...", install_prefix)

    # look for desired subdirs
    for path in path_list:
        path = os.path.join(path, "easybuild", subdir)
        _log.debug("Checking for easybuild/%s at %s" % (subdir, path))
        try:
            if os.path.exists(path):
                paths.append(os.path.abspath(path))
                _log.debug("Added %s to list of paths for easybuild/%s" % (path, subdir))
        except OSError as err:
            raise EasyBuildError(str(err))

    return paths
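# Illustrative aside (not part of the EasyBuild sources): a tiny standard-library sketch of the "two levels up"
# prefix derivation used above. The example path is hypothetical; it just shows that applying os.path.dirname
# twice strips the trailing 'bin/eb' component.
import os

eb_script = '/opt/easybuild/bin/eb'  # hypothetical location of the 'eb' wrapper script
install_prefix = os.path.dirname(os.path.dirname(eb_script))
print(install_prefix)  # prints '/opt/easybuild'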
def prepare_step(self, *args, **kwargs):
    """Do compiler appropriate prepare step, determine system compiler version and prefix."""
    if self.cfg['generate_standalone_module']:
        if self.cfg['name'] in ['GCC', 'GCCcore']:
            EB_GCC.prepare_step(self, *args, **kwargs)
        elif self.cfg['name'] in ['icc']:
            EB_icc.prepare_step(self, *args, **kwargs)
        elif self.cfg['name'] in ['ifort']:
            EB_ifort.prepare_step(self, *args, **kwargs)
        else:
            raise EasyBuildError("I don't know how to do the prepare_step for %s", self.cfg['name'])
    else:
        Bundle.prepare_step(self, *args, **kwargs)

    # Determine compiler path (real path, with resolved symlinks)
    compiler_name = self.cfg['name'].lower()
    if compiler_name == 'gcccore':
        compiler_name = 'gcc'
    path_to_compiler = which(compiler_name)
    if path_to_compiler:
        path_to_compiler = resolve_path(path_to_compiler)
        self.log.info("Found path to compiler '%s' (with symlinks resolved): %s", compiler_name, path_to_compiler)
    else:
        raise EasyBuildError("%s not found in $PATH", compiler_name)

    # Determine compiler version
    self.compiler_version = extract_compiler_version(compiler_name)

    # Determine installation prefix
    if compiler_name == 'gcc':
        # strip off 'bin/gcc'
        self.compiler_prefix = os.path.dirname(os.path.dirname(path_to_compiler))
    elif compiler_name in ['icc', 'ifort']:
        intelvars_fn = path_to_compiler + 'vars.sh'
        if os.path.isfile(intelvars_fn):
            self.log.debug("Trying to determine compiler install prefix from %s", intelvars_fn)
            intelvars_txt = read_file(intelvars_fn)
            prod_dir_regex = re.compile(r'^PROD_DIR=(.*)$', re.M)
            res = prod_dir_regex.search(intelvars_txt)
            if res:
                self.compiler_prefix = res.group(1)
            else:
                raise EasyBuildError("Failed to determine %s installation prefix from %s",
                                     compiler_name, intelvars_fn)
        else:
            # strip off 'bin/intel*/icc'
            self.compiler_prefix = os.path.dirname(os.path.dirname(os.path.dirname(path_to_compiler)))

            # For versions 2016+ of Intel compilers they changed the installation path so must shave off 2 more
            # directories from result of the above
            if LooseVersion(self.compiler_version) >= LooseVersion('2016'):
                self.compiler_prefix = os.path.dirname(os.path.dirname(self.compiler_prefix))
    else:
        raise EasyBuildError("Unknown system compiler %s" % self.cfg['name'])

    if not os.path.exists(self.compiler_prefix):
        raise EasyBuildError("Path derived for system compiler (%s) does not exist: %s!",
                             compiler_name, self.compiler_prefix)
    self.log.debug("Derived version/install prefix for system compiler %s: %s, %s",
                   compiler_name, self.compiler_version, self.compiler_prefix)

    # If EasyConfig specified "real" version (not 'system' which means 'derive automatically'), check it
    if self.cfg['version'] == 'system':
        self.log.info("Found specified version '%s', going with derived compiler version '%s'",
                      self.cfg['version'], self.compiler_version)
    elif self.cfg['version'] != self.compiler_version:
        raise EasyBuildError("Specified version (%s) does not match version reported by compiler (%s)" %
                             (self.cfg['version'], self.compiler_version))
def build_step(self):
    """Custom build procedure for TensorFlow."""
    # pre-create target installation directory
    mkdir(os.path.join(self.installdir, self.pylibdir), parents=True)

    binutils_root = get_software_root('binutils')
    if binutils_root:
        binutils_bin = os.path.join(binutils_root, 'bin')
    else:
        raise EasyBuildError("Failed to determine installation prefix for binutils")

    gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
    if gcc_root:
        gcc_lib64 = os.path.join(gcc_root, 'lib64')
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # figure out location of GCC include files
        res = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
        if res and len(res) == 1:
            gcc_lib_inc = res[0]
        else:
            raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", res)

        # make sure include-fixed directory is where we expect it to be
        gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
        if not os.path.exists(gcc_lib_inc_fixed):
            raise EasyBuildError("Derived directory %s does not exist", gcc_lib_inc_fixed)

        # also check on location of include/c++/<gcc version> directory
        gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
        if not os.path.exists(gcc_cplusplus_inc):
            raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)
    else:
        raise EasyBuildError("Failed to determine installation prefix for GCC")

    inc_paths = [gcc_lib_inc, gcc_lib_inc_fixed, gcc_cplusplus_inc]
    lib_paths = [gcc_lib64]

    cuda_root = get_software_root('CUDA')
    if cuda_root:
        inc_paths.append(os.path.join(cuda_root, 'include'))
        lib_paths.append(os.path.join(cuda_root, 'lib64'))

    # fix hardcoded locations of compilers & tools
    cxx_inc_dir_lines = '\n'.join(r'cxx_builtin_include_directory: "%s"' % resolve_path(p) for p in inc_paths)
    cxx_inc_dir_lines_no_resolv_path = '\n'.join(r'cxx_builtin_include_directory: "%s"' % p for p in inc_paths)
    regex_subs = [
        (r'-B/usr/bin/', '-B%s/ %s' % (binutils_bin, ' '.join('-L%s/' % p for p in lib_paths))),
        (r'(cxx_builtin_include_directory:).*', ''),
        (r'^toolchain {', 'toolchain {\n' + cxx_inc_dir_lines + '\n' + cxx_inc_dir_lines_no_resolv_path),
    ]
    for tool in ['ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy', 'objdump', 'strip']:
        path = which(tool)
        if path:
            regex_subs.append((os.path.join('/usr', 'bin', tool), path))
        else:
            raise EasyBuildError("Failed to determine path to '%s'", tool)

    # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurrences of -fPIE/-pie if -fPIC is used
    if self.toolchain.options.get('pic', None):
        regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

    # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
    for path, dirnames, filenames in os.walk(self.start_dir):
        for filename in filenames:
            if filename.startswith('CROSSTOOL'):
                full_path = os.path.join(path, filename)
                self.log.info("Patching %s", full_path)
                apply_regex_substitutions(full_path, regex_subs)

    tmpdir = tempfile.mkdtemp(suffix='-bazel-build')

    # compose "bazel build" command with all its options...
    cmd = [self.cfg['prebuildopts'], 'bazel', '--output_base=%s' % tmpdir, 'build']

    # build with optimization enabled
    # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode
    cmd.append('--compilation_mode=opt')

    # select 'opt' config section (this is *not* the same as --compilation_mode=opt!)
    # https://docs.bazel.build/versions/master/user-manual.html#flag--config
    cmd.append('--config=opt')

    # make Bazel print full command line + make it verbose on failures
    # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands
    # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures
    cmd.extend(['--subcommands', '--verbose_failures'])

    # limit the number of parallel jobs running simultaneously (useful on KNL)...
    cmd.append('--jobs=%s' % self.cfg['parallel'])

    if self.toolchain.options.get('pic', None):
        cmd.append('--copt="-fPIC"')

    cmd.append(self.cfg['buildopts'])

    if cuda_root:
        cmd.append('--config=cuda')

    # enable mkl-dnn by default, but only if cuDNN is not listed as dependency
    if self.cfg['with_mkl_dnn'] is None and get_software_root('cuDNN') is None:
        self.log.info("Enabling use of mkl-dnn since cuDNN is not listed as dependency")
        self.cfg['with_mkl_dnn'] = True

    # if mkl-dnn is listed as a dependency it is used. Otherwise downloaded if with_mkl_dnn is true
    mkl_root = get_software_root('mkl-dnn')
    if mkl_root:
        cmd.extend(['--config=mkl'])
        cmd.insert(0, 'export TF_MKL_DOWNLOAD=0 &&')
        cmd.insert(0, 'export TF_MKL_ROOT=%s &&' % mkl_root)
    elif self.cfg['with_mkl_dnn']:
        # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn)
        cmd.extend(['--config=mkl'])
        cmd.insert(0, 'export TF_MKL_DOWNLOAD=1 &&')

    # specify target of the build command as last argument
    cmd.append('//tensorflow/tools/pip_package:build_pip_package')

    run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True)

    # run generated 'build_pip_package' script to build the .whl
    cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir
    run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def make_module_step(self, fake=False):
    """Install .modulerc file."""
    modfile_path = self.module_generator.get_module_filepath(fake=fake)
    modulerc = os.path.join(os.path.dirname(modfile_path), self.module_generator.DOT_MODULERC)

    deps = self.cfg['dependencies']
    if len(deps) != 1:
        raise EasyBuildError("There should be exactly one dependency specified, found %d", len(deps))

    # names should match
    if self.name != deps[0]['name']:
        raise EasyBuildError("Name does not match dependency name: %s vs %s", self.name, deps[0]['name'])

    # ensure version to alias to is a prefix of the version of the dependency
    if not deps[0]['version'].startswith(self.version) and not self.version == "default":
        raise EasyBuildError("Version is not 'default' and not a prefix of dependency version: %s vs %s",
                             self.version, deps[0]['version'])

    alias_modname = deps[0]['short_mod_name']
    self.log.info("Adding module version alias for %s to %s", alias_modname, modulerc)

    # add symlink to wrapped module file when generating .modulerc in temporary directory (done during sanity check)
    # this is strictly required for Lmod 6.x, for which .modulerc and wrapped module file must be in same location
    if fake:
        wrapped_mod_path = self.modules_tool.modulefile_path(alias_modname)
        wrapped_mod_filename = os.path.basename(wrapped_mod_path)
        target = os.path.join(os.path.dirname(modulerc), wrapped_mod_filename)
        mkdir(os.path.dirname(target), parents=True)
        symlink(wrapped_mod_path, target)

    module_version_specs = {
        'modname': alias_modname,
        'sym_version': self.version,
        'version': deps[0]['version'],
    }
    self.module_generator.modulerc(module_version=module_version_specs, filepath=modulerc)

    if not fake:
        print_msg("updated .modulerc file at %s" % modulerc, log=self.log)

        # symlink .modulerc in other locations (unless they're already linked)
        mod_symlink_dirs = ActiveMNS().det_module_symlink_paths(self.cfg)
        mod_subdir = os.path.dirname(ActiveMNS().det_full_module_name(self.cfg))
        mod_install_path = install_path('mod')
        modulerc_filename = os.path.basename(modulerc)

        for mod_symlink_dir in mod_symlink_dirs:
            modulerc_symlink = os.path.join(mod_install_path, mod_symlink_dir, mod_subdir, modulerc_filename)
            if os.path.islink(modulerc_symlink):
                if resolve_path(modulerc_symlink) == resolve_path(modulerc):
                    print_msg("symlink %s to %s already exists", modulerc_symlink, modulerc)
                else:
                    raise EasyBuildError("%s exists but is not a symlink to %s", modulerc_symlink, modulerc)
            else:
                # Make sure folder exists
                mkdir(os.path.dirname(modulerc_symlink), parents=True)
                symlink(modulerc, modulerc_symlink)
                print_msg("created symlink %s to .modulerc file at %s", modulerc_symlink, modulerc, log=self.log)

    modpath = self.module_generator.get_modules_path(fake=fake)
    self.invalidate_module_caches(modpath)

    return modpath
def prepare_step(self, *args, **kwargs):
    """Load all dependencies, determine system MPI version, prefix and any associated envvars."""

    # Do the bundle prepare step to ensure any deps are loaded (no need to worry about licences for Intel MPI)
    Bundle.prepare_step(self, *args, **kwargs)

    # Prepare additional parameters: determine system MPI version, prefix and any associated envvars.
    mpi_name = self.cfg['name'].lower()

    # Determine MPI wrapper path (real path, with resolved symlinks) to ensure it exists
    if mpi_name == 'impi':
        # For impi the version information is only found in *some* of the wrappers it ships, in particular it is
        # not in mpicc
        mpi_c_wrapper = 'mpiicc'
        path_to_mpi_c_wrapper = which(mpi_c_wrapper)
        if not path_to_mpi_c_wrapper:
            mpi_c_wrapper = 'mpigcc'
            path_to_mpi_c_wrapper = which(mpi_c_wrapper)
            if not path_to_mpi_c_wrapper:
                raise EasyBuildError("Could not find suitable MPI wrapper to extract version for impi")
    else:
        mpi_c_wrapper = 'mpicc'
        path_to_mpi_c_wrapper = which(mpi_c_wrapper)

    if path_to_mpi_c_wrapper:
        path_to_mpi_c_wrapper = resolve_path(path_to_mpi_c_wrapper)
        self.log.info("Found path to MPI implementation '%s' %s compiler (with symlinks resolved): %s",
                      mpi_name, mpi_c_wrapper, path_to_mpi_c_wrapper)
    else:
        raise EasyBuildError("%s not found in $PATH", mpi_c_wrapper)

    # Determine MPI version, installation prefix and underlying compiler
    if mpi_name in ('openmpi', 'spectrummpi'):
        # Spectrum MPI is based on Open MPI so is also covered by this logic
        output_of_ompi_info, _ = run_cmd("ompi_info", simple=False)

        # Extract the version of the MPI implementation
        if mpi_name == 'spectrummpi':
            mpi_version_string = 'Spectrum MPI'
        else:
            mpi_version_string = 'Open MPI'
        self.mpi_version = self.extract_ompi_setting(mpi_version_string, output_of_ompi_info)

        # Extract the installation prefix
        self.mpi_prefix = self.extract_ompi_setting("Prefix", output_of_ompi_info)

        # Extract any OpenMPI environment variables in the current environment and ensure they are added to the
        # final module
        self.mpi_env_vars = dict((key, value) for key, value in os.environ.items() if key.startswith('OMPI_'))

        # Extract the C compiler used underneath the MPI implementation, check for the definition of OMPI_MPICC
        self.mpi_c_compiler = self.extract_ompi_setting("C compiler", output_of_ompi_info)

    elif mpi_name == 'impi':
        # Extract the version of IntelMPI
        # The prefix in the mpiicc (or mpigcc) script can be used to extract the explicit version
        contents_of_mpixcc = read_file(path_to_mpi_c_wrapper)
        prefix_regex = re.compile(r'(?<=compilers_and_libraries_)(.*)(?=/linux/mpi)', re.M)

        self.mpi_version = None
        res = prefix_regex.search(contents_of_mpixcc)
        if res:
            self.mpi_version = res.group(1)
        else:
            # old iimpi version
            prefix_regex = re.compile(r'^prefix=(.*)$', re.M)
            res = prefix_regex.search(contents_of_mpixcc)
            if res:
                self.mpi_version = res.group(1).split('/')[-1]

        if self.mpi_version is None:
            raise EasyBuildError("No version found for system Intel MPI")
        else:
            self.log.info("Found Intel MPI version %s for system MPI" % self.mpi_version)

        # Extract the installation prefix, if I_MPI_ROOT is defined, let's use that
        i_mpi_root = os.environ.get('I_MPI_ROOT')
        if i_mpi_root:
            self.mpi_prefix = i_mpi_root
        else:
            # Else just go up three directories from where mpiicc is found
            # (it's 3 because bin64 is a symlink to intel64/bin and we are assuming 64 bit)
            self.mpi_prefix = os.path.dirname(os.path.dirname(os.path.dirname(path_to_mpi_c_wrapper)))

        # Extract any IntelMPI environment variables in the current environment and ensure they are added to the
        # final module
        self.mpi_env_vars = {}
        for key, value in os.environ.items():
            i_mpi_key = key.startswith('I_MPI_') or key.startswith('MPICH_')
            mpi_profile_key = key.startswith('MPI') and key.endswith('PROFILE')
            if i_mpi_key or mpi_profile_key:
                self.mpi_env_vars[key] = value

        # Extract the C compiler used underneath Intel MPI
        compile_info, exit_code = run_cmd("%s -compile-info" % mpi_c_wrapper, simple=False)
        if exit_code == 0:
            self.mpi_c_compiler = compile_info.split(' ', 1)[0]
        else:
            raise EasyBuildError("Could not determine C compiler underneath Intel MPI, '%s -compile-info' "
                                 "returned %s", mpi_c_wrapper, compile_info)

    else:
        raise EasyBuildError("Unrecognised system MPI implementation %s", mpi_name)

    # Ensure install path of system MPI actually exists
    if not os.path.exists(self.mpi_prefix):
        raise EasyBuildError("Path derived for system MPI (%s) does not exist: %s!", mpi_name, self.mpi_prefix)

    self.log.debug("Derived version/install prefix for system MPI %s: %s, %s",
                   mpi_name, self.mpi_version, self.mpi_prefix)

    # For the version of the underlying C compiler need to explicitly extract (to be certain)
    self.c_compiler_version = extract_compiler_version(self.mpi_c_compiler)
    self.log.debug("Derived compiler/version for C compiler underneath system MPI %s: %s, %s",
                   mpi_name, self.mpi_c_compiler, self.c_compiler_version)

    # If EasyConfig specified "real" version (not 'system' which means 'derive automatically'), check it
    if self.cfg['version'] == 'system':
        self.log.info("Found specified version '%s', going with derived MPI version '%s'",
                      self.cfg['version'], self.mpi_version)
    elif self.cfg['version'] == self.mpi_version:
        self.log.info("Specified MPI version %s matches found version" % self.mpi_version)
    else:
        raise EasyBuildError("Specified version (%s) does not match version reported by MPI (%s)",
                             self.cfg['version'], self.mpi_version)
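# Illustrative aside (not part of the EasyBuild sources): a minimal standard-library sketch of the
# lookbehind/lookahead regex used above to pull the Intel MPI version out of an mpiicc wrapper script.
# The sample wrapper line below is made up for demonstration purposes.
import re

prefix_regex = re.compile(r'(?<=compilers_and_libraries_)(.*)(?=/linux/mpi)', re.M)
sample_wrapper_line = "I_MPI_ROOT=/opt/intel/compilers_and_libraries_2017.4.196/linux/mpi\n"
match = prefix_regex.search(sample_wrapper_line)
if match:
    print(match.group(1))  # prints '2017.4.196'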
def make_module_step(self, fake=False):
    """Install .modulerc file."""
    modfile_path = self.module_generator.get_module_filepath(fake=fake)
    modulerc = os.path.join(os.path.dirname(modfile_path), self.module_generator.DOT_MODULERC)

    deps = self.cfg['dependencies']
    if len(deps) != 1:
        raise EasyBuildError("There should be exactly one dependency specified, found %d", len(deps))

    # names should match
    if self.name != deps[0]['name']:
        raise EasyBuildError("Name does not match dependency name: %s vs %s", self.name, deps[0]['name'])

    # ensure version to alias to is a prefix of the version of the dependency
    if not deps[0]['version'].startswith(self.version):
        raise EasyBuildError("Version is not a prefix of dependency version: %s vs %s",
                             self.version, deps[0]['version'])

    alias_modname = deps[0]['short_mod_name']
    self.log.info("Adding module version alias for %s to %s", alias_modname, modulerc)

    # add symlink to wrapped module file when generating .modulerc in temporary directory (done during sanity check)
    # this is strictly required for Lmod 6.x, for which .modulerc and wrapped module file must be in same location
    if fake:
        wrapped_mod_path = self.modules_tool.modulefile_path(alias_modname)
        wrapped_mod_filename = os.path.basename(wrapped_mod_path)
        target = os.path.join(os.path.dirname(modulerc), wrapped_mod_filename)
        mkdir(os.path.dirname(target), parents=True)
        symlink(wrapped_mod_path, target)

    module_version_specs = {
        'modname': alias_modname,
        'sym_version': self.version,
        'version': deps[0]['version'],
    }
    self.module_generator.modulerc(module_version=module_version_specs, filepath=modulerc)

    if not fake:
        print_msg("updated .modulerc file at %s" % modulerc, log=self.log)

        # symlink .modulerc in other locations (unless they're already linked)
        mod_symlink_dirs = ActiveMNS().det_module_symlink_paths(self.cfg)
        mod_subdir = os.path.dirname(ActiveMNS().det_full_module_name(self.cfg))
        mod_install_path = install_path('mod')
        modulerc_filename = os.path.basename(modulerc)

        for mod_symlink_dir in mod_symlink_dirs:
            modulerc_symlink = os.path.join(mod_install_path, mod_symlink_dir, mod_subdir, modulerc_filename)
            if os.path.islink(modulerc_symlink):
                if resolve_path(modulerc_symlink) == resolve_path(modulerc):
                    print_msg("symlink %s to %s already exists", modulerc_symlink, modulerc)
                else:
                    raise EasyBuildError("%s exists but is not a symlink to %s", modulerc_symlink, modulerc)
            else:
                symlink(modulerc, modulerc_symlink)
                print_msg("created symlink %s to .modulerc file at %s", modulerc_symlink, modulerc, log=self.log)

    modpath = self.module_generator.get_modules_path(fake=fake)
    self.invalidate_module_caches(modpath)

    return modpath
def __init__(self, *args, **kwargs):
    """Extra initialization: determine system compiler version and prefix."""
    super(SystemCompiler, self).__init__(*args, **kwargs)

    # Determine compiler path (real path, with resolved symlinks)
    compiler_name = self.cfg['name'].lower()
    if compiler_name == 'gcccore':
        compiler_name = 'gcc'
    path_to_compiler = which(compiler_name)
    if path_to_compiler:
        path_to_compiler = resolve_path(path_to_compiler)
        self.log.info("Found path to compiler '%s' (with symlinks resolved): %s", compiler_name, path_to_compiler)
    else:
        raise EasyBuildError("%s not found in $PATH", compiler_name)

    # Determine compiler version
    self.compiler_version = extract_compiler_version(compiler_name)

    # Determine installation prefix
    if compiler_name == 'gcc':
        # strip off 'bin/gcc'
        self.compiler_prefix = os.path.dirname(os.path.dirname(path_to_compiler))
    elif compiler_name in ['icc', 'ifort']:
        intelvars_fn = path_to_compiler + 'vars.sh'
        if os.path.isfile(intelvars_fn):
            self.log.debug("Trying to determine compiler install prefix from %s", intelvars_fn)
            intelvars_txt = read_file(intelvars_fn)
            prod_dir_regex = re.compile(r'^PROD_DIR=(.*)$', re.M)
            res = prod_dir_regex.search(intelvars_txt)
            if res:
                self.compiler_prefix = res.group(1)
            else:
                raise EasyBuildError("Failed to determine %s installation prefix from %s",
                                     compiler_name, intelvars_fn)
        else:
            # strip off 'bin/intel*/icc'
            self.compiler_prefix = os.path.dirname(os.path.dirname(os.path.dirname(path_to_compiler)))

            # For versions 2016+ of Intel compilers they changed the installation path so must shave off 2 more
            # directories from result of the above
            if LooseVersion(self.compiler_version) >= LooseVersion('2016'):
                self.compiler_prefix = os.path.dirname(os.path.dirname(self.compiler_prefix))
    else:
        raise EasyBuildError("Unknown system compiler %s" % self.cfg['name'])

    if not os.path.exists(self.compiler_prefix):
        raise EasyBuildError("Path derived for system compiler (%s) does not exist: %s!",
                             compiler_name, self.compiler_prefix)
    self.log.debug("Derived version/install prefix for system compiler %s: %s, %s",
                   compiler_name, self.compiler_version, self.compiler_prefix)

    # If EasyConfig specified "real" version (not 'system' which means 'derive automatically'), check it
    if self.cfg['version'] == 'system':
        self.log.info("Found specified version '%s', going with derived compiler version '%s'",
                      self.cfg['version'], self.compiler_version)
    elif self.cfg['version'] != self.compiler_version:
        raise EasyBuildError("Specified version (%s) does not match version reported by compiler (%s)" %
                             (self.cfg['version'], self.compiler_version))

    # fix installdir and module names (may differ because of changes to version)
    mns = ActiveMNS()
    self.cfg.full_mod_name = mns.det_full_module_name(self.cfg)
    self.cfg.short_mod_name = mns.det_short_module_name(self.cfg)
    self.cfg.mod_subdir = mns.det_module_subdir(self.cfg)

    # keep track of original values, for restoring later
    self.orig_version = self.cfg['version']
    self.orig_installdir = self.installdir
def patch_crosstool_files(self):
    """Patches the CROSSTOOL files to include EasyBuild provided compiler paths"""
    inc_paths, lib_paths = [], []

    gcc_root = get_software_root('GCCcore') or get_software_root('GCC')
    if gcc_root:
        gcc_lib64 = os.path.join(gcc_root, 'lib64')
        lib_paths.append(gcc_lib64)
        gcc_ver = get_software_version('GCCcore') or get_software_version('GCC')

        # figure out location of GCC include files
        res = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include'))
        if res and len(res) == 1:
            gcc_lib_inc = res[0]
            inc_paths.append(gcc_lib_inc)
        else:
            raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", res)

        # make sure include-fixed directory is where we expect it to be
        gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed')
        if os.path.exists(gcc_lib_inc_fixed):
            inc_paths.append(gcc_lib_inc_fixed)
        else:
            self.log.info("Derived directory %s does not exist, so discarding it", gcc_lib_inc_fixed)

        # also check on location of include/c++/<gcc version> directory
        gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver)
        if os.path.exists(gcc_cplusplus_inc):
            inc_paths.append(gcc_cplusplus_inc)
        else:
            raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc)
    else:
        raise EasyBuildError("Failed to determine installation prefix for GCC")

    cuda_root = get_software_root('CUDA')
    if cuda_root:
        inc_paths.append(os.path.join(cuda_root, 'include'))
        lib_paths.append(os.path.join(cuda_root, 'lib64'))

    # fix hardcoded locations of compilers & tools
    cxx_inc_dirs = ['cxx_builtin_include_directory: "%s"' % resolve_path(p) for p in inc_paths]
    cxx_inc_dirs += ['cxx_builtin_include_directory: "%s"' % p for p in inc_paths]
    regex_subs = [
        (r'-B/usr/bin/', '-B%s %s' % (self.binutils_bin_path, ' '.join('-L%s/' % p for p in lib_paths))),
        (r'(cxx_builtin_include_directory:).*', ''),
        (r'^toolchain {', 'toolchain {\n' + '\n'.join(cxx_inc_dirs)),
    ]
    for tool in ['ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy', 'objdump', 'strip']:
        path = which(tool)
        if path:
            regex_subs.append((os.path.join('/usr', 'bin', tool), path))
        else:
            raise EasyBuildError("Failed to determine path to '%s'", tool)

    # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurrences of -fPIE/-pie if -fPIC is used
    if self.toolchain.options.get('pic', None):
        regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')])

    # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries
    for path, dirnames, filenames in os.walk(os.getcwd()):
        for filename in filenames:
            if filename.startswith('CROSSTOOL'):
                full_path = os.path.join(path, filename)
                self.log.info("Patching %s", full_path)
                apply_regex_substitutions(full_path, regex_subs)
def prepare_step(self, *args, **kwargs):
    """Load all dependencies, determine system MPI version, prefix and any associated envvars."""

    # Do the bundle prepare step to ensure any deps are loaded (no need to worry about licences for Intel MPI)
    Bundle.prepare_step(self, *args, **kwargs)

    # Prepare additional parameters: determine system MPI version, prefix and any associated envvars.
    mpi_name = self.cfg['name'].lower()

    # Determine MPI wrapper path (real path, with resolved symlinks) to ensure it exists
    if mpi_name == 'impi':
        # For impi the version information is only found in *some* of the wrappers it ships, in particular it is
        # not in mpicc
        mpi_c_wrapper = 'mpiicc'
        path_to_mpi_c_wrapper = which(mpi_c_wrapper)
        if not path_to_mpi_c_wrapper:
            mpi_c_wrapper = 'mpigcc'
            path_to_mpi_c_wrapper = which(mpi_c_wrapper)
            if not path_to_mpi_c_wrapper:
                raise EasyBuildError("Could not find suitable MPI wrapper to extract version for impi")
    else:
        mpi_c_wrapper = 'mpicc'
        path_to_mpi_c_wrapper = which(mpi_c_wrapper)

    if path_to_mpi_c_wrapper:
        path_to_mpi_c_wrapper = resolve_path(path_to_mpi_c_wrapper)
        self.log.info("Found path to MPI implementation '%s' %s compiler (with symlinks resolved): %s",
                      mpi_name, mpi_c_wrapper, path_to_mpi_c_wrapper)
    else:
        raise EasyBuildError("%s not found in $PATH", mpi_c_wrapper)

    # Determine MPI version, installation prefix and underlying compiler
    if mpi_name in ('openmpi', 'spectrummpi'):
        # Spectrum MPI is based on Open MPI so is also covered by this logic
        output_of_ompi_info, _ = run_cmd("ompi_info", simple=False)

        # Extract the version of the MPI implementation
        if mpi_name == 'spectrummpi':
            mpi_version_string = 'Spectrum MPI'
        else:
            mpi_version_string = 'Open MPI'
        self.mpi_version = self.extract_ompi_setting(mpi_version_string, output_of_ompi_info)

        # Extract the installation prefix
        self.mpi_prefix = self.extract_ompi_setting("Prefix", output_of_ompi_info)

        # Extract any OpenMPI environment variables in the current environment and ensure they are added to the
        # final module
        self.mpi_env_vars = dict((key, value) for key, value in os.environ.iteritems() if key.startswith("OMPI_"))

        # Extract the C compiler used underneath the MPI implementation, check for the definition of OMPI_MPICC
        self.mpi_c_compiler = self.extract_ompi_setting("C compiler", output_of_ompi_info)

    elif mpi_name == 'impi':
        # Extract the version of IntelMPI
        # The prefix in the mpiicc (or mpigcc) script can be used to extract the explicit version
        contents_of_mpixcc = read_file(path_to_mpi_c_wrapper)
        prefix_regex = re.compile(r'(?<=compilers_and_libraries_)(.*)(?=/linux/mpi)', re.M)

        self.mpi_version = None
        res = prefix_regex.search(contents_of_mpixcc)
        if res:
            self.mpi_version = res.group(1)
        else:
            # old iimpi version
            prefix_regex = re.compile(r'^prefix=(.*)$', re.M)
            res = prefix_regex.search(contents_of_mpixcc)
            if res:
                self.mpi_version = res.group(1).split('/')[-1]

        if self.mpi_version is None:
            raise EasyBuildError("No version found for system Intel MPI")
        else:
            self.log.info("Found Intel MPI version %s for system MPI" % self.mpi_version)

        # Extract the installation prefix, if I_MPI_ROOT is defined, let's use that
        i_mpi_root = os.environ.get('I_MPI_ROOT')
        if i_mpi_root:
            self.mpi_prefix = i_mpi_root
        else:
            # Else just go up three directories from where mpiicc is found
            # (it's 3 because bin64 is a symlink to intel64/bin and we are assuming 64 bit)
            self.mpi_prefix = os.path.dirname(os.path.dirname(os.path.dirname(path_to_mpi_c_wrapper)))

        # Extract any IntelMPI environment variables in the current environment and ensure they are added to the
        # final module
        self.mpi_env_vars = {}
        for key, value in os.environ.iteritems():
            i_mpi_key = key.startswith('I_MPI_') or key.startswith('MPICH_')
            mpi_profile_key = key.startswith('MPI') and key.endswith('PROFILE')
            if i_mpi_key or mpi_profile_key:
                self.mpi_env_vars[key] = value

        # Extract the C compiler used underneath Intel MPI
        compile_info, exit_code = run_cmd("%s -compile-info" % mpi_c_wrapper, simple=False)
        if exit_code == 0:
            self.mpi_c_compiler = compile_info.split(' ', 1)[0]
        else:
            raise EasyBuildError("Could not determine C compiler underneath Intel MPI, '%s -compile-info' "
                                 "returned %s", mpi_c_wrapper, compile_info)

    else:
        raise EasyBuildError("Unrecognised system MPI implementation %s", mpi_name)

    # Ensure install path of system MPI actually exists
    if not os.path.exists(self.mpi_prefix):
        raise EasyBuildError("Path derived for system MPI (%s) does not exist: %s!", mpi_name, self.mpi_prefix)

    self.log.debug("Derived version/install prefix for system MPI %s: %s, %s",
                   mpi_name, self.mpi_version, self.mpi_prefix)

    # For the version of the underlying C compiler need to explicitly extract (to be certain)
    self.c_compiler_version = extract_compiler_version(self.mpi_c_compiler)
    self.log.debug("Derived compiler/version for C compiler underneath system MPI %s: %s, %s",
                   mpi_name, self.mpi_c_compiler, self.c_compiler_version)

    # If EasyConfig specified "real" version (not 'system' which means 'derive automatically'), check it
    if self.cfg['version'] == 'system':
        self.log.info("Found specified version '%s', going with derived MPI version '%s'",
                      self.cfg['version'], self.mpi_version)
    elif self.cfg['version'] == self.mpi_version:
        self.log.info("Specified MPI version %s matches found version" % self.mpi_version)
    else:
        raise EasyBuildError("Specified version (%s) does not match version reported by MPI (%s)",
                             self.cfg['version'], self.mpi_version)