def test_remove_file(self):
    """Test remove_file function."""
    testfile = os.path.join(self.test_prefix, 'foo')

    # remove_file deletes an existing file
    ft.write_file(testfile, 'bar')
    self.assertTrue(os.path.exists(testfile))
    ft.remove_file(testfile)
    # added: verify the file is actually gone after removal
    self.assertFalse(os.path.exists(testfile))

    # removal failure (parent dir made read-only) must raise EasyBuildError
    ft.write_file(testfile, 'bar')
    ft.adjust_permissions(self.test_prefix, stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH, add=False)
    self.assertErrorRegex(EasyBuildError, "Failed to remove", ft.remove_file, testfile)

    # also test behaviour of remove_file under --dry-run
    build_options = {
        'extended_dry_run': True,
        'silent': False,
    }
    init_config(build_options=build_options)

    self.mock_stdout(True)
    ft.remove_file(testfile)
    txt = self.get_stdout()
    self.mock_stdout(False)

    regex = re.compile("^file [^ ]* removed$")
    # fixed failure message: it is shown when the pattern was *not* found in the dry-run output
    self.assertTrue(regex.match(txt), "Pattern '%s' not found in: %s" % (regex.pattern, txt))
def test_load_hooks(self):
    """Test for load_hooks function."""
    # non-existing hooks file is rejected up front
    self.assertErrorRegex(EasyBuildError, "Specified path .* does not exist.*", load_hooks, '/no/such/hooks.py')

    hooks = load_hooks(self.test_hooks_pymod)
    # the test hooks module is expected to define exactly these four hook functions
    self.assertEqual(len(hooks), 4)
    self.assertEqual(sorted(hooks.keys()), ['parse_hook', 'post_configure_hook', 'pre_install_hook', 'start_hook'])
    self.assertTrue(all(callable(h) for h in hooks.values()))

    # test caching of hooks: after removing the hooks file, loading it again must
    # return the exact same (cached) dict object rather than re-reading from disk
    remove_file(self.test_hooks_pymod)
    cached_hooks = load_hooks(self.test_hooks_pymod)
    self.assertTrue(cached_hooks is hooks)

    # hooks file can be empty
    empty_hooks_path = os.path.join(self.test_prefix, 'empty_hooks.py')
    write_file(empty_hooks_path, '')
    empty_hooks = load_hooks(empty_hooks_path)
    self.assertEqual(empty_hooks, {})

    # loading another hooks file doesn't affect cached hooks
    prev_hooks = load_hooks(self.test_hooks_pymod)
    self.assertTrue(prev_hooks is hooks)

    # clearing cached hooks results in error because hooks file is not found
    # (the file itself was removed above, so only the cache made it loadable)
    easybuild.tools.hooks._cached_hooks = {}
    self.assertErrorRegex(EasyBuildError, "Specified path .* does not exist.*", load_hooks, self.test_hooks_pymod)
def build_singularity_image(def_path):
    """Build Singularity container image by calling out to 'singularity' (requires admin privileges!)."""
    container_dir = container_path()
    recipe_filename = os.path.basename(def_path)

    # honour --container-image-name if specified, otherwise derive the name from the
    # recipe filename: definition file Singularity.<app>-<version> => image <app>-<version>.<ext>
    image_name = build_option('container_image_name')
    if image_name is None:
        image_name = recipe_filename.split('.', 1)[1]

    build_opts = ''
    image_format = build_option('container_image_format')

    if image_format in [None, CONT_IMAGE_FORMAT_SQUASHFS]:
        # squashfs image format (default for Singularity), read-only
        image_path = os.path.join(container_dir, image_name + '.simg')
    elif image_format == CONT_IMAGE_FORMAT_EXT3:
        # ext3 image format, created as a writable container
        image_path = os.path.join(container_dir, image_name + '.img')
        build_opts = '--writable'
    elif image_format == CONT_IMAGE_FORMAT_SANDBOX:
        # sandbox image format: a plain directory that acts like a container
        image_path = os.path.join(container_dir, image_name)
        build_opts = '--sandbox'
    else:
        raise EasyBuildError("Unknown container image format specified for Singularity: %s" % image_format)

    # refuse to clobber an existing image unless --force is in effect
    if os.path.exists(image_path):
        if not build_option('force'):
            raise EasyBuildError("Container image already exists at %s, not overwriting it without --force",
                                 image_path)
        print_msg("WARNING: overwriting existing container image at %s due to --force" % image_path)
        remove_file(image_path)

    # resolve full path to 'singularity' binary, since it may not be available via $PATH under sudo...
    singularity = which('singularity')

    cmd_env = ''
    tmpdir = build_option('container_tmpdir')
    if tmpdir:
        cmd_env += 'SINGULARITY_TMPDIR=%s' % tmpdir

    cmd = ' '.join(['sudo', cmd_env, singularity, 'build', build_opts, image_path, def_path])
    print_msg("Running '%s', you may need to enter your 'sudo' password..." % cmd)
    run_cmd(cmd, stream_output=True)
    print_msg("Singularity image created at %s" % image_path, log=_log)
def test_end2end_dockerfile(self):
    # End-to-end test for --containerize with --container-type=docker (recipe generation only).
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    base_args = [
        toy_ec,
        '--containerize',
        '--container-type=docker',
        '--experimental',
    ]

    # unsupported base images are rejected with a clear error
    error_pattern = "Unsupported container base image 'not-supported'"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main,
                          base_args + ['--container-base=not-supported'], raise_error=True)

    # each supported base image yields a Dockerfile in the container path
    for cont_base in ['ubuntu:16.04', 'centos:7']:
        stdout, stderr = self.run_main(base_args + ['--container-base=%s' % cont_base])
        self.assertFalse(stderr)
        regexs = ["^== Dockerfile definition file created at %s/containers/Dockerfile.toy-0.0" % self.test_prefix]
        self.check_regexs(regexs, stdout)
        remove_file(os.path.join(self.test_prefix, 'containers', 'Dockerfile.toy-0.0'))

    self.run_main(base_args + ['--container-base=centos:7'])

    # an existing recipe is not overwritten without --force
    error_pattern = "Container recipe at %s/containers/Dockerfile.toy-0.0 already exists, " \
                    "not overwriting it without --force" % self.test_prefix
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main,
                          base_args + ['--container-base=centos:7'], raise_error=True)
    remove_file(os.path.join(self.test_prefix, 'containers', 'Dockerfile.toy-0.0'))

    # add a second easyconfig to check that multiple easyconfigs end up in a single Dockerfile
    base_args.insert(1, os.path.join(test_ecs, 'g', 'GCC', 'GCC-4.9.2.eb'))
    self.run_main(base_args + ['--container-base=ubuntu:16.04'])
    def_file = read_file(os.path.join(self.test_prefix, 'containers', 'Dockerfile.toy-0.0'))

    regexs = [
        "FROM ubuntu:16.04",
        "eb toy-0.0.eb GCC-4.9.2.eb",
        "module load toy/0.0 GCC/4.9.2",
    ]
    self.check_regexs(regexs, def_file)
def clean_home_subdir(self):
    """Remove contents of (local) 'intel' directory home subdir, where stuff is cached."""
    self.log.debug("Cleaning up %s..." % self.home_subdir_local)
    try:
        for tree in os.listdir(self.home_subdir_local):
            self.log.debug("... removing %s subtree" % tree)
            path = os.path.join(self.home_subdir_local, tree)
            # files and symlinks are removed individually; anything else is assumed
            # to be a directory and removed recursively
            if os.path.isfile(path) or os.path.islink(path):
                remove_file(path)
            else:
                shutil.rmtree(path)
    except OSError as err:
        # modernized from 'except OSError, err': the comma form was removed in
        # Python 3, while 'as' works from Python 2.6 onwards
        raise EasyBuildError("Cleaning up intel dir %s failed: %s", self.home_subdir_local, err)
def test_dep_graph(self): """Unit test that builds a full dependency graph.""" # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6 if LooseVersion(sys.version) >= LooseVersion('2.6') and single_tests_ok: # temporary file for dep graph (hn, fn) = tempfile.mkstemp(suffix='.dot') os.close(hn) if self.ordered_specs is None: self.process_all_easyconfigs() dep_graph(fn, self.ordered_specs) remove_file(fn) else: print "(skipped dep graph test)"
def obtain_config_guess(self, download_source_path=None, search_source_paths=None):
    """
    Locate or download an up-to-date config.guess for use with ConfigureMake

    :param download_source_path: Path to download config.guess to
    :param search_source_paths: Paths to search for config.guess
    :return: Path to config.guess or None
    """
    eb_source_paths = source_paths()
    if download_source_path is None:
        download_source_path = eb_source_paths[0]
    if search_source_paths is None:
        search_source_paths = eb_source_paths

    config_guess = 'config.guess'
    sourcepath_subdir = os.path.join('generic', 'eb_v%s' % EASYBLOCKS_VERSION, 'ConfigureMake')

    config_guess_path = None

    # check if config.guess has already been downloaded to source path;
    # bug fix: iterate over search_source_paths (the parameter was previously
    # computed but ignored — the loop always searched eb_source_paths)
    for path in search_source_paths:
        cand_config_guess_path = os.path.join(path, sourcepath_subdir, config_guess)
        if os.path.isfile(cand_config_guess_path):
            config_guess_path = cand_config_guess_path
            self.log.info("Found recent %s at %s, using it if required", config_guess, config_guess_path)
            break

    # if not found, try to download it
    if config_guess_path is None:
        cand_config_guess_path = os.path.join(download_source_path, sourcepath_subdir, config_guess)
        config_guess_url = CONFIG_GUESS_URL_STUB + CONFIG_GUESS_COMMIT_ID
        downloaded_path = download_file(config_guess, config_guess_url, cand_config_guess_path)
        if downloaded_path is not None:
            # verify SHA256 checksum of download to avoid using a corrupted download
            if verify_checksum(downloaded_path, CONFIG_GUESS_SHA256):
                config_guess_path = downloaded_path
                # add execute permissions so config.guess can be run directly
                adjust_permissions(downloaded_path, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH, add=True)
                self.log.info("Downloaded recent %s to %s, using it if required", config_guess, config_guess_path)
            else:
                self.log.warning("Checksum failed for downloaded file %s, not using it!", downloaded_path)
                remove_file(downloaded_path)
        else:
            self.log.warning("Failed to download recent %s to %s for use with ConfigureMake easyblock (if needed)",
                             config_guess, cand_config_guess_path)

    return config_guess_path
def set_as_default(self, module_folder_path, module_version):
    """
    Create a symlink named 'default' inside the package's module folder in order to set the default module version

    :param module_folder_path: module folder path, e.g. $HOME/easybuild/modules/all/Bison
    :param module_version: module version, e.g. 3.0.4
    """
    default_path = os.path.join(module_folder_path, 'default')

    if os.path.islink(default_path):
        # replace an existing 'default' symlink; log what it used to point at
        old_target = resolve_path(default_path)
        remove_file(default_path)
        self.log.info("Removed default version marking from %s.", old_target)
    elif os.path.exists(default_path):
        # refuse to clobber a regular file named 'default'
        raise EasyBuildError('Found an unexpected file named default in dir %s' % module_folder_path)

    # relative symlink, so the module tree stays relocatable
    symlink(module_version + self.MODULE_FILE_EXTENSION, default_path, use_abspath_source=False)
    self.log.info("Module default version file written to point to %s", default_path)
def install_step(self):
    """Install by copying files and creating group library file."""
    self.log.debug("Installing SCOTCH by copying files")

    # copy the relevant build-tree subdirectories straight into the install prefix
    for part in ('bin', 'include', 'lib', 'man'):
        copy_dir(os.path.join(self.cfg['start_dir'], part), os.path.join(self.installdir, part))

    # drop metis.h and parmetis.h include files, since those can only cause trouble
    for hdr in ('metis.h', 'parmetis.h'):
        remove_file(os.path.join(self.installdir, 'include', hdr))

    # generate a linker GROUP script bundling all installed static libraries
    libdir = os.path.join(self.installdir, 'lib')
    group_lib = os.path.join(libdir, 'libscotch_group.a')
    contents = "GROUP (%s)" % ' '.join(os.listdir(libdir))
    write_file(group_lib, contents)
    self.log.info("Successfully written group lib file: %s", group_lib)
def move_after_install(self):
    """Move installed files to correct location after installation."""
    subdir = os.path.join(self.installdir, self.name, self.version)
    self.log.debug("Moving contents of %s to %s" % (subdir, self.installdir))
    try:
        # remove senseless symlinks, e.g. impi_5.0.1 and impi_latest;
        # loop variable renamed from 'symlink' to avoid shadowing the symlink() file-tools helper
        majver = '.'.join(self.version.split('.')[:-1])
        for link_name in ['%s_%s' % (self.name, majver), '%s_latest' % self.name]:
            link_path = os.path.join(self.installdir, link_name)
            if os.path.exists(link_path):
                remove_file(link_path)

        # move contents of 'impi/<version>' dir to installdir
        for fil in os.listdir(subdir):
            source = os.path.join(subdir, fil)
            target = os.path.join(self.installdir, fil)
            self.log.debug("Moving %s to %s" % (source, target))
            shutil.move(source, target)

        # remove now-empty '<name>' subdirectory
        shutil.rmtree(os.path.join(self.installdir, self.name))
    except OSError as err:
        # 'as' syntax (Python 2.6+) replaces the Python-2-only comma form
        raise EasyBuildError("Failed to move contents of %s to %s: %s", subdir, self.installdir, err)
def run_test(): """Run a single test and check for success.""" # regex to check for successful test run re_success = re.compile("SUCCESS COMPLETE WRF") # run test run_cmd(test_cmd, log_all=True, simple=True) # check for success txt = read_file('rsl.error.0000') if re_success.search(txt): self.log.info("Test %s ran successfully." % test) else: raise EasyBuildError("Test %s failed, pattern '%s' not found.", test, re_success.pattern) # clean up stuff that gets in the way fn_prefs = ["wrfinput_", "namelist.output", "wrfout_", "rsl.out.", "rsl.error."] for filename in os.listdir('.'): for pref in fn_prefs: if filename.startswith(pref): remove_file(filename) self.log.debug("Cleaned up file %s", filename)
def setup_local_home_subdir(self):
    """
    Intel scripts use $HOME/intel to cache stuff. To enable parallel builds,
    we symlink $HOME/intel to a temporary dir on the local disk.
    """
    try:
        # make sure local directory exists
        if not os.path.exists(self.home_subdir_local):
            os.makedirs(self.home_subdir_local)
            self.log.debug("Created local dir %s" % self.home_subdir_local)

        if os.path.exists(self.home_subdir):
            # if 'intel' dir in $HOME already exists, make sure it's the right symlink
            symlink_ok = os.path.islink(self.home_subdir) and os.path.samefile(self.home_subdir,
                                                                               self.home_subdir_local)
            if not symlink_ok:
                # back up current 'intel' dir under a unique name before taking its place
                home_intel_bk = tempfile.mkdtemp(dir=os.path.dirname(self.home_subdir),
                                                 prefix='%s.bk.' % os.path.basename(self.home_subdir))
                self.log.info("Moving %(ih)s to %(ihl)s, I need %(ih)s myself..." % {'ih': self.home_subdir,
                                                                                     'ihl': home_intel_bk})
                shutil.move(self.home_subdir, home_intel_bk)

                # set symlink in place
                os.symlink(self.home_subdir_local, self.home_subdir)
                self.log.debug("Created symlink (1) %s to %s" % (self.home_subdir, self.home_subdir_local))
        else:
            # if a broken symlink is present, remove it first
            # (os.path.exists follows links, so a dangling link lands in this branch)
            if os.path.islink(self.home_subdir):
                remove_file(self.home_subdir)
            os.symlink(self.home_subdir_local, self.home_subdir)
            self.log.debug("Created symlink (2) %s to %s" % (self.home_subdir, self.home_subdir_local))

    except OSError as err:
        # 'as' syntax (Python 2.6+) replaces the Python-2-only comma form
        raise EasyBuildError("Failed to symlink %s to %s: %s", self.home_subdir_local, self.home_subdir, err)
def configure_step(self):
    """Custom configuration procedure for NWChem."""
    # check whether a (valid) symlink to a .nwchemrc config file exists (via a dummy file if necessary)
    # fail early if the link is not what we expect, since running the test cases will likely fail in this case
    try:
        if os.path.exists(self.home_nwchemrc) or os.path.islink(self.home_nwchemrc):
            # create a dummy file to check symlink against
            if not os.path.exists(self.local_nwchemrc):
                write_file(self.local_nwchemrc, 'dummy')

            self.log.debug("Contents of %s: %s", os.path.dirname(self.local_nwchemrc),
                           os.listdir(os.path.dirname(self.local_nwchemrc)))

            if os.path.islink(self.home_nwchemrc):
                home_nwchemrc_target = os.readlink(self.home_nwchemrc)
                if home_nwchemrc_target != self.local_nwchemrc:
                    raise EasyBuildError("Found %s, but it's not a symlink to %s. "
                                         "Please (re)move %s while installing NWChem; it can be restored later",
                                         self.home_nwchemrc, self.local_nwchemrc, self.home_nwchemrc)

            # ok to remove, we'll recreate it anyway
            remove_file(self.local_nwchemrc)
    except (IOError, OSError) as err:
        # 'as' syntax (Python 2.6+) replaces the Python-2-only comma form
        raise EasyBuildError("Failed to validate %s symlink: %s", self.home_nwchemrc, err)
def install_step(self):
    """Install CUDA using Perl install script."""
    # define how to run the installer
    # script has /usr/bin/perl hardcoded, but we want to have control over which perl is being used
    if LooseVersion(self.version) <= LooseVersion("5"):
        install_interpreter = "perl"
        install_script = "install-linux.pl"
        self.cfg.update('installopts', '--prefix=%s' % self.installdir)
    elif LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("10.1"):
        install_interpreter = "perl"
        install_script = "cuda-installer.pl"
        # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
        self.cfg.update('installopts', "-verbose -silent -toolkitpath=%s -toolkit" % self.installdir)
    else:
        # from CUDA 10.1 on, the installer is a native binary, run without an interpreter
        install_interpreter = ""
        install_script = "./cuda-installer"
        # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
        self.cfg.update('installopts', "--silent --toolkit --toolkitpath=%s --defaultroot=%s" % (
            self.installdir, self.installdir))

    if LooseVersion("10.0") < LooseVersion(self.version) < LooseVersion("10.2") and get_cpu_architecture() == POWER:
        # Workaround for
        # https://devtalk.nvidia.com/default/topic/1063995/cuda-setup-and-installation/cuda-10-1-243-10-1-update-2-ppc64le-run-file-installation-issue/
        # prefix the installer invocation with shell commands that pre-create the include layout
        install_script = " && ".join([
            "mkdir -p %(installdir)s/targets/ppc64le-linux/include",
            "([ -e %(installdir)s/include ] || ln -s targets/ppc64le-linux/include %(installdir)s/include)",
            "cp -r %(builddir)s/builds/cublas/src %(installdir)s/.",
            install_script
        ]) % {
            'installdir': self.installdir,
            'builddir': self.builddir
        }

    # Use C locale to avoid localized questions and crash on CUDA 10.1
    self.cfg.update('preinstallopts', "export LANG=C && ")

    cmd = "%(preinstallopts)s %(interpreter)s %(script)s %(installopts)s" % {
        'preinstallopts': self.cfg['preinstallopts'],
        'interpreter': install_interpreter,
        'script': install_script,
        'installopts': self.cfg['installopts']
    }

    # prepare for running install script autonomously
    qanda = {}
    stdqa = {
        # this question is only asked if CUDA tools are already available system-wide
        r"Would you like to remove all CUDA files under .*? (yes/no/abort): ": "no",
    }
    noqanda = [
        r"^Configuring",
        r"Installation Complete",
        r"Verifying archive integrity.*",
        r"^Uncompressing NVIDIA CUDA",
        r".* -> .*",
    ]

    # patch install script to handle Q&A autonomously
    if install_interpreter == "perl":
        patch_perl_script_autoflush(os.path.join(self.builddir, install_script))

    # make sure $DISPLAY is not defined, which may lead to (weird) problems
    # this is workaround for not being able to specify --nox11 to the Perl install scripts
    if 'DISPLAY' in os.environ:
        os.environ.pop('DISPLAY')

    # cuda-installer creates /tmp/cuda-installer.log (ignoring TMPDIR)
    # Try to remove it before running the installer.
    # This will fail with a usable error if it can't be removed
    # instead of segfaulting in the cuda-installer.
    remove_file('/tmp/cuda-installer.log')

    # overriding maxhits default value to 1000 (seconds to wait for nothing to change in the output
    # without seeing a known question)
    run_cmd_qa(cmd, qanda, std_qa=stdqa, no_qa=noqanda, log_all=True, simple=True, maxhits=1000)

    # Remove the cuda-installer log file
    remove_file('/tmp/cuda-installer.log')

    # check if there are patches to apply
    if len(self.src) > 1:
        for patch in self.src[1:]:
            self.log.debug("Running patch %s", patch['name'])
            run_cmd("/bin/sh " + patch['path'] + " --accept-eula --silent --installdir=" + self.installdir)
def build_image(self, recipe_path):
    """Build container image by calling out to 'sudo singularity build'."""
    cont_path = container_path()
    def_file = os.path.basename(recipe_path)

    # use --imagename if specified, otherwise derive based on filename of recipe
    img_name = self.img_name
    if img_name is None:
        # definition file Singularity.<app>-<version>, container name <app>-<version>.<img|simg>
        img_name = def_file.split('.', 1)[1]

    cmd_opts = ''
    image_format = self.image_format
    singularity_version = self.singularity_version()

    # squashfs image format (default for Singularity)
    if image_format in [None, CONT_IMAGE_FORMAT_SQUASHFS, CONT_IMAGE_FORMAT_SIF]:
        # Singularity 3.x produces .sif images, 2.x produces .simg
        if LooseVersion(singularity_version) > LooseVersion('3.0'):
            ext = '.sif'
        else:
            ext = '.simg'
        img_path = os.path.join(cont_path, img_name + ext)
    # ext3 image format, creating as writable container (Singularity 2.x only)
    elif image_format == CONT_IMAGE_FORMAT_EXT3:
        if LooseVersion(singularity_version) > LooseVersion('3.0'):
            raise EasyBuildError("ext3 image format is only supported with Singularity 2.x (found Singularity %s)",
                                 singularity_version)
        else:
            img_path = os.path.join(cont_path, img_name + '.img')
            cmd_opts = '--writable'
    # sandbox image format, creates as a directory but acts like a container
    elif image_format == CONT_IMAGE_FORMAT_SANDBOX:
        img_path = os.path.join(cont_path, img_name)
        cmd_opts = '--sandbox'
    else:
        raise EasyBuildError("Unknown container image format specified for Singularity: %s" % image_format)

    # refuse to overwrite an existing image unless --force is used
    if os.path.exists(img_path):
        if build_option('force'):
            print_msg("WARNING: overwriting existing container image at %s due to --force" % img_path)
            remove_file(img_path)
        else:
            raise EasyBuildError("Container image already exists at %s, not overwriting it without --force",
                                 img_path)

    # resolve full path to 'singularity' binary, since it may not be available via $PATH under sudo...
    singularity = which('singularity')
    cmd_env = ''

    singularity_tmpdir = self.tmpdir
    if singularity_tmpdir:
        cmd_env += 'SINGULARITY_TMPDIR=%s' % singularity_tmpdir

    # NOTE(review): empty cmd_env/cmd_opts leave double spaces in the command string;
    # the end-to-end tests appear to match this with '\s*', so keep the join as-is
    cmd = ' '.join(['sudo', cmd_env, singularity, 'build', cmd_opts, img_path, recipe_path])
    print_msg("Running '%s', you may need to enter your 'sudo' password..." % cmd)
    run_cmd(cmd, stream_output=True)
    print_msg("Singularity image created at %s" % img_path, log=self.log)
def test_end2end_singularity_recipe(self):
    """End-to-end test for --containerize (recipe only)."""
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    args = [
        toy_ec,
        '--containerize',
        '--experimental',
    ]

    # a base image must always be specified
    error_pattern = "--container-base must be specified"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    # generating Singularity definition file with 'docker' or 'shub' bootstrap agents always works,
    # i.e. image label is not verified, image tag can be anything
    for cont_base in ['docker:test123', 'docker:test123:foo', 'shub:test123', 'shub:test123:foo']:
        stdout, stderr = self.run_main(args + ['--container-base=%s' % cont_base])
        self.assertFalse(stderr)
        regexs = ["^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix]
        self.check_regexs(regexs, stdout)
        remove_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))

    args.append("--container-base=shub:test123")
    self.run_main(args)

    # existing definition file is not overwritten without use of --force
    error_pattern = "Container recipe at .* already exists, not overwriting it without --force"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    stdout, stderr = self.run_main(args + ['--force'])
    self.assertFalse(stderr)
    regexs = [
        "^== WARNING: overwriting existing container recipe at .* due to --force",
        "^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix,
    ]
    self.check_regexs(regexs, stdout)
    remove_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))

    # add another easyconfig file to check if multiple easyconfigs are handled correctly
    args.insert(1, os.path.join(test_ecs, 'g', 'GCC', 'GCC-4.9.2.eb'))

    # with 'localimage' bootstrap agent, specified image must exist
    test_img = os.path.join(self.test_prefix, 'test123.img')
    args[-1] = "--container-base=localimage:%s" % test_img
    error_pattern = "Singularity base image at specified path does not exist"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    # an empty file is enough: only existence of the base image is checked here
    write_file(test_img, '')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs = ["^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix]
    self.check_regexs(regexs, stdout)

    # check contents of generated recipe
    def_file = read_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))
    regexs = [
        "^Bootstrap: localimage$",
        "^From: %s$" % test_img,
        "^eb toy-0.0.eb GCC-4.9.2.eb",
        "module load toy/0.0 GCC/4.9.2$",
    ]
    self.check_regexs(regexs, def_file)

    # image extension must make sense when localimage is used
    for img_name in ['test123.foo', 'test123']:
        test_img = os.path.join(self.test_prefix, img_name)
        args[-1] = "--container-base=localimage:%s" % test_img
        write_file(test_img, '')
        error_pattern = "Invalid image extension '.*' must be \.img or \.simg"
        self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)
def test_end2end_singularity_image(self):
    """End-to-end test for --containerize (recipe + image)."""
    topdir = os.path.dirname(os.path.abspath(__file__))
    toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    test_img = os.path.join(self.test_prefix, 'test123.img')
    write_file(test_img, '')

    args = [
        toy_ec,
        '-C',  # equivalent with --containerize
        '--experimental',
        '--container-config=bootstrap=localimage,from=%s' % test_img,
        '--container-build-image',
    ]

    # without a real 'singularity' command available, building the image fails cleanly
    if which('singularity') is None:
        error_pattern = "singularity with version 2.4 or higher not found on your system."
        self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)

    # install mocked versions of 'sudo' and 'singularity' commands
    singularity = os.path.join(self.test_prefix, 'bin', 'singularity')
    write_file(singularity, '')  # placeholder
    adjust_permissions(singularity, stat.S_IXUSR, add=True)

    # mocked 'sudo' just echoes and then runs the command it is given
    sudo = os.path.join(self.test_prefix, 'bin', 'sudo')
    write_file(sudo, '#!/bin/bash\necho "running command \'$@\' with sudo..."\neval "$@"\n')
    adjust_permissions(sudo, stat.S_IXUSR, add=True)

    # make the mocked commands win the $PATH lookup
    os.environ['PATH'] = os.path.pathsep.join([os.path.join(self.test_prefix, 'bin'), os.getenv('PATH')])

    # Singularity 2.x produces .simg images, 3.x produces .sif
    for (version, ext) in [('2.4.0', 'simg'), ('3.1.0', 'sif')]:
        write_file(singularity, MOCKED_SINGULARITY % {'version': version})

        stdout, stderr = self.run_main(args)
        self.assertFalse(stderr)
        regexs = [
            r"^== singularity tool found at %s/bin/singularity" % self.test_prefix,
            r"^== singularity version '%s' is 2.4 or higher ... OK" % version,
            r"^== Singularity definition file created at %s/containers/Singularity\.toy-0.0" % self.test_prefix,
            r"^== Running 'sudo\s*\S*/singularity build\s*/.* /.*', you may need to enter your 'sudo' password...",
            r"^== Singularity image created at %s/containers/toy-0.0\.%s" % (self.test_prefix, ext),
        ]
        self.check_regexs(regexs, stdout)

        self.assertTrue(os.path.exists(os.path.join(containerpath, 'toy-0.0.%s' % ext)))

        remove_file(os.path.join(containerpath, 'Singularity.toy-0.0'))

    # check use of --container-image-format & --container-image-name
    write_file(singularity, MOCKED_SINGULARITY % {'version': '2.4.0'})
    args.extend([
        "--container-image-format=ext3",
        "--container-image-name=foo-bar",
    ])
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs = [
        r"^== singularity tool found at %s/bin/singularity" % self.test_prefix,
        r"^== singularity version '2.4.0' is 2.4 or higher ... OK",
        r"^== Singularity definition file created at %s/containers/Singularity\.foo-bar" % self.test_prefix,
        r"^== Running 'sudo\s*\S*/singularity build --writable /.* /.*', you may need to enter .*",
        r"^== Singularity image created at %s/containers/foo-bar\.img$" % self.test_prefix,
    ]
    self.check_regexs(regexs, stdout)

    cont_img = os.path.join(containerpath, 'foo-bar.img')
    self.assertTrue(os.path.exists(cont_img))

    remove_file(os.path.join(containerpath, 'Singularity.foo-bar'))

    # test again with container image already existing
    error_pattern = "Container image already exists at %s, not overwriting it without --force" % cont_img
    self.mock_stdout(True)
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)
    self.mock_stdout(False)

    # --force allows overwriting the existing image (with a warning)
    args.append('--force')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs.extend([
        "WARNING: overwriting existing container image at %s due to --force" % cont_img,
    ])
    self.check_regexs(regexs, stdout)
    self.assertTrue(os.path.exists(cont_img))

    # also check behaviour under --extended-dry-run
    args.append('--extended-dry-run')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    self.check_regexs(regexs, stdout)

    # test use of --container-tmpdir: $SINGULARITY_TMPDIR must show up in the build command
    args.append('--container-tmpdir=%s' % self.test_prefix)
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs[-3] = "^== Running 'sudo\s*SINGULARITY_TMPDIR=%s \S*/singularity build .*" % self.test_prefix
    self.check_regexs(regexs, stdout)
def test_step(self):
    """Build and run tests included in the WRF distribution."""
    if self.cfg['runtest']:
        if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
            self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                          self.cfg['buildtype'])
            return

        # get list of WRF test cases
        self.testcases = []
        if os.path.exists('test'):
            self.testcases = os.listdir('test')
        elif not self.dry_run:
            raise EasyBuildError("Test directory not found, failed to determine list of test cases")

        # exclude 2d testcases in parallel WRF builds
        if self.cfg['buildtype'] in self.parallel_build_types:
            self.testcases = [test for test in self.testcases if '2d_' not in test]

        # exclude real testcases
        self.testcases = [test for test in self.testcases if not test.endswith("_real")]

        self.log.debug("intermediate list of testcases: %s" % self.testcases)

        # exclude tests that should not be run
        for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
            if test in self.testcases:
                self.testcases.remove(test)

        # some tests hang when WRF is built with Intel compilers
        if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            for test in ["em_heldsuarez"]:
                if test in self.testcases:
                    self.testcases.remove(test)

        # determine parallel setting (1/2 of available processors + 1);
        # use explicit floor division so behaviour is identical under Python 2 and 3
        n = self.cfg['parallel'] // 2 + 1

        # prepare run command
        # stack limit needs to be set to unlimited for WRF to work well
        if self.cfg['buildtype'] in self.parallel_build_types:
            test_cmd = "ulimit -s unlimited && %s && %s" % (self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                                                            self.toolchain.mpi_cmd_for("./wrf.exe", n))
        else:
            # bug fix: the serial command had a trailing '% n' although the string
            # contains no format specifier, which made this branch raise TypeError
            test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe"

        def run_test():
            """Run a single test and check for success."""
            # regex to check for successful test run
            re_success = re.compile("SUCCESS COMPLETE WRF")

            # run test
            run_cmd(test_cmd, log_all=True, simple=True)

            # check for success
            txt = read_file('rsl.error.0000')
            if re_success.search(txt):
                self.log.info("Test %s ran successfully." % test)
            else:
                raise EasyBuildError("Test %s failed, pattern '%s' not found.", test, re_success.pattern)

            # clean up stuff that gets in the way
            fn_prefs = ["wrfinput_", "namelist.output", "wrfout_", "rsl.out.", "rsl.error."]
            for filename in os.listdir('.'):
                for pref in fn_prefs:
                    if filename.startswith(pref):
                        remove_file(filename)
                        self.log.debug("Cleaned up file %s", filename)

        # build and run each test case individually
        for test in self.testcases:
            self.log.debug("Building and running test %s" % test)

            # build and install
            cmd = "tcsh ./compile %s %s" % (self.par, test)
            run_cmd(cmd, log_all=True, simple=True)

            # run test
            try:
                prev_dir = change_dir('run')

                if test in ["em_fire"]:
                    # handle tests with subtests separately
                    testdir = os.path.join("..", "test", test)

                    # bug fix: check for subdirectories relative to testdir;
                    # os.listdir returns bare names, so isdir(x) was tested against the CWD
                    for subtest in [x for x in os.listdir(testdir) if os.path.isdir(os.path.join(testdir, x))]:
                        subtestdir = os.path.join(testdir, subtest)

                        # link required files
                        for filename in os.listdir(subtestdir):
                            if os.path.exists(filename):
                                remove_file(filename)
                            symlink(os.path.join(subtestdir, filename), filename)

                        # run test
                        run_test()
                else:
                    # run test
                    run_test()

                change_dir(prev_dir)

            except OSError as err:
                raise EasyBuildError("An error occured when running test %s: %s", test, err)
def build_step(self):
    """Build ScaLAPACK using make after setting make options."""

    # MPI compiler commands: known MPI families for which standard wrapper names can be assumed
    known_mpi_libs = [toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]  #@UndefinedVariable
    known_mpi_libs += [toolchain.OPENMPI, toolchain.QLOGICMPI]  #@UndefinedVariable
    known_mpi_libs += [toolchain.INTELMPI]  #@UndefinedVariable
    # prefer MPI compiler wrappers from the environment; fall back to standard wrapper names
    if os.getenv('MPICC') and os.getenv('MPIF77') and os.getenv('MPIF90'):
        mpicc = os.getenv('MPICC')
        mpif77 = os.getenv('MPIF77')
        mpif90 = os.getenv('MPIF90')
    elif self.toolchain.mpi_family() in known_mpi_libs:
        mpicc = 'mpicc'
        mpif77 = 'mpif77'
        mpif90 = 'mpif90'
    else:
        raise EasyBuildError("Don't know which compiler commands to use.")

    # determine build options BLAS and LAPACK libs
    extra_makeopts = []

    # check which (known) BLAS/LAPACK library is available as a dependency
    acml = get_software_root(Acml.LAPACK_MODULE_NAME[0])
    lapack = get_software_root(Lapack.LAPACK_MODULE_NAME[0])
    openblas = get_software_root(OpenBLAS.LAPACK_MODULE_NAME[0])
    intelmkl = get_software_root(IntelMKL.LAPACK_MODULE_NAME[0])

    if lapack:
        extra_makeopts.append('LAPACKLIB=%s' % os.path.join(lapack, 'lib', 'liblapack.a'))
        # plain LAPACK requires a separate BLAS library; pick the first known one that is available
        for blas in [Atlas, Blis, GotoBLAS]:
            blas_root = get_software_root(blas.BLAS_MODULE_NAME[0])
            if blas_root:
                blas_libs = ' '.join(['-l%s' % lib for lib in blas.BLAS_LIB])
                extra_makeopts.append('BLASLIB="-L%s %s -lpthread"' % (os.path.join(blas_root, 'lib'), blas_libs))
                break
        if not blas_root:
            raise EasyBuildError("Failed to find a known BLAS library, don't know how to define 'BLASLIB'")
    elif acml:
        # ACML: a single static library provides both BLAS and LAPACK
        acml_base_dir = os.getenv('ACML_BASEDIR', 'NO_ACML_BASEDIR')
        acml_static_lib = os.path.join(acml, acml_base_dir, 'lib', 'libacml.a')
        extra_makeopts.extend([
            'BLASLIB="%s -lpthread"' % acml_static_lib,
            'LAPACKLIB=%s' % acml_static_lib
        ])
    elif openblas:
        # OpenBLAS provides both BLAS and LAPACK
        libdir = os.path.join(openblas, 'lib')
        blas_libs = ' '.join(['-l%s' % lib for lib in OpenBLAS.BLAS_LIB])
        extra_makeopts.extend([
            'BLASLIB="-L%s %s -lpthread"' % (libdir, blas_libs),
            'LAPACKLIB="-L%s %s"' % (libdir, blas_libs),
        ])
    elif intelmkl:
        # Intel MKL: reuse the link flags provided via $LIBLAPACK
        libdir = os.path.join(intelmkl, 'mkl', 'lib', 'intel64')
        blas_libs = os.environ['LIBLAPACK']
        extra_makeopts.extend([
            'BLASLIB="-L%s %s -lpthread"' % (libdir, blas_libs),
            'LAPACKLIB="-L%s %s"' % (libdir, blas_libs),
        ])
    else:
        raise EasyBuildError("Unknown LAPACK library used, no idea how to set BLASLIB/LAPACKLIB make options")

    # build procedure changed in v2.0.0
    if self.loosever < LooseVersion('2.0.0'):
        # ScaLAPACK < 2.0.0 requires a separate BLACS library
        blacs = get_software_root(Blacs.BLACS_MODULE_NAME[0])
        if not blacs:
            raise EasyBuildError("BLACS not available, yet required for ScaLAPACK version < 2.0.0")

        # determine interface
        interface = det_interface(self.log, os.path.join(blacs, 'bin'))

        # set build and BLACS dir correctly
        extra_makeopts.append('home=%s BLACSdir=%s' % (self.cfg['start_dir'], blacs))

        # set BLACS libs correctly
        blacs_libs = [
            ('BLACSFINIT', "F77init"),
            ('BLACSCINIT', "Cinit"),
            ('BLACSLIB', "")
        ]
        for (var, lib) in blacs_libs:
            extra_makeopts.append('%s=%s/lib/libblacs%s.a' % (var, blacs, lib))

        # set compilers and options
        noopt = ''
        if self.toolchain.options['noopt']:
            noopt += " -O0"
        if self.toolchain.options['pic']:
            noopt += " -fPIC"
        extra_makeopts += [
            'F77="%s"' % mpif77,
            'CC="%s"' % mpicc,
            'NOOPT="%s"' % noopt,
            'CCFLAGS="-O3 %s"' % os.getenv('CFLAGS')
        ]

        # set interface
        extra_makeopts.append("CDEFS='-D%s -DNO_IEEE $(USEMPI)'" % interface)
    else:
        # determine interface (v2.x ships BLACS, only the name-mangling interface must be picked)
        if self.toolchain.mpi_family() in known_mpi_libs:
            interface = 'Add_'
        else:
            raise EasyBuildError("Don't know which interface to pick for the MPI library being used.")

        # set compilers and options
        extra_makeopts += [
            'FC="%s"' % mpif90,
            'CC="%s"' % mpicc,
            'CCFLAGS="%s"' % os.getenv('CFLAGS'),
            'FCFLAGS="%s"' % os.getenv('FFLAGS'),
        ]

        # set interface
        extra_makeopts.append('CDEFS="-D%s"' % interface)

    # update make opts, and build_step; save original buildopts so they can be restored
    # before the second (serial) make invocation below
    saved_buildopts = self.cfg['buildopts']

    # Only build the library first, that can be done in parallel.
    # Creating libscalapack.a may fail in parallel, but should work
    # fine with non-parallel make afterwards
    self.cfg.update('buildopts', 'lib')
    self.cfg.update('buildopts', ' '.join(extra_makeopts))

    # Copied from ConfigureMake easyblock
    paracmd = ''
    if self.cfg['parallel']:
        paracmd = "-j %s" % self.cfg['parallel']
    cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd, self.cfg['buildopts'])

    # Ignore exit code for parallel run
    (out, _) = run_cmd(cmd, log_ok=False, log_all=False, simple=False)

    # Now remake libscalapack.a serially and the tests.
    self.cfg['buildopts'] = saved_buildopts
    self.cfg.update('buildopts', ' '.join(extra_makeopts))
    remove_file('libscalapack.a')
    self.cfg['parallel'] = 1
    super(EB_ScaLAPACK, self).build_step()
def test_step(self):
    """Build and run tests included in the WRF distribution."""
    if self.cfg['runtest']:
        if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
            self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                          self.cfg['buildtype'])
            return

        # get list of WRF test cases
        self.testcases = []
        if os.path.exists('test'):
            self.testcases = os.listdir('test')
        elif not self.dry_run:
            raise EasyBuildError("Test directory not found, failed to determine list of test cases")

        # exclude 2d testcases in parallel WRF builds
        if self.cfg['buildtype'] in self.parallel_build_types:
            self.testcases = [test for test in self.testcases if '2d_' not in test]

        # exclude real testcases
        self.testcases = [test for test in self.testcases if not test.endswith("_real")]

        self.log.debug("intermediate list of testcases: %s" % self.testcases)

        # exclude tests that should not be run
        for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
            if test in self.testcases:
                self.testcases.remove(test)

        # some tests hang when WRF is built with Intel compilers
        if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            for test in ["em_heldsuarez"]:
                if test in self.testcases:
                    self.testcases.remove(test)

        # determine number of MPI ranks to use in tests (1/2 of available processors + 1);
        # we need to limit max number of MPI ranks (8 is too high for some tests, 4 is OK),
        # since otherwise run may fail because domain size is too small
        n_mpi_ranks = min(self.cfg['parallel'] // 2 + 1, 4)

        # prepare run command
        # stack limit needs to be set to unlimited for WRF to work well
        if self.cfg['buildtype'] in self.parallel_build_types:
            test_cmd = "ulimit -s unlimited && %s && %s" % (self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                                                            self.toolchain.mpi_cmd_for("./wrf.exe", n_mpi_ranks))
        else:
            # serial run; redirect output to the same file the MPI runs produce
            test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe >rsl.error.0000 2>&1"

        # regex to check for successful test run
        re_success = re.compile("SUCCESS COMPLETE WRF")

        def run_test():
            """Run a single test and check for success."""

            # run test; exit code is checked explicitly below, so don't log failures here
            (_, ec) = run_cmd(test_cmd, log_all=False, log_ok=False, simple=False)

            # read output file
            out_fn = 'rsl.error.0000'
            if os.path.exists(out_fn):
                out_txt = read_file(out_fn)
            else:
                out_txt = 'FILE NOT FOUND'

            if ec == 0:
                # exit code zero suggests success, but let's make sure...
                if re_success.search(out_txt):
                    self.log.info("Test %s ran successfully (found '%s' in %s)", test, re_success.pattern, out_fn)
                else:
                    raise EasyBuildError("Test %s failed, pattern '%s' not found in %s: %s",
                                         test, re_success.pattern, out_fn, out_txt)
            else:
                # non-zero exit code means trouble, show command output
                raise EasyBuildError("Test %s failed with exit code %s, output: %s", test, ec, out_txt)

            # clean up stuff that gets in the way
            fn_prefs = ["wrfinput_", "namelist.output", "wrfout_", "rsl.out.", "rsl.error."]
            for filename in os.listdir('.'):
                for pref in fn_prefs:
                    if filename.startswith(pref):
                        remove_file(filename)
                        self.log.debug("Cleaned up file %s", filename)

        # build and run each test case individually
        for test in self.testcases:
            self.log.debug("Building and running test %s" % test)

            # build and install
            cmd = "./compile %s %s" % (self.par, test)
            run_cmd(cmd, log_all=True, simple=True)

            # run test
            try:
                prev_dir = change_dir('run')

                if test in ["em_fire"]:
                    # handle tests with subtests seperately
                    testdir = os.path.join("..", "test", test)

                    for subtest in [x for x in os.listdir(testdir) if os.path.isdir(x)]:
                        subtestdir = os.path.join(testdir, subtest)

                        # link required files
                        for filename in os.listdir(subtestdir):
                            if os.path.exists(filename):
                                remove_file(filename)
                            symlink(os.path.join(subtestdir, filename), filename)

                        # run test
                        run_test()
                else:
                    # run test
                    run_test()

                change_dir(prev_dir)

            except OSError as err:
                raise EasyBuildError("An error occured when running test %s: %s", test, err)
def test_step(self):
    """Build and run tests included in the WRF distribution."""
    if self.cfg['runtest']:
        if self.cfg['buildtype'] in self.parallel_build_types and not build_option('mpi_tests'):
            self.log.info("Skipping testing of WRF with build type '%s' since MPI testing is disabled",
                          self.cfg['buildtype'])
            return

        # get list of WRF test cases
        self.testcases = []
        if os.path.exists('test'):
            self.testcases = os.listdir('test')
        elif not self.dry_run:
            raise EasyBuildError("Test directory not found, failed to determine list of test cases")

        # exclude 2d testcases in parallel WRF builds
        if self.cfg['buildtype'] in self.parallel_build_types:
            self.testcases = [test for test in self.testcases if '2d_' not in test]

        # exclude real testcases
        self.testcases = [test for test in self.testcases if not test.endswith("_real")]

        self.log.debug("intermediate list of testcases: %s" % self.testcases)

        # exclude tests that should not be run
        for test in ["em_esmf_exp", "em_scm_xy", "nmm_tropical_cyclone"]:
            if test in self.testcases:
                self.testcases.remove(test)

        # some tests hang when WRF is built with Intel compilers
        if self.comp_fam == toolchain.INTELCOMP:  # @UndefinedVariable
            for test in ["em_heldsuarez"]:
                if test in self.testcases:
                    self.testcases.remove(test)

        # determine parallel setting (1/2 of available processors + 1);
        # use integer division ('//') so the MPI rank count stays an int under Python 3,
        # where '/' is true division and would yield a float
        n = self.cfg['parallel'] // 2 + 1

        # prepare run command
        # stack limit needs to be set to unlimited for WRF to work well
        if self.cfg['buildtype'] in self.parallel_build_types:
            test_cmd = "ulimit -s unlimited && %s && %s" % (self.toolchain.mpi_cmd_for("./ideal.exe", 1),
                                                            self.toolchain.mpi_cmd_for("./wrf.exe", n))
        else:
            # serial run; redirect output to rsl.error.0000 so the success check below can read it
            test_cmd = "ulimit -s unlimited && ./ideal.exe && ./wrf.exe >rsl.error.0000 2>&1"

        def run_test():
            """Run a single test and check for success."""

            # regex to check for successful test run
            re_success = re.compile("SUCCESS COMPLETE WRF")

            # run test
            run_cmd(test_cmd, log_all=True, simple=True)

            # check for success
            txt = read_file('rsl.error.0000')
            if re_success.search(txt):
                self.log.info("Test %s ran successfully." % test)
            else:
                raise EasyBuildError("Test %s failed, pattern '%s' not found.", test, re_success.pattern)

            # clean up stuff that gets in the way
            fn_prefs = ["wrfinput_", "namelist.output", "wrfout_", "rsl.out.", "rsl.error."]
            for filename in os.listdir('.'):
                for pref in fn_prefs:
                    if filename.startswith(pref):
                        remove_file(filename)
                        self.log.debug("Cleaned up file %s", filename)

        # build and run each test case individually
        for test in self.testcases:
            self.log.debug("Building and running test %s" % test)

            # build and install
            cmd = "tcsh ./compile %s %s" % (self.par, test)
            run_cmd(cmd, log_all=True, simple=True)

            # run test
            try:
                prev_dir = change_dir('run')

                if test in ["em_fire"]:
                    # handle tests with subtests seperately
                    testdir = os.path.join("..", "test", test)

                    for subtest in [x for x in os.listdir(testdir) if os.path.isdir(x)]:
                        subtestdir = os.path.join(testdir, subtest)

                        # link required files
                        for filename in os.listdir(subtestdir):
                            if os.path.exists(filename):
                                remove_file(filename)
                            symlink(os.path.join(subtestdir, filename), filename)

                        # run test
                        run_test()
                else:
                    # run test
                    run_test()

                change_dir(prev_dir)

            except OSError as err:
                raise EasyBuildError("An error occured when running test %s: %s", test, err)
def install_step(self):
    """Install CUDA using Perl install script."""

    # define how to run the installer;
    # script has /usr/bin/perl hardcoded, but we want to have control over which perl is being used
    if LooseVersion(self.version) <= LooseVersion("5"):
        install_interpreter = "perl"
        install_script = "install-linux.pl"
        self.cfg.update('installopts', '--prefix=%s' % self.installdir)
    elif LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("10.1"):
        install_interpreter = "perl"
        install_script = "cuda-installer.pl"
        # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
        self.cfg.update('installopts', "-verbose -silent -toolkitpath=%s -toolkit" % self.installdir)
    else:
        # for CUDA >= 10.1 a native binary installer is used, so no interpreter is needed
        install_interpreter = ""
        install_script = "./cuda-installer"
        # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut
        self.cfg.update('installopts', "--silent --toolkit --toolkitpath=%s --defaultroot=%s" % (
            self.installdir, self.installdir))

    # compose full install command (interpreter may be empty for the native installer)
    cmd = "%(preinstallopts)s %(interpreter)s %(script)s %(installopts)s" % {
        'preinstallopts': self.cfg['preinstallopts'],
        'interpreter': install_interpreter,
        'script': install_script,
        'installopts': self.cfg['installopts']
    }

    # prepare for running install script autonomously
    qanda = {}
    stdqa = {
        # this question is only asked if CUDA tools are already available system-wide
        r"Would you like to remove all CUDA files under .*? (yes/no/abort): ": "no",
    }
    noqanda = [
        r"^Configuring",
        r"Installation Complete",
        r"Verifying archive integrity.*",
        r"^Uncompressing NVIDIA CUDA",
        r".* -> .*",
    ]

    # patch install script to handle Q&A autonomously
    if install_interpreter == "perl":
        patch_perl_script_autoflush(os.path.join(self.builddir, install_script))

    # make sure $DISPLAY is not defined, which may lead to (weird) problems
    # this is workaround for not being able to specify --nox11 to the Perl install scripts
    if 'DISPLAY' in os.environ:
        os.environ.pop('DISPLAY')

    # cuda-installer creates /tmp/cuda-installer.log (ignoring TMPDIR)
    # Try to remove it before running the installer.
    # This will fail with a usable error if it can't be removed
    # instead of segfaulting in the cuda-installer.
    remove_file('/tmp/cuda-installer.log')

    # overriding maxhits default value to 300 (300s wait for nothing to change in the output without seeing a known
    # question)
    run_cmd_qa(cmd, qanda, std_qa=stdqa, no_qa=noqanda, log_all=True, simple=True, maxhits=300)

    # Remove the cuda-installer log file
    remove_file('/tmp/cuda-installer.log')

    # check if there are patches to apply
    if len(self.src) > 1:
        for patch in self.src[1:]:
            self.log.debug("Running patch %s", patch['name'])
            run_cmd("/bin/sh " + patch['path'] + " --accept-eula --silent --installdir=" + self.installdir)
def test_end2end_singularity_recipe_config(self):
    """End-to-end test for --containerize (recipe only), using --container-config."""
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    # location where the generated Singularity recipe is expected to appear
    test_container_recipe = os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0')

    args = [
        toy_ec,
        '--containerize',
        '--experimental',
    ]

    # 'bootstrap' keyword is mandatory in --container-config
    args.extend(['--container-config', 'osversion=7.6.1810'])
    error_pattern = r"Keyword 'bootstrap' is required in container base config"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    # unknown bootstrap agents are rejected
    args.extend(['--container-config', 'bootstrap=foobar'])
    error_pattern = r"Unknown value specified for 'bootstrap' keyword: foobar \(known: arch, busybox, debootstrap, "
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    # default mirror URL for yum bootstrap agent uses ${OSVERSION}, so 'osversion' must be specified too
    args.extend(['--container-config', 'bootstrap=yum'])
    error_pattern = "Keyword 'osversion' is required in container base config when '%{OSVERSION}' is used"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

    args[-1] = 'bootstrap=yum,osversion=7.6.1810'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    expected = '\n'.join([
        "Bootstrap: yum",
        "OSVersion: 7.6.1810",
        "MirrorURL: http://mirror.centos.org/centos-%{OSVERSION}/%{OSVERSION}/os/x86_64/",
        "Include: yum",
        '\n',
    ])
    self.assertTrue(txt.startswith(expected), "Container recipe starts with '%s':\n\n%s" % (expected, txt))

    # when installing from scratch, a bunch of OS packages are installed too
    pkgs = ['epel-release', 'python', 'setuptools', 'Lmod', r'gcc-c\+\+', 'make', 'patch', 'tar']
    for pkg in pkgs:
        regex = re.compile(r"^yum install .*%s" % pkg, re.M)
        # NOTE(review): failure message reads 'found' but is shown when the pattern is NOT found
        self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt))

    pip_patterns = [
        # EasyBuild is installed with pip by default
        "pip install easybuild",
    ]
    post_commands_patterns = [
        # easybuild user is added if it doesn't exist yet
        r"id easybuild \|\| useradd easybuild",
        # /app and /scratch are created (if missing) by default
        r"if \[ ! -d /app \]; then mkdir -p /app",
        r"if \[ ! -d /scratch \]; then mkdir -p /scratch",
    ]
    eb_pattern = r"eb toy-0.0.eb --robot\s*$"

    for pattern in pip_patterns + post_commands_patterns + [eb_pattern]:
        regex = re.compile('^' + pattern, re.M)
        self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt))

    remove_file(test_container_recipe)

    # can also specify a custom mirror URL
    args[-1] += ',mirrorurl=https://example.com'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    expected = '\n'.join([
        "Bootstrap: yum",
        "OSVersion: 7.6.1810",
        "MirrorURL: https://example.com",
        "Include: yum",
        '\n',
    ])
    self.assertTrue(txt.startswith(expected), "Container recipe starts with '%s':\n\n%s" % (expected, txt))

    remove_file(test_container_recipe)

    # osversion is not required when %{OSVERSION} is not used in mirror URL
    args[-1] = 'bootstrap=yum,mirrorurl=https://example.com,include=test123'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    expected = '\n'.join([
        "Bootstrap: yum",
        "MirrorURL: https://example.com",
        "Include: test123",
        '\n',
    ])
    self.assertTrue(txt.startswith(expected), "Container recipe starts with '%s':\n\n%s" % (expected, txt))

    # also check with image-based bootstrap agent, which requires 'from'
    test_cases = [
        ('docker', 'test'),
        ('localimage', 'test.simg'),
        ('library', 'sylabsed/examples/lolcow:latest'),
        ('shub', 'test'),
    ]
    error_pattern = "Keyword 'from' is required in container base config when using bootstrap agent"
    for (bootstrap, from_spec) in test_cases:
        args[-1] = 'bootstrap=%s' % bootstrap
        self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main, args, raise_error=True)

        args[-1] += ',from=%s' % from_spec
        remove_file(test_container_recipe)
        stdout, stderr = self.run_main(args, raise_error=True)
        txt = read_file(test_container_recipe)
        expected = '\n'.join([
            "Bootstrap: %s" % bootstrap,
            "From: %s" % from_spec,
            '',
        ])
        self.assertTrue(txt.startswith(expected), "Container recipe starts with '%s':\n\n%s" % (expected, txt))

        # no OS packages are installed by default when starting from an existing image
        self.assertFalse("yum install" in txt)

        for pattern in pip_patterns + post_commands_patterns + [eb_pattern]:
            regex = re.compile('^' + pattern, re.M)
            self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt))

    remove_file(test_container_recipe)

    # commands to install EasyBuild can be customized via 'install_eb' keyword
    args[-1] = 'bootstrap=yum,osversion=7.6.1810,install_eb=easy_install easybuild'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    for pattern in pip_patterns:
        # default pip-based install should be replaced entirely
        regex = re.compile('^' + pattern, re.M)
        self.assertFalse(regex.search(txt), "Pattern '%s' should not be found in: %s" % (regex.pattern, txt))
    for pattern in ["easy_install easybuild", eb_pattern]:
        regex = re.compile('^' + pattern, re.M)
        self.assertTrue(regex.search(txt), "Pattern '%s' should be found in: %s" % (regex.pattern, txt))

    remove_file(test_container_recipe)

    # post commands can be customized via 'post_commands' keyword
    args[-1] = 'bootstrap=yum,osversion=7.6.1810,post_commands=id easybuild'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    for pattern in post_commands_patterns:
        # default post commands should be replaced entirely
        regex = re.compile('^' + pattern, re.M)
        self.assertFalse(regex.search(txt), "Pattern '%s' should not be found in: %s" % (regex.pattern, txt))
    for pattern in ["id easybuild", eb_pattern]:
        regex = re.compile('^' + pattern, re.M)
        self.assertTrue(regex.search(txt), "Pattern '%s' should be found in: %s" % (regex.pattern, txt))

    remove_file(test_container_recipe)

    # options can be passed to 'eb' command in recipe via 'eb_args' keyword
    args[-1] = 'bootstrap=yum,osversion=7.6.1810,eb_args=--debug -l'
    stdout, stderr = self.run_main(args, raise_error=True)
    txt = read_file(test_container_recipe)
    regex = re.compile(r"^eb toy-0.0.eb --robot --debug -l", re.M)
    self.assertTrue(regex.search(txt), "Pattern '%s' should be found in: %s" % (regex.pattern, txt))
def test_end2end_dockerfile(self):
    """End-to-end test for generating a Dockerfile via --containerize --container-type=docker."""
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    # path where the generated Dockerfile is expected to show up
    dockerfile = os.path.join(self.test_prefix, 'containers', 'Dockerfile.toy-0.0')

    base_args = [
        toy_ec,
        '--containerize',
        '--container-type=docker',
        '--experimental',
    ]

    # unsupported container config values are rejected
    error_pattern = "Unsupported container config 'not-supported'"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main,
                          base_args + ['--container-config=not-supported'], raise_error=True)

    # both Ubuntu and CentOS base images are supported
    for cont_base in ['ubuntu:16.04', 'centos:7']:
        stdout, stderr = self.run_main(base_args + ['--container-config=%s' % cont_base])
        self.assertFalse(stderr)
        regexs = ["^== Dockerfile definition file created at %s/containers/Dockerfile.toy-0.0" % self.test_prefix]
        self.check_regexs(regexs, stdout)
        remove_file(dockerfile)

    # an existing Dockerfile is not clobbered without --force
    self.run_main(base_args + ['--container-config=centos:7'])

    error_pattern = "Container recipe at %s/containers/Dockerfile.toy-0.0 already exists, " \
                    "not overwriting it without --force" % self.test_prefix
    self.assertErrorRegex(EasyBuildError, error_pattern, self.run_main,
                          base_args + ['--container-config=centos:7'], raise_error=True)
    remove_file(dockerfile)

    # multiple easyconfigs end up in a single generated Dockerfile
    base_args.insert(1, os.path.join(test_ecs, 'g', 'GCC', 'GCC-4.9.2.eb'))
    self.run_main(base_args + ['--container-config=ubuntu:16.04'])
    def_file = read_file(dockerfile)
    regexs = [
        "FROM ubuntu:16.04",
        "eb toy-0.0.eb GCC-4.9.2.eb",
        "module load toy/0.0 GCC/4.9.2",
    ]
    self.check_regexs(regexs, def_file)

    # there should be no leading/trailing whitespace included
    for pattern in [r'^\s+', r'\s+$']:
        regex = re.compile(pattern)
        self.assertFalse(regex.search(def_file),
                         "Pattern '%s' should *not* be found in: %s" % (pattern, def_file))
def obtain_config_guess(download_source_path=None, search_source_paths=None):
    """
    Locate or download an up-to-date config.guess

    :param download_source_path: Path to download config.guess to
    :param search_source_paths: Paths to search for config.guess
    :return: Path to config.guess or None
    """
    log = fancylogger.getLogger('obtain_config_guess')

    default_paths = source_paths()

    # custom download/search locations are deprecated; default to configured source paths
    if download_source_path is not None:
        log.deprecated("Specifying custom source path to download config.guess via 'download_source_path'", '5.0')
    else:
        download_source_path = default_paths[0]

    if search_source_paths is not None:
        log.deprecated("Specifying custom location to search for updated config.guess via 'search_source_paths'", '5.0')
    else:
        search_source_paths = default_paths

    config_guess = 'config.guess'
    # version-specific subdirectory under the source path
    subdir = os.path.join('generic', 'eb_v%s' % EASYBLOCKS_VERSION, 'ConfigureMake')

    result = None

    # first look for a previously downloaded (and verified) copy in the search paths
    for search_path in search_source_paths:
        candidate = os.path.join(search_path, subdir, config_guess)
        if not (os.path.isfile(candidate) and check_config_guess(candidate)):
            continue
        if build_option('force_download'):
            print_warning("Found file %s at %s, but re-downloading it anyway..." % (config_guess, candidate))
        else:
            result = candidate
            log.info("Found %s at %s", config_guess, result)
        break

    if not result:
        # no usable copy found (or forced re-download): fetch a fresh one and verify it
        candidate = os.path.join(download_source_path, subdir, config_guess)
        download_url = CONFIG_GUESS_URL_STUB + CONFIG_GUESS_COMMIT_ID
        if not download_file(config_guess, download_url, candidate):
            print_warning("Failed to download recent %s to %s", config_guess, candidate, log=log)
        elif not check_config_guess(candidate):
            print_warning("Verification failed for file %s, not using it!", candidate, log=log)
            remove_file(candidate)
        else:
            result = candidate
            # make the downloaded script executable for everyone
            adjust_permissions(result, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH, add=True)
            log.info("Verified %s at %s, using it if required", config_guess, result)

    return result
def test_end2end_singularity_recipe(self):
    """End-to-end test for --containerize (recipe only)."""
    test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    args = [
        toy_ec,
        '--containerize',
        '--experimental',
    ]

    # --container-base is required for Singularity recipes
    error_pattern = "--container-base must be specified"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)

    # generating Singularity definition file with 'docker' or 'shub' bootstrap agents always works,
    # i.e. image label is not verified, image tag can be anything
    for cont_base in ['docker:test123', 'docker:test123:foo', 'shub:test123', 'shub:test123:foo']:
        stdout, stderr = self.run_main(args + ['--container-base=%s' % cont_base])
        self.assertFalse(stderr)
        regexs = ["^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix]
        self.check_regexs(regexs, stdout)
        remove_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))

    args.append("--container-base=shub:test123")
    self.run_main(args)

    # existing definition file is not overwritten without use of --force
    error_pattern = "Container recipe at .* already exists, not overwriting it without --force"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)

    stdout, stderr = self.run_main(args + ['--force'])
    self.assertFalse(stderr)
    regexs = [
        "^== WARNING: overwriting existing container recipe at .* due to --force",
        "^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix,
    ]
    self.check_regexs(regexs, stdout)
    remove_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))

    # add another easyconfig file to check if multiple easyconfigs are handled correctly
    args.insert(1, os.path.join(test_ecs, 'g', 'GCC', 'GCC-4.9.2.eb'))

    # with 'localimage' bootstrap agent, specified image must exist
    test_img = os.path.join(self.test_prefix, 'test123.img')
    args[-1] = "--container-base=localimage:%s" % test_img
    error_pattern = "Singularity base image at specified path does not exist"
    self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)

    write_file(test_img, '')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs = ["^== Singularity definition file created at %s/containers/Singularity.toy-0.0" % self.test_prefix]
    self.check_regexs(regexs, stdout)

    # check contents of generated recipe
    def_file = read_file(os.path.join(self.test_prefix, 'containers', 'Singularity.toy-0.0'))
    regexs = [
        "^Bootstrap: localimage$",
        "^From: %s$" % test_img,
        "^eb toy-0.0.eb GCC-4.9.2.eb",
        "module load toy/0.0 GCC/4.9.2$",
    ]
    self.check_regexs(regexs, def_file)

    # image extension must make sense when localimage is used
    for img_name in ['test123.foo', 'test123']:
        test_img = os.path.join(self.test_prefix, img_name)
        args[-1] = "--container-base=localimage:%s" % test_img
        write_file(test_img, '')
        # raw string needed here: '\.' in a plain string literal is an invalid escape sequence,
        # which triggers a DeprecationWarning under Python 3.6+ (the string value is unchanged)
        error_pattern = r"Invalid image extension '.*' must be \.img or \.simg"
        self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)
def obtain_config_guess(self, download_source_path=None, search_source_paths=None):
    """
    Locate or download an up-to-date config.guess for use with ConfigureMake

    :param download_source_path: Path to download config.guess to
    :param search_source_paths: Paths to search for config.guess
    :return: Path to config.guess or None
    """
    eb_source_paths = source_paths()
    # default to the configured source path(s) when no custom locations are specified
    if download_source_path is None:
        download_source_path = eb_source_paths[0]
    if search_source_paths is None:
        search_source_paths = eb_source_paths

    config_guess = 'config.guess'
    # version-specific subdirectory, so different easyblocks versions don't share a download
    sourcepath_subdir = os.path.join('generic', 'eb_v%s' % EASYBLOCKS_VERSION, 'ConfigureMake')

    config_guess_path = None

    # check if config.guess has already been downloaded to source path;
    # iterate over search_source_paths (the original looped over eb_source_paths,
    # which silently ignored a custom 'search_source_paths' value)
    for path in search_source_paths:
        cand_config_guess_path = os.path.join(path, sourcepath_subdir, config_guess)
        if os.path.isfile(cand_config_guess_path):
            config_guess_path = cand_config_guess_path
            self.log.info("Found recent %s at %s, using it if required", config_guess, config_guess_path)
            break

    # if not found, try to download it
    if config_guess_path is None:
        cand_config_guess_path = os.path.join(download_source_path, sourcepath_subdir, config_guess)
        config_guess_url = CONFIG_GUESS_URL_STUB + CONFIG_GUESS_COMMIT_ID
        downloaded_path = download_file(config_guess, config_guess_url, cand_config_guess_path)
        if downloaded_path is not None:
            # verify SHA256 checksum of download to avoid using a corrupted download
            if verify_checksum(downloaded_path, CONFIG_GUESS_SHA256):
                config_guess_path = downloaded_path
                # add execute permissions
                adjust_permissions(downloaded_path, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH, add=True)
                self.log.info("Downloaded recent %s to %s, using it if required", config_guess, config_guess_path)
            else:
                self.log.warning("Checksum failed for downloaded file %s, not using it!", downloaded_path)
                remove_file(downloaded_path)
        else:
            self.log.warning("Failed to download recent %s to %s for use with ConfigureMake easyblock (if needed)",
                             config_guess, cand_config_guess_path)

    return config_guess_path
def install_step(self):
    """
    Install (Par)METIS by copying files over to the right places.

    Also create symlinks where expected by other software (Lib directory, capital L).
    """
    includedir = os.path.join(self.installdir, 'include')
    libdir = os.path.join(self.installdir, 'lib')

    if LooseVersion(self.version) >= LooseVersion("4"):
        # includedir etc changed in v4, use a normal make install
        cmd = "make install %s" % self.cfg['installopts']
        try:
            # 'make install' must run from the out-of-source build dir; restore start_dir afterwards
            os.chdir(self.parmetis_builddir)
            run_cmd(cmd, log_all=True, simple=True)
            os.chdir(self.cfg['start_dir'])
        except OSError as err:
            raise EasyBuildError("Running '%s' in %s failed: %s", cmd, self.parmetis_builddir, err)

        # libraries: 'make install' does not install libmetis.a, copy it manually
        try:
            src = os.path.join(self.cfg['start_dir'], 'build', 'libmetis', 'libmetis.a')
            dst = os.path.join(libdir, 'libmetis.a')
            shutil.copy2(src, dst)
        except OSError as err:
            raise EasyBuildError("Copying files to installation dir failed: %s", err)

        # include files: also ship metis.h alongside the ParMETIS headers
        try:
            src = os.path.join(self.cfg['start_dir'], 'build', 'metis', 'include', 'metis.h')
            dst = os.path.join(includedir, 'metis.h')
            shutil.copy2(src, dst)
        except OSError as err:
            raise EasyBuildError("Copying files to installation dir failed: %s", err)
    else:
        # pre-v4 has no 'make install'; create target dirs and copy artifacts by hand
        mkdir(libdir)
        mkdir(includedir)

        # libraries
        try:
            for fil in ['libmetis.a', 'libparmetis.a']:
                src = os.path.join(self.cfg['start_dir'], fil)
                dst = os.path.join(libdir, fil)
                shutil.copy2(src, dst)
        except OSError as err:
            raise EasyBuildError("Copying files to installation dir failed: %s", err)

        # include files
        try:
            src = os.path.join(self.cfg['start_dir'], 'parmetis.h')
            dst = os.path.join(includedir, 'parmetis.h')
            shutil.copy2(src, dst)
            # some applications (SuiteSparse) can only use METIS (not ParMETIS), but header files are the same
            dst = os.path.join(includedir, 'metis.h')
            shutil.copy2(src, dst)
        except OSError as err:
            raise EasyBuildError("Copying files to installation dir failed: %s", err)

    # other applications depending on ParMETIS (SuiteSparse for one) look for both ParMETIS libraries
    # and header files in the Lib directory (capital L). The following symlinks are hence created.
    # remove_file is called first so re-installs don't fail on existing links.
    try:
        caplibdir = os.path.join(self.installdir, 'Lib')
        remove_file(caplibdir)
        symlink(libdir, caplibdir)
        for header_file in ['metis.h', 'parmetis.h']:
            header_path = os.path.join(libdir, header_file)
            remove_file(header_path)
            symlink(os.path.join(includedir, header_file), header_path)
    except OSError as err:
        raise EasyBuildError("Something went wrong during symlink creation: %s", err)
def template_module_only_test(self, easyblock, name='foo', version='1.3.2', extra_txt=''):
    """
    Test whether all easyblocks are compatible with --module-only.

    :param easyblock: path to the easyblock Python module to test
    :param name: software name to use in the generated easyconfig
    :param version: software version to use in the generated easyconfig
    :param extra_txt: extra easyconfig text to append (mandatory custom parameters are added automatically)
    """
    tmpdir = tempfile.mkdtemp()

    # matches 'class <Name>(' at start of line, to extract the easyblock class name
    class_regex = re.compile(r"^class (.*)\(.*", re.M)

    self.log.debug("easyblock: %s" % easyblock)

    # read easyblock Python module
    # NOTE(review): consider 'with open(...)' here to guarantee the handle is closed on error
    f = open(easyblock, "r")
    txt = f.read()
    f.close()

    # obtain easyblock class name using regex
    res = class_regex.search(txt)
    if res:
        ebname = res.group(1)
        self.log.debug("Found class name for easyblock %s: %s" % (easyblock, ebname))

        toolchain = None

        # figure out list of mandatory variables, and define with dummy values as necessary
        app_class = get_easyblock_class(ebname)

        # easyblocks deriving from IntelBase require a license file to be found for --module-only;
        # collect the (grand)parent classes so indirect subclasses are detected too
        bases = list(app_class.__bases__)
        for base in copy.copy(bases):
            bases.extend(base.__bases__)

        if app_class == IntelBase or IntelBase in bases:
            os.environ['INTEL_LICENSE_FILE'] = os.path.join(tmpdir, 'intel.lic')
            write_file(os.environ['INTEL_LICENSE_FILE'], '# dummy license')
        elif app_class == EB_IMOD:
            # $JAVA_HOME must be set for IMOD
            os.environ['JAVA_HOME'] = tmpdir
        elif app_class == PythonBundle:
            # $EBROOTPYTHON must be set for PythonBundle easyblock
            os.environ['EBROOTPYTHON'] = '/fake/install/prefix/Python/2.7.14-foss-2018a'
        elif app_class == GoPackage:
            # $EBROOTGO must be set for GoPackage easyblock
            os.environ['EBROOTGO'] = '/fake/install/prefix/Go/1.14'
            os.environ['EBVERSIONGO'] = '1.14'
        elif app_class == EB_OpenFOAM:
            # proper toolchain must be used for OpenFOAM(-Extend), to determine value to set for $WM_COMPILER;
            # fake GCC/OpenMPI/gompi module files are written into tmpdir and put on $MODULEPATH
            write_file(os.path.join(tmpdir, 'GCC', '4.9.3-2.25'), '\n'.join([
                '#%Module',
                'setenv EBROOTGCC %s' % tmpdir,
                'setenv EBVERSIONGCC 4.9.3',
            ]))
            write_file(os.path.join(tmpdir, 'OpenMPI', '1.10.2-GCC-4.9.3-2.25'), '\n'.join([
                '#%Module',
                'setenv EBROOTOPENMPI %s' % tmpdir,
                'setenv EBVERSIONOPENMPI 1.10.2',
            ]))
            write_file(os.path.join(tmpdir, 'gompi', '2016a'), '\n'.join([
                '#%Module',
                'module load GCC/4.9.3-2.25',
                'module load OpenMPI/1.10.2-GCC-4.9.3-2.25',
            ]))
            os.environ['MODULEPATH'] = tmpdir
            toolchain = {'name': 'gompi', 'version': '2016a'}

        # extend easyconfig to make sure mandatory custom easyconfig paramters are defined
        extra_options = app_class.extra_options()
        for (key, val) in extra_options.items():
            if val[2] == MANDATORY:
                extra_txt += '%s = "foo"\n' % key

        # write easyconfig file
        self.writeEC(ebname, name=name, version=version, extratxt=extra_txt, toolchain=toolchain)

        # take into account that for some easyblock, particular dependencies are hard required early on
        # (in prepare_step for exampel);
        # we just set the corresponding $EBROOT* environment variables here to fool it...
        req_deps = {
            # QScintilla easyblock requires that either PyQt or PyQt5 are available as dependency
            # (PyQt is easier, since PyQt5 is only supported for sufficiently recent QScintilla versions)
            'qscintilla.py': [('PyQt', '4.12')],
            # MotionCor2 and Gctf easyblock requires CUDA as dependency
            'motioncor2.py': [('CUDA', '10.1.105')],
            'gctf.py': [('CUDA', '10.1.105')],
        }
        easyblock_fn = os.path.basename(easyblock)
        for (dep_name, dep_version) in req_deps.get(easyblock_fn, []):
            dep_root_envvar = get_software_root_env_var_name(dep_name)
            os.environ[dep_root_envvar] = '/value/should/not/matter'
            dep_version_envvar = get_software_version_env_var_name(dep_name)
            os.environ[dep_version_envvar] = dep_version

        # initialize easyblock
        # if this doesn't fail, the test succeeds
        app = app_class(EasyConfig(self.eb_file))

        # run all steps, most should be skipped; restore working dir even if a step fails
        orig_workdir = os.getcwd()
        try:
            app.run_all_steps(run_test_cases=False)
        finally:
            change_dir(orig_workdir)

        if os.path.basename(easyblock) == 'modulerc.py':
            # .modulerc must be cleaned up to avoid causing trouble (e.g. "Duplicate version symbol" errors);
            # both Tcl (.modulerc) and Lua (.modulerc.lua) variants are removed
            modulerc = os.path.join(TMPDIR, 'modules', 'all', name, '.modulerc')
            if os.path.exists(modulerc):
                remove_file(modulerc)
            modulerc += '.lua'
            if os.path.exists(modulerc):
                remove_file(modulerc)
        else:
            # a module file (Tcl or Lua) must have been generated
            modfile = os.path.join(TMPDIR, 'modules', 'all', name, version)
            luamodfile = '%s.lua' % modfile
            self.assertTrue(os.path.exists(modfile) or os.path.exists(luamodfile),
                            "Module file %s or %s was generated" % (modfile, luamodfile))

            if os.path.exists(modfile):
                modtxt = read_file(modfile)
            else:
                modtxt = read_file(luamodfile)

            # 'None' in the module text would indicate an unresolved template/value
            none_regex = re.compile('None')
            self.assertFalse(none_regex.search(modtxt), "None not found in module file: %s" % modtxt)

        # cleanup
        app.close_log()
        remove_file(app.logfile)
        remove_dir(tmpdir)
    else:
        # no 'class ...' definition was found in the easyblock module
        self.assertTrue(False, "Class found in easyblock %s" % easyblock)
def build_step(self):
    """
    Build ScaLAPACK using make after setting make options.

    Figures out MPI compiler commands, BLAS/LAPACK (and pre-2.0 BLACS) make options,
    then builds the library in parallel followed by a serial rebuild (parallel builds
    of libscalapack.a are known to be unreliable).
    """
    # MPI compiler commands: prefer $MPICC/$MPIF77/$MPIF90 from the toolchain,
    # fall back to generic wrapper names for known MPI families
    known_mpi_libs = [toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]  # @UndefinedVariable
    known_mpi_libs += [toolchain.OPENMPI, toolchain.QLOGICMPI]  # @UndefinedVariable
    known_mpi_libs += [toolchain.INTELMPI]  # @UndefinedVariable
    if os.getenv('MPICC') and os.getenv('MPIF77') and os.getenv('MPIF90'):
        mpicc = os.getenv('MPICC')
        mpif77 = os.getenv('MPIF77')
        mpif90 = os.getenv('MPIF90')
    elif self.toolchain.mpi_family() in known_mpi_libs:
        mpicc = 'mpicc'
        mpif77 = 'mpif77'
        mpif90 = 'mpif90'
    else:
        raise EasyBuildError("Don't know which compiler commands to use.")

    # determine build options BLAS and LAPACK libs
    extra_makeopts = []

    # probe which numerical library is available as a dependency
    acml = get_software_root(Acml.LAPACK_MODULE_NAME[0])
    lapack = get_software_root(Lapack.LAPACK_MODULE_NAME[0])
    openblas = get_software_root(OpenBLAS.LAPACK_MODULE_NAME[0])
    intelmkl = get_software_root(IntelMKL.LAPACK_MODULE_NAME[0])

    if lapack:
        # plain LAPACK: also need a separate BLAS (ATLAS or GotoBLAS)
        extra_makeopts.append('LAPACKLIB=%s' % os.path.join(lapack, 'lib', 'liblapack.a'))
        for blas in [Atlas, GotoBLAS]:
            blas_root = get_software_root(blas.BLAS_MODULE_NAME[0])
            if blas_root:
                blas_libs = ' '.join(['-l%s' % lib for lib in blas.BLAS_LIB])
                extra_makeopts.append('BLASLIB="-L%s %s -lpthread"' % (os.path.join(blas_root, 'lib'), blas_libs))
                break
        # blas_root holds the last probed root here; None means nothing was found
        if not blas_root:
            raise EasyBuildError("Failed to find a known BLAS library, don't know how to define 'BLASLIB'")
    elif acml:
        # ACML provides both BLAS and LAPACK in a single static library
        acml_base_dir = os.getenv('ACML_BASEDIR', 'NO_ACML_BASEDIR')
        acml_static_lib = os.path.join(acml, acml_base_dir, 'lib', 'libacml.a')
        extra_makeopts.extend([
            'BLASLIB="%s -lpthread"' % acml_static_lib,
            'LAPACKLIB=%s' % acml_static_lib
        ])
    elif openblas:
        # OpenBLAS provides both BLAS and LAPACK interfaces
        libdir = os.path.join(openblas, 'lib')
        blas_libs = ' '.join(['-l%s' % lib for lib in OpenBLAS.BLAS_LIB])
        extra_makeopts.extend([
            'BLASLIB="-L%s %s -lpthread"' % (libdir, blas_libs),
            'LAPACKLIB="-L%s %s"' % (libdir, blas_libs),
        ])
    elif intelmkl:
        # Intel MKL: reuse $LIBLAPACK as set by the toolchain
        libdir = os.path.join(intelmkl, 'mkl', 'lib', 'intel64')
        blas_libs = os.environ['LIBLAPACK']
        extra_makeopts.extend([
            'BLASLIB="-L%s %s -lpthread"' % (libdir, blas_libs),
            'LAPACKLIB="-L%s %s"' % (libdir, blas_libs),
        ])
    else:
        raise EasyBuildError("Unknown LAPACK library used, no idea how to set BLASLIB/LAPACKLIB make options")

    # build procedure changed in v2.0.0
    # NOTE(review): self.loosever is presumably set in __init__ from self.version — confirm
    if self.loosever < LooseVersion('2.0.0'):
        # pre-2.0 requires a separate BLACS installation
        blacs = get_software_root(Blacs.BLACS_MODULE_NAME[0])
        if not blacs:
            raise EasyBuildError("BLACS not available, yet required for ScaLAPACK version < 2.0.0")

        # determine interface
        interface = det_interface(self.log, os.path.join(blacs, 'bin'))

        # set build and BLACS dir correctly
        extra_makeopts.append('home=%s BLACSdir=%s' % (self.cfg['start_dir'], blacs))

        # set BLACS libs correctly
        blacs_libs = [
            ('BLACSFINIT', "F77init"),
            ('BLACSCINIT', "Cinit"),
            ('BLACSLIB', "")
        ]
        for (var, lib) in blacs_libs:
            extra_makeopts.append('%s=%s/lib/libblacs%s.a' % (var, blacs, lib))

        # set compilers and options
        noopt = ''
        if self.toolchain.options['noopt']:
            noopt += " -O0"
        if self.toolchain.options['pic']:
            noopt += " -fPIC"
        extra_makeopts += [
            'F77="%s"' % mpif77,
            'CC="%s"' % mpicc,
            'NOOPT="%s"' % noopt,
            'CCFLAGS="-O3 %s"' % os.getenv('CFLAGS')
        ]

        # set interface
        extra_makeopts.append("CDEFS='-D%s -DNO_IEEE $(USEMPI)'" % interface)
    else:
        # determine interface (v2.0+ has BLACS built in)
        if self.toolchain.mpi_family() in known_mpi_libs:
            interface = 'Add_'
        else:
            raise EasyBuildError("Don't know which interface to pick for the MPI library being used.")

        # set compilers and options
        extra_makeopts += [
            'FC="%s"' % mpif90,
            'CC="%s"' % mpicc,
            'CCFLAGS="%s"' % os.getenv('CFLAGS'),
            'FCFLAGS="%s"' % os.getenv('FFLAGS'),
        ]

        # set interface
        extra_makeopts.append('CDEFS="-D%s"' % interface)

    # update make opts, and build_step
    saved_buildopts = self.cfg['buildopts']

    # Only build the library first, that can be done in parallel.
    # Creating libscalapack.a may fail in parallel, but should work
    # fine with non-parallel make afterwards
    self.cfg.update('buildopts', 'lib')
    self.cfg.update('buildopts', ' '.join(extra_makeopts))

    # Copied from ConfigureMake easyblock
    paracmd = ''
    if self.cfg['parallel']:
        paracmd = "-j %s" % self.cfg['parallel']

    cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd, self.cfg['buildopts'])

    # Ignore exit code for parallel run
    (out, _) = run_cmd(cmd, log_ok=False, log_all=False, simple=False)

    # Now remake libscalapack.a serially and the tests.
    self.cfg['buildopts'] = saved_buildopts
    self.cfg.update('buildopts', ' '.join(extra_makeopts))
    # remove possibly-corrupt library from the parallel attempt, then rebuild with -j 1
    remove_file('libscalapack.a')
    self.cfg['parallel'] = 1
    super(EB_ScaLAPACK, self).build_step()
def test_end2end_singularity_image(self):
    """
    End-to-end test for --containerize (recipe + image).

    Uses mocked 'singularity' and 'sudo' commands so no real container build (or root access)
    is required; exercises image formats, --force, --extended-dry-run and --container-tmpdir.
    """
    topdir = os.path.dirname(os.path.abspath(__file__))
    toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')

    containerpath = os.path.join(self.test_prefix, 'containers')
    os.environ['EASYBUILD_CONTAINERPATH'] = containerpath
    # --containerpath must be an existing directory (this is done to avoid misconfiguration)
    mkdir(containerpath)

    # empty base image file, only its existence matters for 'localimage:'
    test_img = os.path.join(self.test_prefix, 'test123.img')
    write_file(test_img, '')

    args = [
        toy_ec,
        '-C',  # equivalent with --containerize
        '--experimental',
        '--container-base=localimage:%s' % test_img,
        '--container-build-image',
    ]

    # without 'singularity' on $PATH, building the image must fail with a clear error
    if which('singularity') is None:
        error_pattern = "Singularity not found in your system"
        self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)

    # install mocked versions of 'sudo' and 'singularity' commands
    singularity = os.path.join(self.test_prefix, 'bin', 'singularity')
    write_file(singularity, MOCKED_SINGULARITY)
    adjust_permissions(singularity, stat.S_IXUSR, add=True)

    # mocked 'sudo' just echoes and runs the command as the current user
    sudo = os.path.join(self.test_prefix, 'bin', 'sudo')
    write_file(sudo, '#!/bin/bash\necho "running command \'$@\' with sudo..."\neval "$@"\n')
    adjust_permissions(sudo, stat.S_IXUSR, add=True)

    # prepend mock bin dir so the mocked commands win over any real ones
    os.environ['PATH'] = '%s:%s' % (os.path.join(self.test_prefix, 'bin'), os.getenv('PATH'))

    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)

    # expected progress messages for a default (squashfs, .simg) build
    regexs = [
        "^== Singularity tool found at %s/bin/singularity" % self.test_prefix,
        "^== Singularity version '2.4.0' is 2.4 or higher ... OK",
        "^== Singularity definition file created at %s/containers/Singularity\.toy-0.0" % self.test_prefix,
        "^== Running 'sudo\s*\S*/singularity build\s*/.* /.*', you may need to enter your 'sudo' password...",
        "^== Singularity image created at %s/containers/toy-0.0\.simg" % self.test_prefix,
    ]
    self.check_regexs(regexs, stdout)

    self.assertTrue(os.path.exists(os.path.join(containerpath, 'toy-0.0.simg')))

    # remove recipe so the next run regenerates it
    remove_file(os.path.join(containerpath, 'Singularity.toy-0.0'))

    # check use of --container-image-format & --container-image-name
    args.extend([
        "--container-image-format=ext3",
        "--container-image-name=foo-bar",
    ])
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)

    # last three expected messages change: custom name, --writable build, .img extension
    regexs[-3] = "^== Singularity definition file created at %s/containers/Singularity\.foo-bar" % self.test_prefix
    regexs[-2] = "^== Running 'sudo\s*\S*/singularity build --writable /.* /.*', you may need to enter .*"
    regexs[-1] = "^== Singularity image created at %s/containers/foo-bar\.img$" % self.test_prefix
    self.check_regexs(regexs, stdout)

    cont_img = os.path.join(containerpath, 'foo-bar.img')
    self.assertTrue(os.path.exists(cont_img))

    remove_file(os.path.join(containerpath, 'Singularity.foo-bar'))

    # test again with container image already existing: must refuse without --force
    error_pattern = "Container image already exists at %s, not overwriting it without --force" % cont_img
    self.mock_stdout(True)
    self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True)
    self.mock_stdout(False)

    # with --force, the existing image is overwritten (with a warning)
    args.append('--force')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs.extend([
        "WARNING: overwriting existing container image at %s due to --force" % cont_img,
    ])
    self.check_regexs(regexs, stdout)
    self.assertTrue(os.path.exists(cont_img))

    # also check behaviour under --extended-dry-run
    args.append('--extended-dry-run')
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    self.check_regexs(regexs, stdout)

    # test use of --container-tmpdir: $SINGULARITY_TMPDIR must show up in the build command
    args.append('--container-tmpdir=%s' % self.test_prefix)
    stdout, stderr = self.run_main(args)
    self.assertFalse(stderr)
    regexs[-3] = "^== Running 'sudo\s*SINGULARITY_TMPDIR=%s \S*/singularity build .*" % self.test_prefix
    self.check_regexs(regexs, stdout)
def install_step(self):
    """
    Custom install procedure for TensorFlow.

    Installs the wheel built earlier via 'pip install', then applies two post-install fixes:
    the google/__init__.py protobuf namespace-package workaround, and CUDA header include
    paths (for TensorFlow < 1.14).
    """
    # avoid that pip (ab)uses $HOME/.cache/pip
    # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching
    env.setvar('XDG_CACHE_HOME', tempfile.gettempdir())
    self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME'])

    # find .whl file that was built, and install it using 'pip install';
    # wheel filenames use e.g. '2.0.0rc1' where EasyBuild versions use '2.0.0-rc1'
    if "-rc" in self.version:
        whl_version = self.version.replace("-rc", "rc")
    else:
        whl_version = self.version

    whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-%s-*.whl' % whl_version))
    if not whl_paths:
        # fall back to any tensorflow wheel if none matches the expected version
        whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-*.whl'))
    if len(whl_paths) == 1:
        # --ignore-installed is required to ensure *this* wheel is installed
        cmd = "pip install --ignore-installed --prefix=%s %s" % (self.installdir, whl_paths[0])

        # if extensions are listed, assume they will provide all required dependencies,
        # so use --no-deps to prevent pip from downloading & installing them
        if self.cfg['exts_list']:
            cmd += ' --no-deps'

        run_cmd(cmd, log_all=True, simple=True, log_ok=True)
    else:
        # bug fix: format arguments were swapped; "in %s" refers to the build directory,
        # the list of candidate wheels comes after the colon
        raise EasyBuildError("Failed to isolate built .whl in %s: %s", self.builddir, whl_paths)

    # Fix for https://github.com/tensorflow/tensorflow/issues/6341 on Python < 3.3
    # If the site-packages/google/__init__.py file is missing, make it an empty file.
    # This fixes the "No module named google.protobuf" error that sometimes shows up during sanity_check
    # For Python >= 3.3 the logic is reversed: The __init__.py must not exist.
    # See e.g. http://python-notes.curiousefficiency.org/en/latest/python_concepts/import_traps.html
    google_protobuf_dir = os.path.join(self.installdir, self.pylibdir, 'google', 'protobuf')
    google_init_file = os.path.join(self.installdir, self.pylibdir, 'google', '__init__.py')
    if LooseVersion(det_python_version(self.python_cmd)) < LooseVersion('3.3'):
        if os.path.isdir(google_protobuf_dir) and not is_readable(google_init_file):
            self.log.debug("Creating (empty) missing %s", google_init_file)
            write_file(google_init_file, '')
    else:
        if os.path.exists(google_init_file):
            self.log.debug("Removing %s for Python >= 3.3", google_init_file)
            remove_file(google_init_file)

    # Fix cuda header paths
    # This is needed for building custom TensorFlow ops
    if LooseVersion(self.version) < LooseVersion('1.14'):
        pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
        regex_subs = [(r'#include "cuda/include/', r'#include "')]
        base_path = os.path.join(self.installdir, 'lib', 'python%s' % pyshortver, 'site-packages',
                                 'tensorflow', 'include', 'tensorflow')
        for header in glob.glob(os.path.join(base_path, 'stream_executor', 'cuda', 'cuda*.h')) + \
                glob.glob(os.path.join(base_path, 'core', 'util', 'cuda*.h')):
            apply_regex_substitutions(header, regex_subs)
def test_make_module_pythonpackage(self):
    """
    Test make_module_step of PythonPackage easyblock.

    Populates a fake install dir with files in bin/include/lib/lib64, runs make_module_step
    with a mocked 'python' command, and checks which paths end up in the generated module file.
    """
    app_class = get_easyblock_class('PythonPackage')
    self.writeEC('PythonPackage', name='testpypkg', version='3.14')
    app = app_class(EasyConfig(self.eb_file))

    # install dir should not be there yet
    self.assertFalse(os.path.exists(app.installdir), "%s should not exist" % app.installdir)

    # create install dir and populate it with subdirs/files
    mkdir(app.installdir, parents=True)
    # $PATH, $LD_LIBRARY_PATH, $LIBRARY_PATH, $CPATH, $PKG_CONFIG_PATH
    write_file(os.path.join(app.installdir, 'bin', 'foo'), 'echo foo!')
    write_file(os.path.join(app.installdir, 'include', 'foo.h'), 'bar')
    write_file(os.path.join(app.installdir, 'lib', 'libfoo.a'), 'libfoo')
    pyver = '.'.join(map(str, sys.version_info[:2]))
    write_file(os.path.join(app.installdir, 'lib', 'python%s' % pyver, 'site-packages', 'foo.egg'), 'foo egg')
    write_file(os.path.join(app.installdir, 'lib64', 'pkgconfig', 'foo.pc'), 'libfoo: foo')

    # PythonPackage relies on the fact that 'python' points to the right Python version;
    # install a wrapper script that echoes its invocation and delegates to the test's interpreter
    tmpdir = tempfile.mkdtemp()
    python = os.path.join(tmpdir, 'python')
    write_file(python, '#!/bin/bash\necho $0 $@\n%s "$@"' % sys.executable)
    adjust_permissions(python, stat.S_IXUSR)
    os.environ['PATH'] = '%s:%s' % (tmpdir, os.getenv('PATH', ''))
    # cleanup: removed leftover debug code (mid-function import of 'which' + print of its result)

    # create module file
    app.make_module_step()

    remove_file(python)

    self.assertTrue(TMPDIR in app.installdir)
    self.assertTrue(TMPDIR in app.installdir_mod)

    # locate the generated module file (Tcl or Lua)
    modtxt = None
    for cand_mod_filename in ['3.14', '3.14.lua']:
        full_modpath = os.path.join(app.installdir_mod, 'testpypkg', cand_mod_filename)
        if os.path.exists(full_modpath):
            modtxt = read_file(full_modpath)
            break
    self.assertFalse(modtxt is None)

    # (pattern, expected-to-be-found) pairs checked against the module file contents
    regexs = [
        (r'^prepend.path.*\WCPATH\W.*include"?\W*$', True),
        (r'^prepend.path.*\WLD_LIBRARY_PATH\W.*lib"?\W*$', True),
        (r'^prepend.path.*\WLIBRARY_PATH\W.*lib"?\W*$', True),
        (r'^prepend.path.*\WPATH\W.*bin"?\W*$', True),
        (r'^prepend.path.*\WPKG_CONFIG_PATH\W.*lib64/pkgconfig"?\W*$', True),
        (r'^prepend.path.*\WPYTHONPATH\W.*lib/python[23]\.[0-9]/site-packages"?\W*$', True),
        # lib64 doesn't contain any library files, so these are *not* included in $LD_LIBRARY_PATH or $LIBRARY_PATH
        (r'^prepend.path.*\WLD_LIBRARY_PATH\W.*lib64', False),
        (r'^prepend.path.*\WLIBRARY_PATH\W.*lib64', False),
    ]
    for (pattern, found) in regexs:
        regex = re.compile(pattern, re.M)
        if found:
            assert_msg = "Pattern '%s' found in: %s" % (regex.pattern, modtxt)
        else:
            assert_msg = "Pattern '%s' not found in: %s" % (regex.pattern, modtxt)
        self.assertEqual(bool(regex.search(modtxt)), found, assert_msg)
def configure_step(self):
    """
    Custom configuration procedure for NWChem.

    Validates the ~/.nwchemrc symlink, shortens the build path, then exports the large set
    of environment variables NWChem's make-based build requires, and finally runs
    'make clean' + 'make nwchem_config'.
    """
    # check whether a (valid) symlink to a .nwchemrc config file exists (via a dummy file if necessary)
    # fail early if the link is not what's we expect, since running the test cases will likely fail in this case
    try:
        if os.path.exists(self.home_nwchemrc) or os.path.islink(self.home_nwchemrc):
            # create a dummy file to check symlink
            if not os.path.exists(self.local_nwchemrc):
                write_file(self.local_nwchemrc, 'dummy')

            self.log.debug("Contents of %s: %s", os.path.dirname(self.local_nwchemrc),
                           os.listdir(os.path.dirname(self.local_nwchemrc)))

            if os.path.islink(self.home_nwchemrc):
                home_nwchemrc_target = os.readlink(self.home_nwchemrc)
                if home_nwchemrc_target != self.local_nwchemrc:
                    raise EasyBuildError("Found %s, but it's not a symlink to %s. "
                                         "Please (re)move %s while installing NWChem; it can be restored later",
                                         self.home_nwchemrc, self.local_nwchemrc, self.home_nwchemrc)
            # ok to remove, we'll recreate it anyway
            remove_file(self.local_nwchemrc)
    except (IOError, OSError) as err:
        raise EasyBuildError("Failed to validate %s symlink: %s", self.home_nwchemrc, err)

    # building NWChem in a long path name is an issue, so let's try to make sure we have a short one
    try:
        # NWChem insists that version is in name of build dir
        tmpdir = tempfile.mkdtemp(suffix='-%s-%s' % (self.name, self.version))
        # remove created directory, since we're not going to use it as is
        os.rmdir(tmpdir)
        # avoid having '['/']' characters in build dir name, NWChem doesn't like that
        start_dir = tmpdir.replace('[', '_').replace(']', '_')
        mkdir(os.path.dirname(start_dir), parents=True)
        symlink(self.cfg['start_dir'], start_dir)
        change_dir(start_dir)
        self.cfg['start_dir'] = start_dir
    except OSError as err:
        raise EasyBuildError("Failed to symlink build dir to a shorter path name: %s", err)

    # change to actual build dir
    change_dir('src')

    nwchem_modules = self.cfg['modules']

    # set required NWChem environment variables
    env.setvar('NWCHEM_TOP', self.cfg['start_dir'])
    if len(self.cfg['start_dir']) > 64:
        # workaround for:
        # "The directory name chosen for NWCHEM_TOP is longer than the maximum allowed value of 64 characters"
        # see also https://svn.pnl.gov/svn/nwchem/trunk/src/util/util_nwchem_srcdir.F
        self.setvar_env_makeopt('NWCHEM_LONG_PATHS', 'Y')

    env.setvar('NWCHEM_TARGET', self.cfg['target'])

    # with GlobalArrays as an external dependency, NWChem does not need its own ARMCI setup
    garoot = get_software_root('GlobalArrays')
    if garoot:
        self.setvar_env_makeopt('EXTERNAL_GA_PATH', garoot)
    else:
        env.setvar('MSG_COMMS', self.cfg['msg_comms'])
        env.setvar('ARMCI_NETWORK', self.cfg['armci_network'])
        if self.cfg['armci_network'] in ["OPENIB"]:
            env.setvar('IB_INCLUDE', "/usr/include")
            env.setvar('IB_LIB', "/usr/lib64")
            env.setvar('IB_LIB_NAME', "-libumad -libverbs -lpthread")

    if 'python' in self.cfg['modules']:
        python_root = get_software_root('Python')
        if not python_root:
            raise EasyBuildError("Python module not loaded, you should add Python as a dependency.")
        env.setvar('PYTHONHOME', python_root)
        pyver = '.'.join(get_software_version('Python').split('.')[0:2])
        env.setvar('PYTHONVERSION', pyver)
        # if libreadline is loaded, assume it was a dependency for Python
        # pass -lreadline to avoid linking issues (libpython2.7.a doesn't include readline symbols)
        libreadline = get_software_root('libreadline')
        if libreadline:
            libreadline_libdir = os.path.join(libreadline, get_software_libdir('libreadline'))
            ncurses = get_software_root('ncurses')
            if not ncurses:
                raise EasyBuildError("ncurses is not loaded, but required to link with libreadline")
            ncurses_libdir = os.path.join(ncurses, get_software_libdir('ncurses'))
            readline_libs = ' '.join([
                os.path.join(libreadline_libdir, 'libreadline.a'),
                os.path.join(ncurses_libdir, 'libcurses.a'),
            ])
            extra_libs = os.environ.get('EXTRA_LIBS', '')
            env.setvar('EXTRA_LIBS', ' '.join([extra_libs, readline_libs]))

    env.setvar('LARGE_FILES', 'TRUE')
    env.setvar('USE_NOFSCHECK', 'TRUE')
    env.setvar('CCSDTLR', 'y')  # enable CCSDTLR
    env.setvar('CCSDTQ', 'y')  # enable CCSDTQ (compilation is long, executable is big)

    if LooseVersion(self.version) >= LooseVersion("6.2"):
        env.setvar('MRCC_METHODS', 'y')  # enable multireference coupled cluster capability

    if LooseVersion(self.version) >= LooseVersion("6.5"):
        env.setvar('EACCSD', 'y')  # enable EOM electron-attachemnt coupled cluster capability
        env.setvar('IPCCSD', 'y')  # enable EOM ionization-potential coupled cluster capability
        env.setvar('USE_NOIO', 'TRUE')  # avoid doing I/O for the ddscf, mp2 and ccsd modules

    for var in ['USE_MPI', 'USE_MPIF', 'USE_MPIF4']:
        env.setvar(var, 'y')
    for var in ['CC', 'CXX', 'F90']:
        env.setvar('MPI_%s' % var, os.getenv('MPI%s' % var))

    libmpi = ""

    # for NWChem 6.6 and newer, $LIBMPI & co should no longer be
    # set, the correct values are determined by the NWChem build
    # procedure automatically, see
    # http://www.nwchem-sw.org/index.php/Compiling_NWChem#MPI_variables
    if LooseVersion(self.version) < LooseVersion("6.6"):
        env.setvar('MPI_LOC', os.path.dirname(os.getenv('MPI_INC_DIR')))
        env.setvar('MPI_LIB', os.getenv('MPI_LIB_DIR'))
        env.setvar('MPI_INCLUDE', os.getenv('MPI_INC_DIR'))

        mpi_family = self.toolchain.mpi_family()
        # NOTE(review): substring membership test on the toolchain.OPENMPI string constant,
        # while other branches use list membership — confirm this is intended
        if mpi_family in toolchain.OPENMPI:
            # OpenMPI's Fortran support libraries changed across versions
            ompi_ver = get_software_version('OpenMPI')
            if LooseVersion(ompi_ver) < LooseVersion("1.10"):
                if LooseVersion(ompi_ver) < LooseVersion("1.8"):
                    libmpi = "-lmpi_f90 -lmpi_f77 -lmpi -ldl -Wl,--export-dynamic -lnsl -lutil"
                else:
                    libmpi = "-lmpi_usempi -lmpi_mpifh -lmpi"
            else:
                libmpi = "-lmpi_usempif08 -lmpi_usempi_ignore_tkr -lmpi_mpifh -lmpi"
        elif mpi_family in [toolchain.INTELMPI]:
            if self.cfg['armci_network'] in ["MPI-MT"]:
                libmpi = "-lmpigf -lmpigi -lmpi_ilp64 -lmpi_mt"
            else:
                libmpi = "-lmpigf -lmpigi -lmpi_ilp64 -lmpi"
        elif mpi_family in [toolchain.MPICH, toolchain.MPICH2]:
            libmpi = "-lmpichf90 -lmpich -lopa -lmpl -lrt -lpthread"
        else:
            raise EasyBuildError("Don't know how to set LIBMPI for %s", mpi_family)
        env.setvar('LIBMPI', libmpi)

    if not garoot:
        # NOTE(review): these extra InfiniBand libs are appended *after* $LIBMPI was already set above,
        # so they only end up in BLASOPT below, not in $LIBMPI — confirm this ordering is intended
        if self.cfg['armci_network'] in ["OPENIB"]:
            libmpi += " -libumad -libverbs -lpthread"

    # compiler optimization flags: set environment variables _and_ add them to list of make options
    self.setvar_env_makeopt('COPTIMIZE', os.getenv('CFLAGS'))
    self.setvar_env_makeopt('FOPTIMIZE', os.getenv('FFLAGS'))

    # BLAS and ScaLAPACK
    mpi_lib_dirs = ' '.join('-L' + d for d in os.getenv('MPI_LIB_DIR').split())
    self.setvar_env_makeopt('BLASOPT', ' '.join([os.getenv('LDFLAGS'), mpi_lib_dirs,
                                                 os.getenv('LIBSCALAPACK_MT'), libmpi]))
    # Setting LAPACK_LIB is required from 7.0.0 onwards.
    self.setvar_env_makeopt('LAPACK_LIB', os.getenv('LIBLAPACK'))

    self.setvar_env_makeopt('SCALAPACK', '%s %s' % (os.getenv('LDFLAGS'), os.getenv('LIBSCALAPACK_MT')))
    if self.toolchain.options['i8']:
        # 64-bit integers: tell NWChem ScaLAPACK uses ILP64 too
        size = 8
        self.setvar_env_makeopt('USE_SCALAPACK_I8', 'y')
        self.cfg.update('lib_defines', '-DSCALAPACK_I8')
    else:
        self.setvar_env_makeopt('HAS_BLAS', 'yes')
        self.setvar_env_makeopt('USE_SCALAPACK', 'y')
        size = 4

    # set sizes
    for lib in ['BLAS', 'LAPACK', 'SCALAPACK']:
        self.setvar_env_makeopt('%s_SIZE' % lib, str(size))

    env.setvar('NWCHEM_MODULES', nwchem_modules)

    env.setvar('LIB_DEFINES', self.cfg['lib_defines'])

    # clean first (why not)
    run_cmd("make clean", simple=True, log_all=True, log_ok=True)

    # configure build
    cmd = "make %s nwchem_config" % self.cfg['buildopts']
    run_cmd(cmd, simple=True, log_all=True, log_ok=True, log_output=True)