def extract_step(self):
    """Custom extract step for VMD."""
    super(EB_VMD, self).extract_step()
    # only VMD v1.9.3 and newer ship the surface sources as a separate compressed tarball
    if LooseVersion(self.version) < LooseVersion("1.9.3"):
        return
    change_dir(self.surf_dir)
    extract_file('surf.tar.Z', os.getcwd())
def configure_step(self):
    """Add some extra configure options (shared libs, PIC, optimization flags),
    and pre-generate the Libint compiler for v2.6.0+ before running the regular configure step."""
    # also build shared libraries (not enabled by default)
    self.cfg.update('configopts', "--enable-shared")
    if self.toolchain.options['pic']:
        # Enforce consistency.
        self.cfg.update('configopts', "--with-pic")
    if LooseVersion(self.version) >= LooseVersion('2.0') and LooseVersion(self.version) < LooseVersion('2.1'):
        # the code in libint is automatically generated and hence it is in some
        # parts so complex that -O2 or -O3 compiler optimization takes forever
        self.cfg.update('configopts', "--with-cxx-optflags='-O1'")
    elif LooseVersion(self.version) >= LooseVersion('2.1'):
        # pass down $CXXFLAGS to --with-cxxgen-optflags configure option;
        # mainly to avoid warning about it not being set (but $CXXFLAGS is picked up anyway in practice)
        self.cfg.update('configopts', "--with-cxxgen-optflags='%s'" % os.getenv('CXXFLAGS'))
    if LooseVersion(self.version) >= LooseVersion('2.6.0'):
        # Libint 2.6.0 requires first compiling the Libint compiler,
        # by running configure with appropriate options, followed by 'make export'
        # and unpacking the resulting source tarball;
        # see https://github.com/evaleev/libint/wiki#compiling-libint-compiler
        # CMake is recommended, but configuring with Fortran support doesn't work correctly yet in Libint 2.6.0
        # so stick to traditional configure script for now
        print_msg("configuring Libint compiler...")
        # first run autogen.sh script to generate initial configure script
        run_cmd("./autogen.sh")
        # configure the Libint *compiler* (not the library) with the dedicated extra options
        cmd = ' '.join([
            self.cfg['preconfigopts'],
            './configure',
            self.cfg['configopts'],
            self.cfg['libint_compiler_configopts'],
        ])
        run_cmd(cmd)
        print_msg("generating Libint library...")
        run_cmd("make export")
        # 'make export' produces a source tarball for the actual library; unpack it and move into it,
        # so the regular configure step below operates on the generated library sources
        source_fn = 'libint-%s.tgz' % self.version
        if os.path.exists(source_fn):
            extract_file(source_fn, os.getcwd(), change_into_dir=False)
            change_dir('libint-%s' % self.version)
        else:
            raise EasyBuildError("Could not find generated source tarball after 'make export'!")
    # --enable-fortran is only a known configure option for Libint library, not for Libint compiler,
    # so only add --enable-fortran *after* configuring & generating Libint compiler
    if self.cfg['with_fortran']:
        self.cfg.update('configopts', '--enable-fortran')
    super(EB_Libint, self).configure_step()
def extract_step(self):
    """Custom extract step for NAMD, we need to extract charm++ so we can patch it."""
    super(EB_NAMD, self).extract_step()
    # work from the final unpack location of the main NAMD sources
    change_dir(self.src[0]['finalpath'])
    tarballs = glob.glob('charm-*.tar')
    self.charm_tarballs = tarballs
    if len(tarballs) != 1:
        raise EasyBuildError("Expected to find exactly one tarball for Charm++, found: %s", tarballs)
    extract_file(tarballs[0], os.getcwd())
def configure_step(self):
    """Custom configure step for NAMD, we build charm++ first (if required)."""
    # complete Charm ++ and NAMD architecture string with compiler family
    comp_fam = self.toolchain.comp_family()
    if self.toolchain.options["usempi"]:
        # MPI builds always go through the MPI C++ compiler wrapper
        charm_arch_comp = "mpicxx"
    else:
        charm_arch_comps = {toolchain.GCC: "gcc", toolchain.INTELCOMP: "icc"}
        charm_arch_comp = charm_arch_comps.get(comp_fam, None)
    # compiler suffix used in the NAMD target architecture string
    namd_comps = {toolchain.GCC: "g++", toolchain.INTELCOMP: "icc"}
    namd_comp = namd_comps.get(comp_fam, None)
    if charm_arch_comp is None or namd_comp is None:
        raise EasyBuildError("Unknown compiler family, can't complete Charm++/NAMD target architecture.")
    self.cfg.update("charm_arch", charm_arch_comp)
    self.log.info("Updated 'charm_arch': %s" % self.cfg["charm_arch"])
    self.namd_arch = "%s-%s" % (self.cfg["namd_basearch"], namd_comp)
    self.log.info("Completed NAMD target architecture: %s" % self.namd_arch)
    # exactly one Charm++ tarball is expected to be bundled with the NAMD sources
    charm_tarballs = glob.glob("charm-*.tar")
    if len(charm_tarballs) != 1:
        raise EasyBuildError("Expected to find exactly one tarball for Charm++, found: %s", charm_tarballs)
    extract_file(charm_tarballs[0], os.getcwd())
    # build Charm++ inside its unpacked subdirectory (tarball name minus '.tar')
    tup = (self.cfg["charm_arch"], self.cfg["charm_opts"], self.cfg["parallel"], os.environ["CXXFLAGS"])
    cmd = "./build charm++ %s %s -j%s %s -DMPICH_IGNORE_CXX_SEEK" % tup
    charm_subdir = ".".join(os.path.basename(charm_tarballs[0]).split(".")[:-1])
    self.log.debug("Building Charm++ using cmd '%s' in '%s'" % (cmd, charm_subdir))
    run_cmd(cmd, path=charm_subdir)
    # compiler (options)
    self.cfg.update("namd_cfg_opts", '--cc "%s" --cc-opts "%s"' % (os.environ["CC"], os.environ["CFLAGS"]))
    self.cfg.update("namd_cfg_opts", '--cxx "%s" --cxx-opts "%s"' % (os.environ["CXX"], os.environ["CXXFLAGS"]))
    # NAMD dependencies: CUDA, FFTW
    cuda = get_software_root("CUDA")
    if cuda:
        self.cfg.update("namd_cfg_opts", "--with-cuda --cuda-prefix %s" % cuda)
    fftw = get_software_root("FFTW")
    if fftw:
        if LooseVersion(get_software_version("FFTW")) >= LooseVersion("3.0"):
            # FFTW v3.x support was only added in NAMD v2.9
            if LooseVersion(self.version) >= LooseVersion("2.9"):
                self.cfg.update("namd_cfg_opts", "--with-fftw3")
            else:
                raise EasyBuildError("Using FFTW v3.x only supported in NAMD v2.9 and up.")
        else:
            self.cfg.update("namd_cfg_opts", "--with-fftw")
        self.cfg.update("namd_cfg_opts", "--fftw-prefix %s" % fftw)
    # spaces in the Charm++ arch string become dashes in the NAMD config option
    namd_charm_arch = "--charm-arch %s" % "-".join(self.cfg["charm_arch"].strip().split(" "))
    cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch, self.cfg["namd_cfg_opts"])
    run_cmd(cmd)
def test_extract_file(self):
    """Test extract_file"""
    testdir = os.path.dirname(os.path.abspath(__file__))
    toy_tarball = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz')
    toy_source = os.path.join(self.test_prefix, 'toy-0.0', 'toy.source')

    # plain extraction into the test prefix
    self.assertFalse(os.path.exists(toy_source))
    path = ft.extract_file(toy_tarball, self.test_prefix)
    self.assertTrue(os.path.exists(toy_source))
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    shutil.rmtree(os.path.join(path, 'toy-0.0'))

    # extraction also works with a custom extract command, even for a renamed tarball
    toy_tarball_renamed = os.path.join(self.test_prefix, 'toy_tarball')
    shutil.copyfile(toy_tarball, toy_tarball_renamed)
    path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s")
    self.assertTrue(os.path.exists(toy_source))
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    shutil.rmtree(os.path.join(path, 'toy-0.0'))

    # also test behaviour of extract_file under --dry-run
    init_config(build_options={'extended_dry_run': True, 'silent': False})
    self.mock_stdout(True)
    path = ft.extract_file(toy_tarball, self.test_prefix)
    txt = self.get_stdout()
    self.mock_stdout(False)
    # nothing is extracted under dry run, but the command is reported
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0')))
    self.assertTrue(re.search('running command "tar xzf .*/toy-0.0.tar.gz"', txt))

    # forced extraction happens even under dry run
    path = ft.extract_file(toy_tarball, self.test_prefix, forced=True)
    self.assertTrue(os.path.exists(toy_source))
    self.assertTrue(os.path.samefile(path, self.test_prefix))
def extract_step(self):
    """Extract sources, if they haven't been already."""
    super(EB_Rosetta, self).extract_step()
    # locate sources, and unpack if necessary
    # old 'bundles' tarballs contain a gzipped tarball for source, recent ones contain unpacked source
    try:
        subdirs = os.listdir(self.builddir)
        if len(subdirs) != 1:
            raise EasyBuildError("Found no or multiple subdirectories, expected exactly one: %s", subdirs)
        prefix = os.path.join(self.builddir, subdirs[0])
        # candidate source locations, in order of preference
        self.srcdir = os.path.join(prefix, 'rosetta_source')
        if not os.path.exists(self.srcdir):
            self.srcdir = os.path.join(prefix, 'main', 'source')
        if not os.path.exists(self.srcdir):
            # last resort: unpack the gzipped source tarball shipped in old 'bundles'
            src_tarball = os.path.join(prefix, 'rosetta%s_source.tgz' % self.version)
            if not os.path.isfile(src_tarball):
                raise EasyBuildError("Neither source directory '%s', nor source tarball '%s' found.",
                                     self.srcdir, src_tarball)
            self.srcdir = extract_file(src_tarball, prefix, change_into_dir=False)
            change_dir(self.srcdir)
    except OSError as err:
        raise EasyBuildError("Getting Rosetta sources dir ready failed: %s", err)
def extract_and_copy(dirname_tmpl, optional=False, symlinks=False):
    """Copy specified directory, after extracting it (if required)."""
    try:
        start_dir = self.cfg['start_dir']
        src_path = os.path.join(start_dir, dirname_tmpl % '')
        if not os.path.exists(src_path):
            # directory is not there yet, so try to unpack the matching tarball
            src_tarball = os.path.join(start_dir, (dirname_tmpl % self.version) + '.tgz')
            if os.path.isfile(src_tarball):
                src_path = extract_file(src_tarball, start_dir)
        if os.path.exists(src_path):
            target = os.path.join(self.installdir, os.path.basename(src_path))
            shutil.copytree(src_path, target, symlinks=symlinks)
        elif not optional:
            raise EasyBuildError("Neither source directory '%s', nor source tarball '%s' found.",
                                 src_path, src_tarball)
    except OSError as err:
        raise EasyBuildError("Getting Rosetta %s dir ready failed: %s", dirname_tmpl, err)
def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_EB_MAIN, path=None): """ Download entire GitHub repo as a tar.gz archive, and extract it into specified path. @param repo: repo to download @param branch: branch to download @param account: GitHub account to download repo from @param path: path to extract to """ # make sure path exists, create it if necessary if path is None: path = tempfile.mkdtemp() # add account subdir path = os.path.join(path, account) mkdir(path, parents=True) extracted_dir_name = '%s-%s' % (repo, branch) base_name = '%s.tar.gz' % branch latest_commit_sha = fetch_latest_commit_sha(repo, account, branch) expected_path = os.path.join(path, extracted_dir_name) latest_sha_path = os.path.join(expected_path, 'latest-sha') # check if directory already exists, don't download if 'latest-sha' file indicates that it's up to date if os.path.exists(latest_sha_path): sha = read_file(latest_sha_path).split('\n')[0].rstrip() if latest_commit_sha == sha: _log.debug("Not redownloading %s/%s as it already exists: %s" % (account, repo, expected_path)) return expected_path url = URL_SEPARATOR.join([GITHUB_URL, account, repo, 'archive', base_name]) target_path = os.path.join(path, base_name) _log.debug("downloading repo %s/%s as archive from %s to %s" % (account, repo, url, target_path)) download_file(base_name, url, target_path) _log.debug("%s downloaded to %s, extracting now" % (base_name, path)) extracted_path = os.path.join(extract_file(target_path, path), extracted_dir_name) # check if extracted_path exists if not os.path.isdir(extracted_path): raise EasyBuildError( "%s should exist and contain the repo %s at branch %s", extracted_path, repo, branch) write_file(latest_sha_path, latest_commit_sha) _log.debug("Repo %s at branch %s extracted into %s" % (repo, branch, extracted_path)) return extracted_path
def run(self, *args, **kwargs):
    """Install as extension: unpack sources and copy (via install step)."""
    # when used for an extension, default to merging unpacked sources into the install dir
    if self.cfg['install_type'] is None:
        self.log.info(
            "Auto-enabled install_type=merge because Tarball is being used to install an extension"
        )
        self.cfg['install_type'] = 'merge'
    # unpack sources, then let the install step copy them into place
    unpacked_dir = extract_file(self.src, self.builddir, change_into_dir=False)
    kwargs['src'] = unpacked_dir
    self.install_step(*args, **kwargs)
def run(self, unpack_src=False):
    """Common operations for extensions: unpacking sources, patching, ..."""
    # unpack file if desired
    if unpack_src:
        dest = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
        self.ext_dir = extract_file("%s" % self.src, dest, extra_options=self.unpack_options)
    # apply any patches on top of the (unpacked) extension sources
    for patchfile in self.patches or []:
        if not apply_patch(patchfile, self.ext_dir):
            raise EasyBuildError("Applying patch %s failed", patchfile)
def run(self, unpack_src=False):
    """Common operations for extensions: unpacking sources, patching, ...

    :param unpack_src: whether to unpack the extension sources first
    """
    # unpack file if desired
    if unpack_src:
        targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
        self.ext_dir = extract_file("%s" % self.src, targetdir, extra_options=self.unpack_options)
    # patch if needed
    if self.patches:
        for patchfile in self.patches:
            if not apply_patch(patchfile, self.ext_dir):
                # raise an error rather than only logging it, so a failed patch aborts the
                # installation; consistent with the EasyBuildError-based handling used elsewhere
                raise EasyBuildError("Applying patch %s failed", patchfile)
def test_extract_file(self):
    """Test extract_file"""
    testdir = os.path.dirname(os.path.abspath(__file__))
    toy_tarball = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz')

    def toy_source_exists():
        # whether the extracted toy source file is present under the test prefix
        return os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))

    self.assertFalse(toy_source_exists())
    path = ft.extract_file(toy_tarball, self.test_prefix)
    self.assertTrue(toy_source_exists())
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    shutil.rmtree(os.path.join(path, 'toy-0.0'))

    # a renamed tarball can be extracted via a custom extract command
    toy_tarball_renamed = os.path.join(self.test_prefix, 'toy_tarball')
    shutil.copyfile(toy_tarball, toy_tarball_renamed)
    path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s")
    self.assertTrue(toy_source_exists())
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    shutil.rmtree(os.path.join(path, 'toy-0.0'))

    # also test behaviour of extract_file under --dry-run
    build_options = {
        'extended_dry_run': True,
        'silent': False,
    }
    init_config(build_options=build_options)
    self.mock_stdout(True)
    path = ft.extract_file(toy_tarball, self.test_prefix)
    txt = self.get_stdout()
    self.mock_stdout(False)
    # dry run only reports the command, nothing is actually extracted
    self.assertTrue(os.path.samefile(path, self.test_prefix))
    self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0')))
    self.assertTrue(re.search('running command "tar xzf .*/toy-0.0.tar.gz"', txt))

    # unless extraction is forced
    path = ft.extract_file(toy_tarball, self.test_prefix, forced=True)
    self.assertTrue(toy_source_exists())
    self.assertTrue(os.path.samefile(path, self.test_prefix))
def test_apply_patch(self):
    """ Test apply_patch """
    testdir = os.path.dirname(os.path.abspath(__file__))
    tmpdir = self.test_prefix
    toy_dir = os.path.join(testdir, 'sandbox', 'sources', 'toy')
    path = ft.extract_file(os.path.join(toy_dir, 'toy-0.0.tar.gz'), tmpdir)
    toy_patch = os.path.join(toy_dir, 'toy-0.0_typo.patch')

    # applying the patch to the unpacked sources should succeed, and fix the typo
    self.assertTrue(ft.apply_patch(toy_patch, path))
    patched = ft.read_file(os.path.join(path, 'toy-0.0', 'toy.source'))
    self.assertTrue("I'm a toy, and very proud of it" in patched)

    # trying the patch again should fail
    self.assertErrorRegex(EasyBuildError, "Couldn't apply patch file", ft.apply_patch, toy_patch, path)
def run(self, unpack_src=False):
    """Common operations for extensions: unpacking sources, patching, ..."""
    # unpack file if desired
    if unpack_src:
        unpack_dir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
        self.ext_dir = extract_file("%s" % self.src, unpack_dir, extra_options=self.unpack_options)
        # move into the start dir, if one is set and actually exists
        if self.start_dir and os.path.isdir(self.start_dir):
            self.log.debug("Using start_dir: %s", self.start_dir)
            change_dir(self.start_dir)
    # patch if needed
    EasyBlock.patch_step(self, beginpath=self.ext_dir)
def extract_step(self):
    """Extract sources, if they haven't been already."""
    super(EB_Rosetta, self).extract_step()
    # locate sources, and unpack if necessary
    # old 'bundles' tarballs contain a gzipped tarball for source, recent ones contain unpacked source
    try:
        prefix = os.path.join(self.builddir, '%s-%s' % (self.name.lower(), self.version))
        self.srcdir = os.path.join(prefix, 'rosetta_source')
        if not os.path.exists(self.srcdir):
            src_tarball = os.path.join(prefix, 'rosetta%s_source.tgz' % self.version)
            if os.path.isfile(src_tarball):
                self.srcdir = extract_file(src_tarball, prefix)
            else:
                self.log.error("Neither source directory '%s', nor source tarball '%s' found." % (self.srcdir, src_tarball))
    except OSError as err:
        # 'except OSError as err' replaces the Python 2-only 'except OSError, err' syntax,
        # which is a SyntaxError on Python 3 (and 'as' is valid from Python 2.6 on)
        self.log.error("Getting Rosetta sources dir ready failed: %s" % err)
def extract_and_copy(dirname_tmpl, optional=False):
    """Copy specified directory, after extracting it (if required).

    :param dirname_tmpl: directory name template, with a '%s' placeholder for the version
    :param optional: don't report an error if neither directory nor tarball is found
    """
    try:
        srcdir = os.path.join(self.cfg['start_dir'], dirname_tmpl % '')
        if not os.path.exists(srcdir):
            # try to extract if directory is not there yet
            src_tarball = os.path.join(self.cfg['start_dir'], (dirname_tmpl % self.version) + '.tgz')
            if os.path.isfile(src_tarball):
                srcdir = extract_file(src_tarball, self.cfg['start_dir'])
        if os.path.exists(srcdir):
            shutil.copytree(srcdir, os.path.join(self.installdir, os.path.basename(srcdir)))
        elif not optional:
            # fix: '%' needs a tuple with *both* values here; the original applied the two-placeholder
            # format string to srcdir alone (TypeError: not enough arguments for format string),
            # and passed src_tarball as a stray extra argument to log.error
            self.log.error("Neither source directory '%s', nor source tarball '%s' found." % (srcdir, src_tarball))
    except OSError as err:
        # 'as err' replaces the Python 2-only 'except OSError, err' form
        self.log.error("Getting Rosetta %s dir ready failed: %s" % (dirname_tmpl, err))
def install_step(self):
    """
    Install by unpacking tarball in dist directory,
    and copying site-packages dir to installdir.
    """
    # locate tarball produced by the build in the 'dist' subdirectory
    shortver = '.'.join(self.version.split('.')[0:2])
    fn_pattern = os.path.join(self.cfg['start_dir'], 'dist', "%s-%s.*.tar.gz" % (self.name, shortver))
    matches = glob.glob(fn_pattern)
    if not matches:
        raise EasyBuildError("No tarball found at %s", fn_pattern)
    if len(matches) > 1:
        raise EasyBuildError("Multiple matches found for tarball: %s", matches)
    tarball = matches[0]
    self.log.info("Tarball found at %s" % tarball)

    # unpack tarball to temporary directory
    tmpdir = tempfile.mkdtemp()
    srcdir = extract_file(tarball, tmpdir, change_into_dir=False)
    if not srcdir:
        raise EasyBuildError("Unpacking tarball %s failed?", tarball)
    change_dir(srcdir)

    # dive into the unpacked tarball until the single-entry chain ends;
    # that innermost directory must be 'site-packages'
    src = srcdir
    while True:
        entries = os.listdir(src)
        if len(entries) != 1:
            break
        src = os.path.join(src, entries[0])
    if not os.path.basename(src) == 'site-packages':
        raise EasyBuildError(
            "Expected to find a site-packages path, but found something else: %s", src)

    # copy contents of site-packages dir
    dest = os.path.join(self.installdir, 'site-packages')
    try:
        shutil.copytree(src, dest)
        remove_dir(tmpdir)
        os.chdir(self.installdir)
    except OSError as err:
        raise EasyBuildError("Failed to copy directory %s to %s: %s", src, dest, err)
def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_EB_MAIN, path=None):
    """
    Download entire GitHub repo as a tar.gz archive, and extract it into specified path.
    @param repo: repo to download
    @param branch: branch to download
    @param account: GitHub account to download repo from
    @param path: path to extract to
    """
    # make sure path exists, create it if necessary
    if path is None:
        path = tempfile.mkdtemp()
    # downloads are grouped in a per-account subdirectory
    path = os.path.join(path, account)
    mkdir(path, parents=True)

    extracted_dir_name = '%s-%s' % (repo, branch)
    base_name = '%s.tar.gz' % branch
    latest_commit_sha = fetch_latest_commit_sha(repo, account, branch)
    expected_path = os.path.join(path, extracted_dir_name)
    latest_sha_path = os.path.join(expected_path, 'latest-sha')

    # reuse an existing download if its 'latest-sha' marker matches the branch tip
    if os.path.exists(latest_sha_path):
        sha = read_file(latest_sha_path).split('\n')[0].rstrip()
        if latest_commit_sha == sha:
            _log.debug("Not redownloading %s/%s as it already exists: %s" % (account, repo, expected_path))
            return expected_path

    url = URL_SEPARATOR.join([GITHUB_URL, account, repo, 'archive', base_name])
    target_path = os.path.join(path, base_name)
    _log.debug("downloading repo %s/%s as archive from %s to %s" % (account, repo, url, target_path))
    download_file(base_name, url, target_path)
    _log.debug("%s downloaded to %s, extracting now" % (base_name, path))

    extracted_path = os.path.join(extract_file(target_path, path), extracted_dir_name)
    if not os.path.isdir(extracted_path):
        raise EasyBuildError("%s should exist and contain the repo %s at branch %s", extracted_path, repo, branch)

    # record which commit was downloaded, so future calls can skip the download
    write_file(latest_sha_path, latest_commit_sha)
    _log.debug("Repo %s at branch %s extracted into %s" % (repo, branch, extracted_path))
    return extracted_path
def test_apply_patch(self):
    """ Test apply_patch """
    testdir = os.path.dirname(os.path.abspath(__file__))
    tmpdir = self.test_prefix
    toy_tarball = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz')
    path = ft.extract_file(toy_tarball, tmpdir)
    toy_patch = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0_typo.patch')

    # the patch applies cleanly the first time, and the typo is fixed afterwards
    self.assertTrue(ft.apply_patch(toy_patch, path))
    patched_txt = ft.read_file(os.path.join(path, 'toy-0.0', 'toy.source'))
    expected_pattern = "I'm a toy, and very proud of it"
    self.assertTrue(expected_pattern in patched_txt)

    # trying the patch again should fail
    self.assertErrorRegex(EasyBuildError, "Couldn't apply patch file", ft.apply_patch, toy_patch, path)
def install_step(self):
    """
    Install by unpacking tarball in dist directory,
    and copying site-packages dir to installdir.
    """
    # locate tarball
    tarball = None
    shortver = '.'.join(self.version.split('.')[0:2])
    fn_pattern = os.path.join(self.cfg['start_dir'], 'dist', "%s-%s.*.tar.gz" % (self.name, shortver))
    matches = glob.glob(fn_pattern)
    if not matches:
        raise EasyBuildError("No tarball found at %s", fn_pattern)
    elif len(matches) > 1:
        raise EasyBuildError("Multiple matches found for tarball: %s", matches)
    else:
        tarball = matches[0]
        self.log.info("Tarball found at %s" % tarball)
    # unpack tarball to temporary directory
    tmpdir = tempfile.mkdtemp()
    srcdir = extract_file(tarball, tmpdir)
    if not srcdir:
        raise EasyBuildError("Unpacking tarball %s failed?", tarball)
    # locate site-packages dir to copy by diving into unpacked tarball
    src = srcdir
    while len(os.listdir(src)) == 1:
        src = os.path.join(src, os.listdir(src)[0])
    if not os.path.basename(src) == 'site-packages':
        raise EasyBuildError("Expected to find a site-packages path, but found something else: %s", src)
    # copy contents of site-packages dir
    dest = os.path.join(self.installdir, 'site-packages')
    try:
        shutil.copytree(src, dest)
        rmtree2(tmpdir)
        os.chdir(self.installdir)
    except OSError as err:
        # 'as err' replaces the Python 2-only 'except OSError, err' syntax,
        # which is invalid on Python 3 ('as' works on Python 2.6+ too)
        raise EasyBuildError("Failed to copy directory %s to %s: %s", src, dest, err)
def run(self, unpack_src=False):
    """Common operations for extensions: unpacking sources, patching, ..."""
    if not unpack_src:
        self._set_start_dir()
    else:
        unpack_dir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
        self.ext_dir = extract_file(self.src, unpack_dir, extra_options=self.unpack_options,
                                    change_into_dir=False, cmd=self.src_extract_cmd)
        # setting start dir must be done from unpacked source directory for extension,
        # because start_dir value is usually a relative path (if it is set)
        change_dir(self.ext_dir)
        self._set_start_dir()
        change_dir(self.start_dir)
    # patch if needed
    EasyBlock.patch_step(self, beginpath=self.ext_dir)
def run(self):
    """Perform the actual Python package build/installation procedure"""
    # extract_file
    if not self.src:
        self.log.error("No source found for Python package %s, required for installation. (src: %s)" %
                       (self.name, self.src))
    # use os.path.join rather than manual "%s/%s" string concatenation to build the target path
    self.ext_dir = extract_file("%s" % self.src, os.path.join(self.builddir, self.name),
                                extra_options=self.unpack_options)
    # patch if needed
    if self.patches:
        for patchfile in self.patches:
            if not apply_patch(patchfile, self.ext_dir):
                self.log.error("Applying patch %s failed" % patchfile)
    # configure, build_step, test, make install
    self.configure_step()
    self.build_step()
    self.test_step()
    self.install_step()
def extract_step(self):
    """Extract sources, if they haven't been already."""
    super(EB_Rosetta, self).extract_step()
    # locate sources, and unpack if necessary
    # old 'bundles' tarballs contain a gzipped tarball for source, recent ones contain unpacked source
    try:
        subdirs = os.listdir(self.builddir)
        if len(subdirs) == 1:
            prefix = os.path.join(self.builddir, subdirs[0])
        else:
            raise EasyBuildError("Found multiple subdirectories, don't know which one to pick: %s", subdirs)
        self.srcdir = os.path.join(prefix, 'rosetta_source')
        if not os.path.exists(self.srcdir):
            self.srcdir = os.path.join(prefix, 'main', 'source')
        if not os.path.exists(self.srcdir):
            src_tarball = os.path.join(prefix, 'rosetta%s_source.tgz' % self.version)
            if os.path.isfile(src_tarball):
                self.srcdir = extract_file(src_tarball, prefix)
            else:
                raise EasyBuildError("Neither source directory '%s', nor source tarball '%s' found.",
                                     self.srcdir, src_tarball)
    except OSError as err:
        # 'as err' replaces the Python 2-only 'except OSError, err' syntax,
        # which is a SyntaxError on Python 3 ('as' is valid from Python 2.6 on)
        raise EasyBuildError("Getting Rosetta sources dir ready failed: %s", err)
def configure_step(self):
    """Custom configure step for NAMD, we build charm++ first (if required)."""
    # complete Charm ++ and NAMD architecture string with compiler family
    comp_fam = self.toolchain.comp_family()
    if self.toolchain.options['usempi']:
        # MPI builds always use the MPI C++ compiler wrapper
        charm_arch_comp = 'mpicxx'
    else:
        charm_arch_comps = {
            toolchain.GCC: 'gcc',
            toolchain.INTELCOMP: 'icc',
        }
        charm_arch_comp = charm_arch_comps.get(comp_fam, None)
    # compiler suffix for the NAMD target architecture string
    namd_comps = {
        toolchain.GCC: 'g++',
        toolchain.INTELCOMP: 'icc',
    }
    namd_comp = namd_comps.get(comp_fam, None)
    if charm_arch_comp is None or namd_comp is None:
        raise EasyBuildError(
            "Unknown compiler family, can't complete Charm++/NAMD target architecture."
        )
    self.cfg.update('charm_arch', charm_arch_comp)
    self.log.info("Updated 'charm_arch': %s" % self.cfg['charm_arch'])
    self.namd_arch = '%s-%s' % (self.cfg['namd_basearch'], namd_comp)
    self.log.info("Completed NAMD target architecture: %s" % self.namd_arch)
    # exactly one Charm++ tarball is expected to be bundled alongside the NAMD sources
    charm_tarballs = glob.glob('charm-*.tar')
    if len(charm_tarballs) != 1:
        raise EasyBuildError(
            "Expected to find exactly one tarball for Charm++, found: %s",
            charm_tarballs)
    extract_file(charm_tarballs[0], os.getcwd())
    tup = (self.cfg['charm_arch'], self.cfg['charm_opts'],
           self.cfg['parallel'], os.environ['CXXFLAGS'])
    cmd = "./build charm++ %s %s -j%s %s -DMPICH_IGNORE_CXX_SEEK" % tup
    # Charm++ unpacks into a directory named after the tarball minus its '.tar' suffix
    charm_subdir = '.'.join(
        os.path.basename(charm_tarballs[0]).split('.')[:-1])
    self.log.debug("Building Charm++ using cmd '%s' in '%s'" %
                   (cmd, charm_subdir))
    run_cmd(cmd, path=charm_subdir)
    # compiler (options)
    self.cfg.update(
        'namd_cfg_opts', '--cc "%s" --cc-opts "%s"' %
        (os.environ['CC'], os.environ['CFLAGS']))
    self.cfg.update(
        'namd_cfg_opts', '--cxx "%s" --cxx-opts "%s"' %
        (os.environ['CXX'], os.environ['CXXFLAGS']))
    # NAMD dependencies: CUDA, FFTW
    cuda = get_software_root('CUDA')
    if cuda:
        self.cfg.update('namd_cfg_opts',
                        "--with-cuda --cuda-prefix %s" % cuda)
    fftw = get_software_root('FFTW')
    if fftw:
        if LooseVersion(
                get_software_version('FFTW')) >= LooseVersion('3.0'):
            # FFTW v3.x support was only introduced in NAMD v2.9
            if LooseVersion(self.version) >= LooseVersion('2.9'):
                self.cfg.update('namd_cfg_opts', "--with-fftw3")
            else:
                raise EasyBuildError(
                    "Using FFTW v3.x only supported in NAMD v2.9 and up.")
        else:
            self.cfg.update('namd_cfg_opts', "--with-fftw")
        self.cfg.update('namd_cfg_opts', "--fftw-prefix %s" % fftw)
    # spaces in the Charm++ arch string become dashes in NAMD's --charm-arch option
    namd_charm_arch = "--charm-arch %s" % '-'.join(
        self.cfg['charm_arch'].strip().split(' '))
    cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch,
                                  self.cfg["namd_cfg_opts"])
    run_cmd(cmd)
def configure_step(self):
    """Configure Xmipp build via a provided wrapper around scons."""
    # check if all our dependencies are in place
    self.python_root = get_software_root('Python')
    if not self.python_root:
        raise EasyBuildError("Python not loaded as a dependency, which is required for %s", self.name)
    python_libdir = det_pylibdir()
    self.python_short_ver = '.'.join(get_software_version('Python').split('.')[:2])
    java_root = get_software_root('Java')
    if not java_root:
        raise EasyBuildError("Java not loaded as a dependency, which is required for %s", self.name)
    # extract some dependencies that we really need and can't find anywhere else
    # alglib tarball has version in name, so lets find it with a glob
    # we can't do this in extract step before these are in the original sources tarball, so we need to know
    # startdir first
    external_path = os.path.join(self.cfg['start_dir'], 'external')
    alglib_tar = glob.glob(os.path.join(external_path, 'alglib*.tgz'))[0]
    # each bundled dependency is extracted once ('bilib.tgz' was redundantly listed twice before)
    for src in ['bilib.tgz', 'condor.tgz', alglib_tar, 'scons.tgz']:
        extract_file(os.path.join(external_path, src), external_path)
    # make sure we are back in the start dir
    os.chdir(self.cfg['start_dir'])
    # build step expects these to exist
    mkdir(os.path.join(self.cfg['start_dir'], 'bin'))
    mkdir(os.path.join(self.cfg['start_dir'], 'lib'))
    python_inc_dir = os.path.join(self.python_root, 'include', 'python%s' % self.python_short_ver)
    numpy_inc_dir = os.path.join(self.python_root, python_libdir, 'numpy', 'core', 'include')
    if self.toolchain.mpi_family() == toolchain.INTELMPI:
        mpi_bindir = os.path.join(get_software_root('impi'), 'intel64', 'bin')
    else:
        mpi_bindir = os.path.join(get_software_root(self.toolchain.MPI_MODULE_NAME[0]), 'bin')
    if not os.path.exists(numpy_inc_dir):
        raise EasyBuildError("numpy 'include' directory %s not found", numpy_inc_dir)
    if not os.path.exists(mpi_bindir):
        raise EasyBuildError("MPI 'bin' subdir %s does not exist", mpi_bindir)
    # configure via the scons wrapper bundled with the sources
    cmd = ' '.join([
        self.cfg['preconfigopts'],
        'python external/scons/scons.py',
        'mode=configure',
        '-j %s' % self.cfg['parallel'],
        '--config=force',
        'profile=no',
        'fast=yes',
        'warn=no',
        'release=yes',
        'gtest=no',
        'cuda=no',
        'debug=no',
        'matlab=no',
        'java=no',
        'LINKERFORPROGRAMS="$CXX"',
        'MPI_BINDIR=%s' % mpi_bindir,
        'JAVA_HOME=%s' % java_root,
        'JAVAC=javac',
        'CC="$CC"',
        'CXXFLAGS="$CXXFLAGS -DMPICH_IGNORE_CXX_SEEK -I%s -I%s"' % (python_inc_dir, numpy_inc_dir),
        'CXX="$CXX"',
        'MPI_CC="$MPICC"',
        'MPI_CXX="$MPICXX"',
        'MPI_INCLUDE="$MPI_INC_DIR"',
        'MPI_LIBDIR="$MPI_LIB_DIR"',
        'MPI_LINKERFORPROGRAMS="$MPICC"',
        'LIBPATH="$LD_LIBRARY_PATH"',
        self.cfg['configopts'],
    ])
    run_cmd(cmd, log_all=True, simple=True)
def test_step(self):
    """Run WPS test (requires large dataset to be downloaded). """
    wpsdir = None

    def run_wps_cmd(cmdname, mpi_cmd=True):
        """Run a WPS command, and check for success."""
        cmd = os.path.join(wpsdir, "%s.exe" % cmdname)
        if mpi_cmd:
            cmd = self.toolchain.mpi_cmd_for(cmd, 1)
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        re_success = re.compile("Successful completion of %s" % cmdname)
        if not re_success.search(out):
            self.log.error("%s.exe failed (pattern '%s' not found)?" % (cmdname, re_success.pattern))

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            self.log.error("List of URLs for testdata not provided.")
        wpsdir = os.path.join(self.builddir, "WPS")
        try:
            # create temporary directory
            tmpdir = tempfile.mkdtemp()
            os.chdir(tmpdir)
            # download data
            testdata_paths = []
            for testdata in self.cfg['testdata']:
                path = self.obtain_file(testdata)
                if not path:
                    self.log.error("Downloading file from %s failed?" % testdata)
                testdata_paths.append(path)
            # unpack data
            for path in testdata_paths:
                extract_file(path, tmpdir)
            # copy namelist.wps file
            fn = "namelist.wps"
            shutil.copy2(os.path.join(wpsdir, fn), tmpdir)
            namelist_file = os.path.join(tmpdir, fn)

            # GEOGRID
            # setup directories and files
            for d in os.listdir(os.path.join(tmpdir, "geog")):
                os.symlink(os.path.join(tmpdir, "geog", d), os.path.join(tmpdir, d))
            # patch namelist.wps file for geogrib
            for line in fileinput.input(namelist_file, inplace=1, backup='.orig.geogrid'):
                line = re.sub(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir, line)
                sys.stdout.write(line)
            # GEOGRID.TBL
            geogrid_dir = os.path.join(tmpdir, "geogrid")
            os.mkdir(geogrid_dir)
            os.symlink(os.path.join(wpsdir, "geogrid", "GEOGRID.TBL.ARW"),
                       os.path.join(geogrid_dir, "GEOGRID.TBL"))
            # run geogrid.exe
            run_wps_cmd("geogrid")

            # UNGRIB
            # determine start and end time stamps of grib files
            grib_file_prefix = "fnl_"
            k = len(grib_file_prefix)
            fs = [f for f in sorted(os.listdir('.')) if f.startswith(grib_file_prefix)]
            start = "%s:00:00" % fs[0][k:]
            end = "%s:00:00" % fs[-1][k:]
            # patch namelist.wps file for ungrib
            shutil.copy2(os.path.join(wpsdir, "namelist.wps"), tmpdir)
            for line in fileinput.input(namelist_file, inplace=1, backup='.orig.ungrib'):
                line = re.sub(r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start), line)
                line = re.sub(r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end), line)
                sys.stdout.write(line)
            # copy correct Vtable
            shutil.copy2(os.path.join(wpsdir, "ungrib", "Variable_Tables", "Vtable.ARW"),
                         os.path.join(tmpdir, "Vtable"))
            # run link_grib.csh script
            cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
            run_cmd(cmd, log_all=True, simple=True)
            # run ungrib.exe
            run_wps_cmd("ungrib", mpi_cmd=False)

            # METGRID.TBL
            metgrid_dir = os.path.join(tmpdir, "metgrid")
            os.mkdir(metgrid_dir)
            os.symlink(os.path.join(wpsdir, "metgrid", "METGRID.TBL.ARW"),
                       os.path.join(metgrid_dir, "METGRID.TBL"))
            # run metgrid.exe
            run_wps_cmd('metgrid')

            # clean up
            rmtree2(tmpdir)
            os.chdir(self.builddir)
        except OSError as err:
            # 'as err' replaces the Python 2-only 'except OSError, err' syntax,
            # which is invalid on Python 3 ('as' works on Python 2.6+ too)
            self.log.error("Failed to run WPS test: %s" % err)
def install_step(self):
    """
    Actual installation
    - create silent cfg file
    - execute command
    """
    impiver = LooseVersion(self.version)
    if impiver >= LooseVersion('4.0.1'):
        # impi starting from version 4.0.1.x uses standard installation procedure.
        silent_cfg_names_map = {}
        if impiver < LooseVersion('4.1.1'):
            # since impi v4.1.1, silent.cfg has been slightly changed to be 'more standard'
            # for older versions, pass the pre-2012 activation/license key names
            silent_cfg_names_map.update({
                'activation_name': ACTIVATION_NAME_2012,
                'license_file_name': LICENSE_FILE_NAME_2012,
            })
        super(EB_impi, self).install_step(silent_cfg_names_map=silent_cfg_names_map)
        # impi v4.1.1 and v5.0.1 installers create impi/<version> subdir, so stuff needs to be moved afterwards
        if impiver == LooseVersion('4.1.1.036') or impiver >= LooseVersion('5.0.1.035'):
            super(EB_impi, self).move_after_install()
    else:
        # impi up until version 4.0.0.x uses custom installation procedure.
        # silent.cfg contents: identical [mpi] and [mpi-rt] sections, non-RPM,
        # non-root install into the EasyBuild install directory
        silent = """[mpi]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept
[mpi-rt]
INSTALLDIR=%(ins)s
LICENSEPATH=%(lic)s
INSTALLMODE=NONRPM
INSTALLUSER=NONROOT
UPDATE_LD_SO_CONF=NO
PROCEED_WITHOUT_PYTHON=yes
AUTOMOUNTED_CLUSTER=yes
EULA=accept

""" % {'lic': self.license_file, 'ins': self.installdir}

        # already in correct directory
        silentcfg = os.path.join(os.getcwd(), "silent.cfg")
        write_file(silentcfg, silent)
        self.log.debug("Contents of %s: %s", silentcfg, silent)

        # workaround for mktmpdir: use custom temp dir under the build dir
        tmpdir = os.path.join(os.getcwd(), self.version, 'mytmpdir')
        mkdir(tmpdir, parents=True)

        cmd = "./install.sh --tmp-dir=%s --silent=%s" % (tmpdir, silentcfg)
        run_cmd(cmd, log_all=True, simple=True)

    # recompile libfabric (if requested)
    # some Intel MPI versions (like 2019 update 6) no longer ship libfabric sources
    libfabric_path = os.path.join(self.installdir, 'libfabric')
    if impiver >= LooseVersion('2019') and self.cfg['libfabric_rebuild']:
        if self.cfg['ofi_internal']:
            libfabric_src_tgz_fn = 'src.tgz'
            if os.path.exists(os.path.join(libfabric_path, libfabric_src_tgz_fn)):
                change_dir(libfabric_path)
                srcdir = extract_file(libfabric_src_tgz_fn, os.getcwd(), change_into_dir=False)
                change_dir(srcdir)
                libfabric_installpath = os.path.join(self.installdir, 'intel64', 'libfabric')

                make = 'make'
                if self.cfg['parallel']:
                    make += ' -j %d' % self.cfg['parallel']

                cmds = [
                    './configure --prefix=%s %s' % (libfabric_installpath, self.cfg['libfabric_configopts']),
                    make,
                    'make install'
                ]
                for cmd in cmds:
                    run_cmd(cmd, log_all=True, simple=True)
            else:
                self.log.info("Rebuild of libfabric is requested, but %s does not exist, so skipping...",
                              libfabric_src_tgz_fn)
        else:
            raise EasyBuildError("Rebuild of libfabric is requested, but ofi_internal is set to False.")
def configure_step(self):
    """Custom configure step for NAMD, we build charm++ first (if required)."""

    # complete Charm ++ and NAMD architecture string with compiler family
    comp_fam = self.toolchain.comp_family()
    if self.toolchain.options['usempi']:
        # MPI builds of Charm++ always use the mpicxx wrapper
        charm_arch_comp = 'mpicxx'
    else:
        charm_arch_comps = {
            toolchain.GCC: 'gcc',
            toolchain.INTELCOMP: 'icc',
        }
        charm_arch_comp = charm_arch_comps.get(comp_fam, None)
    # NAMD uses different compiler names in its arch string than Charm++
    namd_comps = {
        toolchain.GCC: 'g++',
        toolchain.INTELCOMP: 'icc',
    }
    namd_comp = namd_comps.get(comp_fam, None)
    if charm_arch_comp is None or namd_comp is None:
        self.log.error("Unknown compiler family, can't complete Charm++/NAMD target architecture.")
    self.cfg.update('charm_arch', charm_arch_comp)
    self.log.info("Updated 'charm_arch': %s" % self.cfg['charm_arch'])
    self.namd_arch = '%s-%s' % (self.cfg['namd_basearch'], namd_comp)
    self.log.info("Completed NAMD target architecture: %s" % self.namd_arch)

    # the NAMD sources ship exactly one bundled Charm++ tarball; locate it
    charm_tarballs = glob.glob('charm-*.tar')
    if len(charm_tarballs) != 1:
        self.log.error("Expected to find exactly one tarball for Charm++, found: %s" % charm_tarballs)
    extract_file(charm_tarballs[0], os.getcwd())

    # build Charm++ with the completed arch string;
    # -DMPICH_IGNORE_CXX_SEEK avoids SEEK_SET/SEEK_CUR clashes with MPICH-based MPI libraries
    tup = (self.cfg['charm_arch'], self.cfg['charm_opts'], self.cfg['parallel'], os.environ['CXXFLAGS'])
    cmd = "./build charm++ %s %s -j%s %s -DMPICH_IGNORE_CXX_SEEK" % tup
    # tarball name minus extension == unpacked subdirectory name
    charm_subdir = '.'.join(os.path.basename(charm_tarballs[0]).split('.')[:-1])
    self.log.debug("Building Charm++ using cmd '%s' in '%s'" % (cmd, charm_subdir))
    run_cmd(cmd, path=charm_subdir)

    # compiler (options): pass toolchain compilers/flags down to NAMD's config script
    self.cfg.update('namd_cfg_opts', '--cc "%s" --cc-opts "%s"' % (os.environ['CC'], os.environ['CFLAGS']))
    self.cfg.update('namd_cfg_opts', '--cxx "%s" --cxx-opts "%s"' % (os.environ['CXX'], os.environ['CXXFLAGS']))

    # NAMD dependencies: CUDA, FFTW
    cuda = get_software_root('CUDA')
    if cuda:
        self.cfg.update('namd_cfg_opts', "--with-cuda --cuda-prefix %s" % cuda)
    fftw = get_software_root('FFTW')
    if fftw:
        if LooseVersion(get_software_version('FFTW')) >= LooseVersion('3.0'):
            if LooseVersion(self.version) >= LooseVersion('2.9'):
                self.cfg.update('namd_cfg_opts', "--with-fftw3")
            else:
                self.log.error("Using FFTW v3.x only supported in NAMD v2.9 and up.")
        else:
            self.cfg.update('namd_cfg_opts', "--with-fftw")
        self.cfg.update('namd_cfg_opts', "--fftw-prefix %s" % fftw)

    # NAMD's config expects the charm arch with '-' separators rather than spaces
    namd_charm_arch = "--charm-arch %s" % '-'.join(self.cfg['charm_arch'].strip().split(' '))
    cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch, self.cfg["namd_cfg_opts"])
    run_cmd(cmd)
def test_step(self):
    """Run WIEN2k test benchmarks."""

    def run_wien2k_test(cmd_arg):
        """Run a WIEN2k lapw1 test command, and check its output for success."""
        cmd = "x_lapw lapw1 %s" % cmd_arg
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # fixed: raw string — '\s' in a plain string literal is an invalid escape sequence
        re_success = re.compile(r"LAPW1\s+END")
        if not re_success.search(out):
            self.log.error(
                "Test '%s' in %s failed (pattern '%s' not found)?" % (cmd, os.getcwd(), re_success.pattern)
            )
        else:
            self.log.info("Test '%s' seems to have run successfully: %s" % (cmd, out))

    if self.cfg["runtest"]:
        if not self.cfg["testdata"]:
            self.log.error("List of URLs for testdata not provided.")

        # prepend install dir to $PATH so the WIEN2k x_lapw/lapw1 binaries are found
        path = os.getenv("PATH")
        env.setvar("PATH", "%s:%s" % (self.installdir, path))

        try:
            cwd = os.getcwd()

            # create temporary directory to run tests in
            tmpdir = tempfile.mkdtemp()
            os.chdir(tmpdir)

            # download test data
            testdata_paths = {}
            for testdata in self.cfg["testdata"]:
                td_path = self.obtain_file(testdata)
                if not td_path:
                    self.log.error("Downloading file from %s failed?" % testdata)
                testdata_paths.update({os.path.basename(testdata): td_path})
            self.log.debug("testdata_paths: %s" % testdata_paths)

            # unpack serial benchmark
            serial_test_name = "test_case"
            extract_file(testdata_paths["%s.tar.gz" % serial_test_name], tmpdir)

            # run serial benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-c")

            # unpack parallel benchmark (in serial benchmark dir)
            parallel_test_name = "mpi-benchmark"
            extract_file(testdata_paths["%s.tar.gz" % parallel_test_name], tmpdir)

            # run parallel benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-p")

            os.chdir(cwd)
            shutil.rmtree(tmpdir)
        # fixed: 'except OSError, err' is Python-2-only syntax; use 'as err'
        except OSError as err:
            self.log.error("Failed to run WIEN2k benchmark tests: %s" % err)

        # reset original path
        env.setvar("PATH", path)

    self.log.debug("Current dir: %s" % os.getcwd())
def test_step(self):
    """Run WIEN2k test benchmarks."""

    def run_wien2k_test(cmd_arg):
        """Run a WIEN2k lapw1 test command, and check its output for success."""
        cmd = "x_lapw lapw1 %s" % cmd_arg
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # fixed: raw string — '\s' in a plain string literal is an invalid escape sequence
        re_success = re.compile(r"LAPW1\s+END")
        if not re_success.search(out):
            self.log.error(
                "Test '%s' in %s failed (pattern '%s' not found)?" % (cmd, os.getcwd(), re_success.pattern))
        else:
            self.log.info("Test '%s' seems to have run successfully: %s" % (cmd, out))

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            self.log.error("List of URLs for testdata not provided.")

        # prepend install dir to $PATH so the WIEN2k x_lapw/lapw1 binaries are found
        path = os.getenv('PATH')
        env.setvar('PATH', "%s:%s" % (self.installdir, path))

        try:
            cwd = os.getcwd()

            # create temporary directory to run tests in
            tmpdir = tempfile.mkdtemp()
            os.chdir(tmpdir)

            # download test data
            testdata_paths = {}
            for testdata in self.cfg['testdata']:
                td_path = self.obtain_file(testdata)
                if not td_path:
                    self.log.error("Downloading file from %s failed?" % testdata)
                testdata_paths.update({os.path.basename(testdata): td_path})
            self.log.debug('testdata_paths: %s' % testdata_paths)

            # unpack serial benchmark
            serial_test_name = "test_case"
            extract_file(testdata_paths['%s.tar.gz' % serial_test_name], tmpdir)

            # run serial benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-c")

            # unpack parallel benchmark (in serial benchmark dir)
            parallel_test_name = "mpi-benchmark"
            extract_file(testdata_paths['%s.tar.gz' % parallel_test_name], tmpdir)

            # run parallel benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-p")

            os.chdir(cwd)
            rmtree2(tmpdir)
        # fixed: 'except OSError, err' is Python-2-only syntax; use 'as err'
        except OSError as err:
            self.log.error("Failed to run WIEN2k benchmark tests: %s" % err)

        # reset original path
        env.setvar('PATH', path)

    self.log.debug("Current dir: %s" % os.getcwd())
def test_step(self):
    """Run WPS test (requires large dataset to be downloaded)."""

    # set inside the 'runtest' branch below; closed over by run_wps_cmd
    wpsdir = None

    def run_wps_cmd(cmdname, mpi_cmd=True):
        """Run a WPS command, and check for success."""
        cmd = os.path.join(wpsdir, "%s.exe" % cmdname)
        if mpi_cmd:
            if build_option('mpi_tests'):
                cmd = self.toolchain.mpi_cmd_for(cmd, 1)
            else:
                self.log.info("Skipping MPI test for %s, since MPI tests are disabled", cmd)
                return
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # WPS tools report success with this exact message in their output
        re_success = re.compile("Successful completion of %s" % cmdname)
        if not re_success.search(out):
            raise EasyBuildError("%s.exe failed (pattern '%s' not found)?", cmdname, re_success.pattern)

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            raise EasyBuildError("List of URLs for testdata not provided.")

        wpsdir = os.path.join(self.builddir, self.wps_subdir)
        try:
            # create temporary directory to run the tests in
            tmpdir = tempfile.mkdtemp()
            change_dir(tmpdir)

            # download test data
            testdata_paths = []
            for testdata in self.cfg['testdata']:
                path = self.obtain_file(testdata)
                if not path:
                    raise EasyBuildError("Downloading file from %s failed?", testdata)
                testdata_paths.append(path)

            # unpack test data
            for path in testdata_paths:
                extract_file(path, tmpdir)

            namelist_file = os.path.join(tmpdir, 'namelist.wps')

            # GEOGRID
            # setup directories and files; geography data dir was renamed in WPS 4.0
            if LooseVersion(self.version) < LooseVersion("4.0"):
                geog_data_dir = "geog"
            else:
                geog_data_dir = "WPS_GEOG"
            for dir_name in os.listdir(os.path.join(tmpdir, geog_data_dir)):
                symlink(os.path.join(tmpdir, geog_data_dir, dir_name), os.path.join(tmpdir, dir_name))

            # copy namelist.wps file and patch it for geogrid (point geog_data_path to tmpdir)
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir)]
            apply_regex_substitutions(namelist_file, regex_subs)

            # GEOGRID.TBL
            geogrid_dir = os.path.join(tmpdir, 'geogrid')
            mkdir(geogrid_dir)
            symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                    os.path.join(geogrid_dir, 'GEOGRID.TBL'))

            # run geogrid.exe
            run_wps_cmd("geogrid")

            # UNGRIB
            # determine start and end time stamps of grib files from their file names
            grib_file_prefix = "fnl_"
            k = len(grib_file_prefix)
            fs = [f for f in sorted(os.listdir('.')) if f.startswith(grib_file_prefix)]
            start = "%s:00:00" % fs[0][k:]
            end = "%s:00:00" % fs[-1][k:]

            # copy namelist.wps file and patch it for ungrib (fresh copy, set start/end dates)
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [
                (r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start)),
                (r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end)),
            ]
            apply_regex_substitutions(namelist_file, regex_subs)

            # copy correct Vtable; file name differs across WPS versions
            vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
            if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW'), os.path.join(tmpdir, 'Vtable'))
            elif os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW.UPP')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'), os.path.join(tmpdir, 'Vtable'))
            else:
                raise EasyBuildError("Could not find Vtable file to use for testing ungrib")

            # run link_grib.csh script
            cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
            run_cmd(cmd, log_all=True, simple=True)

            # run ungrib.exe (not an MPI program)
            run_wps_cmd("ungrib", mpi_cmd=False)

            # METGRID.TBL
            metgrid_dir = os.path.join(tmpdir, 'metgrid')
            mkdir(metgrid_dir)
            symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                    os.path.join(metgrid_dir, 'METGRID.TBL'))

            # run metgrid.exe
            run_wps_cmd('metgrid')

            # clean up (leave tmpdir before removing it)
            change_dir(self.builddir)
            remove_dir(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to run WPS test: %s", err)
def test_step(self):
    """Run WPS test (requires large dataset to be downloaded)."""

    # set inside the 'runtest' branch below; closed over by run_wps_cmd
    wpsdir = None

    def run_wps_cmd(cmdname, mpi_cmd=True):
        """Run a WPS command, and check for success."""
        cmd = os.path.join(wpsdir, "%s.exe" % cmdname)
        if mpi_cmd:
            if build_option('mpi_tests'):
                cmd = self.toolchain.mpi_cmd_for(cmd, 1)
            else:
                self.log.info("Skipping MPI test for %s, since MPI tests are disabled", cmd)
                return
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # WPS tools report success with this exact message in their output
        re_success = re.compile("Successful completion of %s" % cmdname)
        if not re_success.search(out):
            raise EasyBuildError("%s.exe failed (pattern '%s' not found)?", cmdname, re_success.pattern)

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            raise EasyBuildError("List of URLs for testdata not provided.")

        wpsdir = os.path.join(self.builddir, self.wps_subdir)
        try:
            # create temporary directory to run the tests in
            tmpdir = tempfile.mkdtemp()
            change_dir(tmpdir)

            # download test data
            testdata_paths = []
            for testdata in self.cfg['testdata']:
                path = self.obtain_file(testdata)
                if not path:
                    raise EasyBuildError("Downloading file from %s failed?", testdata)
                testdata_paths.append(path)

            # unpack test data; extract_file no longer changes directory itself,
            # so change into the unpacked directory explicitly
            for path in testdata_paths:
                srcdir = extract_file(path, tmpdir, change_into_dir=False)
                change_dir(srcdir)

            namelist_file = os.path.join(tmpdir, 'namelist.wps')

            # GEOGRID
            # setup directories and files; geography data dir was renamed in WPS 4.0
            if LooseVersion(self.version) < LooseVersion("4.0"):
                geog_data_dir = "geog"
            else:
                geog_data_dir = "WPS_GEOG"
            for dir_name in os.listdir(os.path.join(tmpdir, geog_data_dir)):
                symlink(os.path.join(tmpdir, geog_data_dir, dir_name), os.path.join(tmpdir, dir_name))

            # copy namelist.wps file and patch it for geogrid (point geog_data_path to tmpdir)
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir)]
            apply_regex_substitutions(namelist_file, regex_subs)

            # GEOGRID.TBL
            geogrid_dir = os.path.join(tmpdir, 'geogrid')
            mkdir(geogrid_dir)
            symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                    os.path.join(geogrid_dir, 'GEOGRID.TBL'))

            # run geogrid.exe
            run_wps_cmd("geogrid")

            # UNGRIB
            # determine start and end time stamps of grib files from their file names
            grib_file_prefix = "fnl_"
            k = len(grib_file_prefix)
            fs = [
                f for f in sorted(os.listdir('.'))
                if f.startswith(grib_file_prefix)
            ]
            start = "%s:00:00" % fs[0][k:]
            end = "%s:00:00" % fs[-1][k:]

            # copy namelist.wps file and patch it for ungrib (fresh copy, set start/end dates)
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [
                (r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start)),
                (r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end)),
            ]
            apply_regex_substitutions(namelist_file, regex_subs)

            # copy correct Vtable; file name differs across WPS versions
            vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
            if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW'), os.path.join(tmpdir, 'Vtable'))
            elif os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW.UPP')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'), os.path.join(tmpdir, 'Vtable'))
            else:
                raise EasyBuildError("Could not find Vtable file to use for testing ungrib")

            # run link_grib.csh script
            cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
            run_cmd(cmd, log_all=True, simple=True)

            # run ungrib.exe (not an MPI program)
            run_wps_cmd("ungrib", mpi_cmd=False)

            # METGRID.TBL
            metgrid_dir = os.path.join(tmpdir, 'metgrid')
            mkdir(metgrid_dir)
            symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                    os.path.join(metgrid_dir, 'METGRID.TBL'))

            # run metgrid.exe
            run_wps_cmd('metgrid')

            # clean up (leave tmpdir before removing it)
            change_dir(self.builddir)
            remove_dir(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to run WPS test: %s", err)
def test_step(self):
    """Run WIEN2k test benchmarks."""

    def run_wien2k_test(cmd_arg):
        """Run a WIEN2k lapw1 test command, and check its output for success."""
        cmd = "x_lapw lapw1 %s" % cmd_arg
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # fixed: raw string — '\s' in a plain string literal is an invalid escape sequence
        re_success = re.compile(r"LAPW1\s+END")
        if not re_success.search(out):
            raise EasyBuildError(
                "Test '%s' in %s failed (pattern '%s' not found)?",
                cmd, os.getcwd(), re_success.pattern)
        else:
            self.log.info("Test '%s' seems to have run successfully: %s" % (cmd, out))

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            raise EasyBuildError("List of URLs for testdata not provided.")

        # prepend $PATH with install directory, define $SCRATCH which is used by the tests
        env.setvar('PATH', "%s:%s" % (self.installdir, os.environ['PATH']))

        try:
            cwd = os.getcwd()

            # create temporary directory to run tests in
            tmpdir = tempfile.mkdtemp()
            os.chdir(tmpdir)
            self.log.info("Running tests in %s" % tmpdir)

            scratch = os.path.join(tmpdir, 'scratch')
            mkdir(scratch)
            env.setvar('SCRATCH', scratch)

            # download test data
            testdata_paths = {}
            for testdata in self.cfg['testdata']:
                td_path = self.obtain_file(testdata)
                if not td_path:
                    raise EasyBuildError(
                        "Downloading file from %s failed?", testdata)
                testdata_paths.update(
                    {os.path.basename(testdata): td_path})
            self.log.debug('testdata_paths: %s' % testdata_paths)

            # unpack serial benchmark
            serial_test_name = "test_case"
            srcdir = extract_file(testdata_paths['%s.tar.gz' % serial_test_name], tmpdir,
                                  change_into_dir=False)
            change_dir(srcdir)

            # run serial benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-c")

            # unpack parallel benchmark (in serial benchmark dir)
            parallel_test_name = "mpi-benchmark"
            srcdir = extract_file(testdata_paths['%s.tar.gz' % parallel_test_name], tmpdir,
                                  change_into_dir=False)
            change_dir(srcdir)

            # run parallel benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-p")

            os.chdir(cwd)
            remove_dir(tmpdir)
        except OSError as err:
            raise EasyBuildError(
                "Failed to run WIEN2k benchmark tests: %s", err)

    self.log.debug("Current dir: %s" % os.getcwd())
def test_step(self):
    """Run WIEN2k test benchmarks."""

    def run_wien2k_test(cmd_arg):
        """Run a WIEN2k lapw1 test command, and check its output for success."""
        cmd = "x_lapw lapw1 %s" % cmd_arg
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        # fixed: raw string — '\s' in a plain string literal is an invalid escape sequence
        re_success = re.compile(r"LAPW1\s+END")
        if not re_success.search(out):
            raise EasyBuildError("Test '%s' in %s failed (pattern '%s' not found)?",
                                 cmd, os.getcwd(), re_success.pattern)
        else:
            self.log.info("Test '%s' seems to have run successfully: %s" % (cmd, out))

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            raise EasyBuildError("List of URLs for testdata not provided.")

        # prepend $PATH with install directory, define $SCRATCH which is used by the tests
        env.setvar('PATH', "%s:%s" % (self.installdir, os.environ['PATH']))

        try:
            cwd = os.getcwd()

            # create temporary directory to run tests in
            tmpdir = tempfile.mkdtemp()
            os.chdir(tmpdir)
            self.log.info("Running tests in %s" % tmpdir)

            scratch = os.path.join(tmpdir, 'scratch')
            mkdir(scratch)
            env.setvar('SCRATCH', scratch)

            # download test data
            testdata_paths = {}
            for testdata in self.cfg['testdata']:
                td_path = self.obtain_file(testdata)
                if not td_path:
                    raise EasyBuildError("Downloading file from %s failed?", testdata)
                testdata_paths.update({os.path.basename(testdata): td_path})
            self.log.debug('testdata_paths: %s' % testdata_paths)

            # unpack serial benchmark
            serial_test_name = "test_case"
            extract_file(testdata_paths['%s.tar.gz' % serial_test_name], tmpdir)

            # run serial benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-c")

            # unpack parallel benchmark (in serial benchmark dir)
            parallel_test_name = "mpi-benchmark"
            extract_file(testdata_paths['%s.tar.gz' % parallel_test_name], tmpdir)

            # run parallel benchmark
            os.chdir(os.path.join(tmpdir, serial_test_name))
            run_wien2k_test("-p")

            os.chdir(cwd)
            rmtree2(tmpdir)
        # fixed: 'except OSError, err' is Python-2-only syntax; use 'as err'
        except OSError as err:
            raise EasyBuildError("Failed to run WIEN2k benchmark tests: %s", err)

    self.log.debug("Current dir: %s" % os.getcwd())