def build_step(self, *args, **kwargs):
    """Build with Bazel"""
    if not get_software_root('Bazel'):
        raise EasyBuildError("Bazel not found in dependency list.")

    # separate build dir for Bazel
    build_dir = os.path.join(self.builddir, 'easybuild_obj')
    if os.path.exists(build_dir):
        self.log.warning('Build directory %s already exists (from previous iterations?). Removing...', build_dir)
        remove_dir(build_dir)
    mkdir(build_dir, parents=True)

    bazel_opts = "--output_user_root %s" % build_dir

    # build target
    bazel_build_pkg = '//reverb/pip_package:build_pip_package'

    # generate build command
    bazel_build_opts = self.cfg['buildopts']
    # by default generate a release build, unless a compilation mode was already specified in buildopts
    if not any(opt in bazel_build_opts for opt in (" --compilation_mode", " -c")):
        bazel_build_opts += " --compilation_mode=opt"

    # set C++ standard (--cxxopt can be used multiple times)
    cstd = self.toolchain.options.get('cstd', None)
    if cstd:
        bazel_build_opts += " --cxxopt='-std=%s'" % cstd

    # use JDK from EB
    bazel_build_opts += " --host_javabase=@local_jdk//:jdk"

    # explicitly set the number of processes
    bazel_build_opts += " --jobs=%d" % self.cfg['parallel']

    # print full compilation commands
    bazel_build_opts += " --subcommands"

    bazel_cmd = "bazel %s build %s %s" % (bazel_opts, bazel_build_opts, bazel_build_pkg)

    return run_cmd(bazel_cmd, log_all=True, simple=True, log_output=True)
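
# --- illustrative sketch, not part of the easyblock above ---
# A minimal, hypothetical helper (names invented here) showing why the check above uses any():
# the default '--compilation_mode=opt' should only be appended when the user has not already
# selected a compilation mode via buildopts.
def with_default_compilation_mode(buildopts):
    """Return buildopts with '--compilation_mode=opt' appended unless a mode is already set."""
    if not any(opt in buildopts for opt in (" --compilation_mode", " -c")):
        buildopts += " --compilation_mode=opt"
    return buildopts

# e.g. with_default_compilation_mode("--verbose_failures") -> "--verbose_failures --compilation_mode=opt",
# while with_default_compilation_mode("--verbose_failures -c dbg") is returned unchanged.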
def install_step(self):
    if LooseVersion(self.version) < LooseVersion('1.7'):
        remove_dir(self.installdir)
        copy_dir(os.path.join(self.builddir, 'jdk%s' % self.version), self.installdir)
    else:
        PackedBinary.install_step(self)
def configure_step(self):
    """Create directories, copy required files and set env vars."""
    # Ensure that nothing has been left over from previous installation attempts.
    # This is necessary here since directories must be created before building
    # and not removed before the installation step.
    remove_dir(self.installdir)
    mkdir(self.installdir)

    # Create directories recursively
    dirpath = self.installdir
    for dirname in ['extras', 'include']:
        dirpath = os.path.join(dirpath, dirname)
        mkdir(dirpath)
        self.log.debug("Created directory: %s" % dirpath)

    source_dir = os.path.join(self.sources_root, 'extras', 'ELC1.04', 'ELC')
    dest_dir = os.path.join(self.installdir, 'extras', 'include', 'ELC')
    copy_dir(source_dir, dest_dir)

    # FSL is a required dependency since it provides FastPDlib
    fsl_root = get_software_root('FSL')
    if not fsl_root:
        raise EasyBuildError("Required FSL dependency not found")

    # Find the machine type identified by FSL
    cmd = ". %s/fsl/etc/fslconf/fslmachtype.sh" % fsl_root
    (out, _) = run_cmd(cmd, log_all=True, simple=False)
    fslmachtype = out.strip()
    self.log.debug("FSL machine type: %s" % fslmachtype)

    env.setvar('FSLDEVDIR', self.installdir)
    env.setvar('FSLCONFDIR', os.path.join(fsl_root, 'fsl', 'config'))
    env.setvar('FSLMACHTYPE', fslmachtype)
def test_cases_step(self):
    """Run test cases, if specified."""
    for test in self.cfg['tests']:
        # check expected format
        if not len(test) == 4:
            raise EasyBuildError("WIEN2k test case not specified in expected format: "
                                 "(testcase_name, init_lapw_args, run_lapw_args, [scf_regexp_pattern])")
        test_name = test[0]
        init_args = test[1]
        run_args = test[2]
        scf_regexp_patterns = test[3]

        try:
            cwd = os.getcwd()
            # WIEN2k enforces that working dir has same name as test case
            tmpdir = os.path.join(tempfile.mkdtemp(), test_name)

            scratch = os.path.join(tmpdir, 'scratch')
            mkdir(scratch, parents=True)
            env.setvar('SCRATCH', scratch)

            os.chdir(tmpdir)
            self.log.info("Running test case %s in %s" % (test_name, tmpdir))
        except OSError as err:
            raise EasyBuildError("Failed to create temporary directory for test %s: %s", test_name, err)

        # try and find struct file for test
        test_fp = self.obtain_file("%s.struct" % test_name)
        try:
            shutil.copy2(test_fp, tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to copy %s: %s", test_fp, err)

        # run test
        cmd = "init_lapw %s" % init_args
        run_cmd(cmd, log_all=True, simple=True)

        cmd = "run_lapw %s" % run_args
        run_cmd(cmd, log_all=True, simple=True)

        # check output
        scf_fn = "%s.scf" % test_name
        self.log.debug("Checking output of test %s in %s" % (str(test), scf_fn))
        scftxt = read_file(scf_fn)
        for regexp_pat in scf_regexp_patterns:
            regexp = re.compile(regexp_pat, re.M)
            if not regexp.search(scftxt):
                raise EasyBuildError("Failed to find pattern %s in %s", regexp.pattern, scf_fn)
            else:
                self.log.debug("Found pattern %s in %s" % (regexp.pattern, scf_fn))

        # cleanup
        try:
            os.chdir(cwd)
            remove_dir(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to clean up temporary test dir: %s", err)
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    self.tkinter_so_basename = self.get_tkinter_so_basename(False)
    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    pylibdir = os.path.join(self.installdir, det_pylibdir())
    copy([os.path.join(os.path.dirname(pylibdir), x) for x in tkparts], tmpdir)

    remove_dir(self.installdir)

    move_file(os.path.join(tmpdir, tkparts[0]), os.path.join(pylibdir, tkparts[0]))
    move_file(os.path.join(tmpdir, self.tkinter_so_basename), os.path.join(pylibdir, self.tkinter_so_basename))
def extract_step(self):
    """
    Prepare a combined MXNet source tree. Move all submodules
    to their right place.
    """
    # Extract everything into separate directories.
    super(EB_MXNet, self).extract_step()

    mxnet_dirs = glob.glob(os.path.join(self.builddir, '*mxnet-*'))
    if len(mxnet_dirs) == 1:
        self.mxnet_src_dir = mxnet_dirs[0]
        self.log.debug("MXNet dir is: %s", self.mxnet_src_dir)
    else:
        raise EasyBuildError("Failed to find/isolate MXNet source directory: %s", mxnet_dirs)

    for srcdir in [d for d in os.listdir(self.builddir) if d != os.path.basename(self.mxnet_src_dir)]:
        submodule, _, _ = srcdir.rpartition('-')
        newdir = os.path.join(self.mxnet_src_dir, submodule)
        olddir = os.path.join(self.builddir, srcdir)
        # first remove empty existing directory
        remove_dir(newdir)
        try:
            shutil.move(olddir, newdir)
        except IOError as err:
            raise EasyBuildError("Failed to move %s to %s: %s", olddir, newdir, err)

    # the nnvm submodule has dmlc-core as a submodule too; let's put a symlink in place
    newdir = os.path.join(self.mxnet_src_dir, "nnvm", "dmlc-core")
    olddir = os.path.join(self.mxnet_src_dir, "dmlc-core")
    remove_dir(newdir)
    symlink(olddir, newdir)
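
# --- illustrative sketch, not part of the easyblock above ---
# A tiny, hypothetical helper showing what the rpartition('-') call above does: the unpacked
# source directory name keeps everything before its last '-', dropping the version suffix.
def submodule_name(srcdir_name):
    """E.g. 'dmlc-core-0.3' -> 'dmlc-core', 'ps-lite-v1' -> 'ps-lite'."""
    name, _, _ = srcdir_name.rpartition('-')
    return name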
def install_step(self):
    """Install IMOD using install script."""
    # -dir: Choose location of installation directory
    # -skip: do not attempt to deploy resource files in /etc
    # -yes: do not prompt for confirmation
    script = '{0}_{1}{2}.csh'.format(self.name.lower(), self.version, self.cfg['versionsuffix'])
    cmd = "tcsh {0} -dir {1} -script {1} -skip -yes".format(script, self.installdir)
    run_cmd(cmd, log_all=True, simple=True)

    # The assumption by the install script is that installdir will be something
    # like /usr/local. So it creates, within the specified install location, a
    # number of additional directories within which to install IMOD. We will,
    # therefore, move the contents of these directories up and throw away the
    # directories themselves. Doing so apparently is not a problem so long as
    # IMOD_DIR is correctly set in the module.
    link_to_remove = os.path.join(self.installdir, self.name)
    dir_to_remove = os.path.join(self.installdir, "{0}_{1}".format(self.name.lower(), self.version))
    try:
        for entry in os.listdir(dir_to_remove):
            shutil.move(os.path.join(dir_to_remove, entry), self.installdir)
        if os.path.realpath(link_to_remove) != os.path.realpath(dir_to_remove):
            raise EasyBuildError("Something went wrong: %s doesn't point to %s", link_to_remove, dir_to_remove)
        remove_dir(dir_to_remove)
        os.remove(link_to_remove)
    except OSError as err:
        raise EasyBuildError("Failed to clean up install dir: %s", err)
def cleanup(self):
    """
    Clean up SVN working copy.
    """
    try:
        remove_dir(self.wc)
    except OSError as err:
        raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err)
def test_step(self):
    """Run the basic tests (but not necessarily the full regression tests) using make check"""
    if self.is_double_precision_cuda_build():
        self.log.info("skipping test step")
    else:
        # allow to escape testing by setting runtest to False
        if self.cfg['runtest'] is None or self.cfg['runtest']:

            libdir = os.path.join(self.installdir, 'lib')
            libdir_backup = None

            if build_option('rpath'):
                # temporarily copy 'lib' to installation directory when RPATH linking is enabled;
                # required to fix errors like:
                #   "ImportError: libgmxapi.so.0: cannot open shared object file: No such file or directory"
                # which occur with 'make test' because _gmxapi.*.so only includes %(installdir)/lib in its RPATH section,
                # while the libraries are only there after the install step...

                # keep in mind that we may be performing an iterated installation:
                # if there already is an existing 'lib' dir in the installation,
                # we temporarily move it out of the way (and then restore it after running the tests)
                if os.path.exists(libdir):
                    libdir_backup = find_backup_name_candidate(libdir)
                    self.log.info("%s already exists, moving it to %s while running tests...",
                                  libdir, libdir_backup)
                    shutil.move(libdir, libdir_backup)

                copy_dir('lib', libdir)

            orig_runtest = self.cfg['runtest']
            # make very sure OMP_NUM_THREADS is set to 1, to avoid hanging GROMACS regression test
            env.setvar('OMP_NUM_THREADS', '1')

            if self.cfg['runtest'] is None or isinstance(self.cfg['runtest'], bool):
                self.cfg['runtest'] = 'check'

            # run 'make check' or whatever the easyconfig specifies
            # in parallel since it involves more compilation
            self.cfg.update('runtest', "-j %s" % self.cfg['parallel'])
            super(EB_GROMACS, self).test_step()

            if build_option('rpath'):
                # clean up temporary copy of 'lib' in installation directory,
                # this was only there to avoid ImportError when running the tests before populating
                # the installation directory
                remove_dir(libdir)

                if libdir_backup:
                    self.log.info("Restoring %s to %s after running tests", libdir_backup, libdir)
                    shutil.move(libdir_backup, libdir)

            self.cfg['runtest'] = orig_runtest
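
# --- illustrative sketch, not part of the easyblock above ---
# A minimal, hypothetical context manager (plain shutil, not EasyBuild API) showing the
# move-aside/restore pattern used above for the 'lib' directory when RPATH linking is enabled:
# back up any existing target, copy the freshly built files in, and restore the backup afterwards.
import os
import shutil
from contextlib import contextmanager

@contextmanager
def temporarily_installed(src_dir, target_dir):
    backup = None
    if os.path.exists(target_dir):
        backup = target_dir + '.backup'  # a real implementation should pick a non-existing name
        shutil.move(target_dir, backup)
    shutil.copytree(src_dir, target_dir)
    try:
        yield target_dir
    finally:
        shutil.rmtree(target_dir)
        if backup:
            shutil.move(backup, target_dir)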
def cleanup(self):
    """
    Clean up git working copy.
    """
    try:
        self.wc = os.path.dirname(self.wc)
        remove_dir(self.wc)
    except IOError as err:
        raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err)
def tearDown(self):
    """Test cleanup."""
    remove_dir(self.tmpdir)

    sys.stdout = self.orig_sys_stdout
    sys.stderr = self.orig_sys_stderr

    # restore original environment
    modify_env(os.environ, self.orig_environ, verbose=False)

    super(EasyBlockSpecificTest, self).tearDown()
def pgo_steps(self):
    """
    Set of steps to be performed after the initial installation, if PGO is enabled.
    """
    self.log.info("Running PGO steps...")

    # Remove old profiles
    remove_dir(self.profdir)
    mkdir(self.profdir)

    # Clean the old build
    run_cmd('make distclean')

    # Compile and run example to generate profile
    print_msg("generating PGO profile...")
    (out, _) = run_cmd('%s 2 hostname' % self.cfg['mpiexec_cmd'])
    nodes = out.split()
    if nodes[0] == nodes[1]:
        raise EasyBuildError("The profile is generated with 1 node! Use 2 nodes to generate a proper profile!")

    write_file('pingpong.c', PINGPONG_PGO_TEST)
    run_cmd('%s/bin/mpicc pingpong.c -o pingpong' % self.installdir)
    run_cmd('PSP_SHM=0 %s 2 pingpong' % self.cfg['mpiexec_cmd'])

    # Check that the profiles are there
    new_profs = os.listdir(self.profdir)
    if not new_profs:
        raise EasyBuildError("The PGO profiles were not found in the expected directory (%s)" % self.profdir)

    # Change PGO related options
    self.cfg['pgo'] = False
    self.cfg['configopts'] = re.sub('--with-profile=gen', '--with-profile=use', self.cfg['configopts'])

    # Reconfigure
    print_msg("configuring with PGO...")
    self.log.info("Running configure_step with PGO...")
    self.configure_step()

    # Rebuild
    print_msg("building with PGO...")
    self.log.info("Running build_step with PGO...")
    self.build_step()

    # Reinstall
    print_msg("installing with PGO...")
    self.log.info("Running install_step with PGO...")
    self.install_step()
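
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper showing the profile-generate -> profile-use switch done above
# with re.sub on the configure options; the option names are taken from the code above.
import re

def switch_pgo_profile(configopts):
    """Replace '--with-profile=gen' by '--with-profile=use' for the PGO rebuild."""
    return re.sub('--with-profile=gen', '--with-profile=use', configopts)

# switch_pgo_profile('--with-device=ch3 --with-profile=gen') -> '--with-device=ch3 --with-profile=use'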
def install_step(self):
    """Copy all files in build directory to the install directory"""
    remove_dir(self.installdir)
    install_script = self.src[0]['name']

    adjust_permissions(os.path.join(self.builddir, install_script), stat.S_IRUSR | stat.S_IXUSR)

    cmd = "%s ./%s -p %s -b -f" % (self.cfg['preinstallopts'], install_script, self.installdir)
    self.log.info("Installing %s using command '%s'..." % (self.name, cmd))
    run_cmd(cmd, log_all=True, simple=True)
def install_step(self, src=None):
    """Install by copying from specified source directory (or 'start_dir' if not specified)."""
    # Run preinstallopts and/or preinstall_cmd before copy of source directory
    preinstall_cmd = None
    if self.cfg['preinstallopts']:
        preinstall_opts = self.cfg['preinstallopts'].split('&&')
        preinstall_cmd = '&&'.join([opt for opt in preinstall_opts if opt and not opt.isspace()])
    if self.cfg['preinstall_cmd']:
        preinstall_cmd = '&& '.join([cmd for cmd in [preinstall_cmd, self.cfg['preinstall_cmd']] if cmd])
    if preinstall_cmd:
        self.log.info("Preparing installation of %s using command '%s'..." % (self.name, preinstall_cmd))
        run_cmd(preinstall_cmd, log_all=True, simple=True)

    # Copy source directory
    source_path = src or self.cfg['start_dir']
    if self.cfg['install_type'] == 'subdir':
        # Wipe and install in a sub-directory with the name of the package
        install_path = os.path.join(self.installdir, self.name.lower())
        dirs_exist_ok = False
        install_logmsg = "Copying tarball contents of %s to sub-directory %s..."
    elif self.cfg['install_type'] == 'merge':
        # Enable merging with root of existing installdir
        install_path = self.installdir
        dirs_exist_ok = True
        install_logmsg = "Merging tarball contents of %s into %s..."
    elif self.cfg['install_type'] is None:
        # Wipe and copy root of installation directory (default)
        install_path = self.installdir
        dirs_exist_ok = False
        install_logmsg = "Copying tarball contents of %s into %s after wiping it..."
    else:
        raise EasyBuildError("Unknown option '%s' for install_type.", self.cfg['install_type'])

    self.log.info(install_logmsg, self.name, install_path)
    if not dirs_exist_ok:
        remove_dir(install_path)
    copy_dir(source_path, install_path, symlinks=self.cfg['keepsymlinks'], dirs_exist_ok=dirs_exist_ok)
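
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper condensing the install_type dispatch above: it returns where
# the unpacked sources go and whether an existing target may be merged rather than wiped.
import os

def resolve_install_path(installdir, name, install_type):
    if install_type == 'subdir':
        return os.path.join(installdir, name.lower()), False
    elif install_type == 'merge':
        return installdir, True
    elif install_type is None:
        return installdir, False
    raise ValueError("Unknown value '%s' for install_type" % install_type)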
def install_step(self):
    """Copy all files in build directory to the install directory"""
    remove_dir(self.installdir)
    install_script = self.src[0]['name']

    adjust_permissions(os.path.join(self.builddir, install_script), stat.S_IRUSR | stat.S_IXUSR)

    # Anaconda's own install instructions specify "bash [script]", despite the scripts using different shebangs
    cmd = "%s bash ./%s -p %s -b -f" % (self.cfg['preinstallopts'], install_script, self.installdir)
    self.log.info("Installing %s using command '%s'..." % (self.name, cmd))
    run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
    """Copy all files in build directory to the install directory"""
    install_cmd = self.cfg.get('install_cmd', None)

    if install_cmd is None:
        try:
            # shutil.copytree doesn't allow the target directory to exist already
            remove_dir(self.installdir)
            shutil.copytree(self.cfg['start_dir'], self.installdir, symlinks=self.cfg['keepsymlinks'])
        except OSError as err:
            raise EasyBuildError("Failed to copy %s to %s: %s", self.cfg['start_dir'], self.installdir, err)
    else:
        cmd = ' '.join([self.cfg['preinstallopts'], install_cmd, self.cfg['installopts']])
        self.log.info("Installing %s using command '%s'..." % (self.name, cmd))
        run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
    """Install numpy and remove numpy build dir, so scipy doesn't find it by accident."""
    super(EB_numpy, self).install_step()

    builddir = os.path.join(self.builddir, "numpy")
    try:
        if os.path.isdir(builddir):
            os.chdir(self.builddir)
            remove_dir(builddir)
        else:
            self.log.debug("build dir %s already clean" % builddir)
    except OSError as err:
        raise EasyBuildError("Failed to clean up numpy build dir %s: %s", builddir, err)
def test_step(self):
    """Test the built Python package."""

    if isinstance(self.cfg['runtest'], string_type):
        self.testcmd = self.cfg['runtest']

    if self.cfg['runtest'] and self.testcmd is not None:
        extrapath = ""
        testinstalldir = None

        if self.testinstall:
            # install in test directory and export PYTHONPATH
            try:
                testinstalldir = tempfile.mkdtemp()
                for pylibdir in self.all_pylibdirs:
                    mkdir(os.path.join(testinstalldir, pylibdir), parents=True)
            except OSError as err:
                raise EasyBuildError("Failed to create test install dir: %s", err)

            # print Python search path (just debugging purposes)
            run_cmd("%s -c 'import sys; print(sys.path)'" % self.python_cmd, verbose=False, trace=False)

            abs_pylibdirs = [os.path.join(testinstalldir, pylibdir) for pylibdir in self.all_pylibdirs]
            extrapath = "export PYTHONPATH=%s &&" % os.pathsep.join(abs_pylibdirs + ['$PYTHONPATH'])

            cmd = self.compose_install_command(testinstalldir, extrapath=extrapath)
            run_cmd(cmd, log_all=True, simple=True, verbose=False)

        if self.testcmd:
            testcmd = self.testcmd % {'python': self.python_cmd}
            cmd = ' '.join([extrapath, self.cfg['pretestopts'], testcmd, self.cfg['testopts']])
            run_cmd(cmd, log_all=True, simple=True)

        if testinstalldir:
            remove_dir(testinstalldir)
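
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper showing how the PYTHONPATH prefix above is composed for the
# test installation: the per-test lib dirs are prepended, keeping the existing $PYTHONPATH last.
import os

def pythonpath_prefix(testinstalldir, pylibdirs):
    abs_pylibdirs = [os.path.join(testinstalldir, p) for p in pylibdirs]
    return "export PYTHONPATH=%s &&" % os.pathsep.join(abs_pylibdirs + ['$PYTHONPATH'])

# pythonpath_prefix('/tmp/test', ['lib/python3.9/site-packages'])
#   -> "export PYTHONPATH=/tmp/test/lib/python3.9/site-packages:$PYTHONPATH &&"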
def post_install_step(self):
    """Copy installation to actual installation directory in case of a staged installation."""
    if self.cfg.get('staged_install', False):
        staged_installdir = self.installdir
        self.installdir = self.actual_installdir
        try:
            # copytree expects target directory to not exist yet
            if os.path.exists(self.installdir):
                remove_dir(self.installdir)
            shutil.copytree(staged_installdir, self.installdir)
        except OSError as err:
            raise EasyBuildError("Failed to move staged install from %s to %s: %s",
                                 staged_installdir, self.installdir, err)

    super(Binary, self).post_install_step()
def install_step(self):
    """Installation of OpenSSL and SSL certificates"""
    super(EB_OpenSSL, self).install_step()

    # SSL certificates
    # OPENSSLDIR is already populated by the installation of OpenSSL
    # try to symlink system certificates in the empty 'certs' directory
    openssl_certs_dir = os.path.join(self.installdir, 'ssl', 'certs')

    if self.ssl_certs_dir:
        remove_dir(openssl_certs_dir)
        symlink(self.ssl_certs_dir, openssl_certs_dir)
    else:
        print_warning("OpenSSL successfully installed without system SSL certificates. "
                      "Some packages might experience limited functionality.")
def install_step(self):
    """
    Install by unpacking tarball in dist directory,
    and copying site-packages dir to installdir.
    """
    # locate tarball
    tarball = None
    shortver = '.'.join(self.version.split('.')[0:2])
    fn_pattern = os.path.join(self.cfg['start_dir'], 'dist', "%s-%s.*.tar.gz" % (self.name, shortver))
    matches = glob.glob(fn_pattern)
    if not matches:
        raise EasyBuildError("No tarball found at %s", fn_pattern)
    elif len(matches) > 1:
        raise EasyBuildError("Multiple matches found for tarball: %s", matches)
    else:
        tarball = matches[0]
        self.log.info("Tarball found at %s" % tarball)

    # unpack tarball to temporary directory
    tmpdir = tempfile.mkdtemp()
    srcdir = extract_file(tarball, tmpdir, change_into_dir=False)
    if srcdir:
        change_dir(srcdir)
    else:
        raise EasyBuildError("Unpacking tarball %s failed?", tarball)

    # locate site-packages dir to copy by diving into unpacked tarball
    src = srcdir
    while len(os.listdir(src)) == 1:
        src = os.path.join(src, os.listdir(src)[0])
    if not os.path.basename(src) == 'site-packages':
        raise EasyBuildError("Expected to find a site-packages path, but found something else: %s", src)

    # copy contents of site-packages dir
    dest = os.path.join(self.installdir, 'site-packages')
    try:
        shutil.copytree(src, dest)
        remove_dir(tmpdir)
        os.chdir(self.installdir)
    except OSError as err:
        raise EasyBuildError("Failed to copy directory %s to %s: %s", src, dest, err)
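
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper for the "dive into the unpacked tarball" loop above:
# keep descending as long as a directory contains exactly one entry.
import os

def descend_single_child(path):
    while len(os.listdir(path)) == 1:
        path = os.path.join(path, os.listdir(path)[0])
    return path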
def build_step(self, *args, **kwargs):
    """Custom build procedure for Python, ensure stack size limit is set to 'unlimited' (if desired)."""

    # make sure installation directory doesn't already exist when building with --rpath and
    # configuring with --enable-optimizations, since that leads to errors like:
    #   ./python: symbol lookup error: ./python: undefined symbol: __gcov_indirect_call
    # see also https://bugs.python.org/issue29712
    enable_opts_flag = '--enable-optimizations'
    if build_option('rpath') and enable_opts_flag in self.cfg['configopts']:
        if os.path.exists(self.installdir):
            warning_msg = "Removing existing installation directory '%s', "
            warning_msg += "because EasyBuild is configured to use RPATH linking "
            warning_msg += "and %s configure option is used." % enable_opts_flag
            print_warning(warning_msg % self.installdir)
            remove_dir(self.installdir)

    if self.cfg['ulimit_unlimited']:
        # determine current stack size limit
        (out, _) = run_cmd("ulimit -s")
        curr_ulimit_s = out.strip()

        # figure out hard limit for stack size limit;
        # this determines whether or not we can use "ulimit -s unlimited"
        (out, _) = run_cmd("ulimit -s -H")
        max_ulimit_s = out.strip()

        if curr_ulimit_s == UNLIMITED:
            self.log.info("Current stack size limit is %s: OK", curr_ulimit_s)
        elif max_ulimit_s == UNLIMITED:
            self.log.info("Current stack size limit is %s, setting it to %s for build...",
                          curr_ulimit_s, UNLIMITED)
            self.cfg.update('prebuildopts', "ulimit -s %s && " % UNLIMITED)
        else:
            msg = "Current stack size limit is %s, and can not be set to %s due to hard limit of %s;"
            msg += " setting stack size limit to %s instead,"
            msg += " this may break part of the compilation (e.g. hashlib)..."
            print_warning(msg % (curr_ulimit_s, UNLIMITED, max_ulimit_s, max_ulimit_s))
            self.cfg.update('prebuildopts', "ulimit -s %s && " % max_ulimit_s)

    super(EB_Python, self).build_step(*args, **kwargs)
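
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper condensing the stack-size decision above: keep the current
# soft limit if it is already 'unlimited', raise it to 'unlimited' when the hard limit allows it,
# and otherwise fall back to the hard limit (UNLIMITED is the string 'unlimited' in EasyBuild).
def pick_stack_limit(curr_soft, hard, unlimited='unlimited'):
    if curr_soft == unlimited:
        return None  # nothing to do, current limit is fine
    if hard == unlimited:
        return unlimited
    return hard  # best we can do; may still break parts of the build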
def build_image(self, dockerfile):
    ec = self.easyconfigs[-1]['ec']

    module_name = self.mns.det_full_module_name(ec)

    tempdir = tempfile.mkdtemp(prefix='easybuild-docker')
    container_name = self.img_name or "%s:latest" % module_name.replace('/', '-')
    docker_cmd = ' '.join(['sudo', 'docker', 'build', '-f', dockerfile, '-t', container_name, '.'])

    print_msg("Running '%s', you may need to enter your 'sudo' password..." % docker_cmd)
    run_cmd(docker_cmd, path=tempdir, stream_output=True)
    print_msg("Docker image created at %s" % container_name, log=self.log)

    remove_dir(tempdir)
def install_step(self):
    """Copy all files in build directory to the install directory"""
    # CST installer outputs into /tmp so we'll send the content elsewhere and tidy later
    tmpdir = tempfile.mkdtemp()
    env.setvar('IATEMPDIR', tmpdir)
    self.log.debug("Setting temp to: %s" % tmpdir)

    cmdlist = [
        '%s/SIMULIA_CST_Studio_Suite.Linux64/install.sh' % self.builddir,
        '--nogui',
        '--force-system-java',
        '--no-pkg-check',
        '--replay %s' % self.replayfile,
    ]
    cmd = ' '.join(cmdlist)

    # Run the command but ignore the exit code which will likely be non-zero
    (out, _) = run_cmd(cmd, simple=False, log_all=False, log_ok=False)
    self.log.debug("Output from install.sh:\n\n%s" % out)

    # Search for "*** Installation successful ***" in the output text
    if not re.search("Installation successful", out) and not self.dry_run:
        raise EasyBuildError("CST install.sh command failed with output:\n\n%s" % out)

    # Next, run the service-pack update, if available:
    if self.cfg['sp_level']:
        glob_string = "%s/CST_S2*%s*.sup" % (self.builddir, self.cfg['sp_level'])
        self.log.debug("Globbing for sup file using the following: %s" % glob_string)
        sp_cands = glob.glob(glob_string)
        if len(sp_cands) == 1:
            sp_filepath = sp_cands[0]
            cmd = "%s/update_with_supfile %s" % (self.installdir, sp_filepath)
            (out, _) = run_cmd(cmd, simple=False, log_all=False, log_ok=False)
            self.log.debug("Output from supplementary update:\n\n%s" % out)

            # Search for "Update completed successfully" in the output text
            if not re.search("Update completed successfully", out) and not self.dry_run:
                raise EasyBuildError("CST update failed with output:\n\n%s" % out)
        elif not self.dry_run:
            raise EasyBuildError("Unable to locate definitive service pack update file: %s" % sp_cands)

    remove_dir(tmpdir)
def install_step(self):
    """Installation of OpenSSL and SSL certificates"""
    super(EB_OpenSSL, self).install_step()

    # SSL certificates
    # OPENSSLDIR is already populated by the installation of OpenSSL
    # try to symlink system certificates in the empty 'certs' directory
    ssl_dir = os.path.join(self.installdir, 'ssl')
    openssl_certs_dir = os.path.join(ssl_dir, 'certs')

    if self.ssl_certs_dir:
        remove_dir(openssl_certs_dir)
        symlink(self.ssl_certs_dir, openssl_certs_dir)

        # also symlink cert.pem file, if it exists
        # (required on CentOS 7, see https://github.com/easybuilders/easybuild-easyconfigs/issues/14058)
        cert_pem_path = os.path.join(os.path.dirname(self.ssl_certs_dir), 'cert.pem')
        if os.path.isfile(cert_pem_path):
            symlink(cert_pem_path, os.path.join(ssl_dir, os.path.basename(cert_pem_path)))
    else:
        print_warning("OpenSSL successfully installed without system SSL certificates. "
                      "Some packages might experience limited functionality.")
def install_step(self):
    """
    Execute the all.bash script to build and install the Go compiler,
    specifying the final installation prefix by setting $GOROOT_FINAL.
    """
    srcdir = os.path.join(self.cfg['start_dir'], 'src')
    try:
        os.chdir(srcdir)
    except OSError as err:
        raise EasyBuildError("Failed to move to %s: %s", srcdir, err)

    # $GOROOT_FINAL only specifies the location of the final installation, which gets baked into the binaries;
    # the installation itself is *not* done by the all.bash script, that needs to be done manually.
    # $GOROOT_BOOTSTRAP needs to specify a Go installation directory to build the Go toolchain for versions
    # 1.5 and later.
    if LooseVersion(self.version) >= LooseVersion('1.5'):
        go_root = get_software_root('Go')
        if go_root:
            cmd = "GOROOT_BOOTSTRAP=%s GOROOT_FINAL=%s ./all.bash" % (go_root, self.installdir)
        else:
            raise EasyBuildError("Go is required as a build dependency for installing Go since version 1.5")
    else:
        cmd = "GOROOT_FINAL=%s ./all.bash" % self.installdir
    run_cmd(cmd, log_all=True, simple=False)

    try:
        remove_dir(self.installdir)
        shutil.copytree(self.cfg['start_dir'], self.installdir, symlinks=self.cfg['keepsymlinks'])
    except OSError as err:
        raise EasyBuildError("Failed to copy installation to %s: %s", self.installdir, err)
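
# --- illustrative sketch, not part of the easyblock above ---
# A standalone, hypothetical helper showing the version-dependent command composition above:
# Go 1.5+ needs an existing Go toolchain ($GOROOT_BOOTSTRAP) to bootstrap itself, while older
# versions only need $GOROOT_FINAL baked into the binaries.
def compose_all_bash_cmd(version, installdir, go_bootstrap_root=None):
    """Compose the all.bash invocation; 'version' is a string like '1.17.6'."""
    major, minor = (int(x) for x in version.split('.')[:2])
    if (major, minor) >= (1, 5):
        if not go_bootstrap_root:
            raise ValueError("Go >= 1.5 needs an existing Go installation to bootstrap")
        return "GOROOT_BOOTSTRAP=%s GOROOT_FINAL=%s ./all.bash" % (go_bootstrap_root, installdir)
    return "GOROOT_FINAL=%s ./all.bash" % installdir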
def install_step(self):
    """Install python but only keep the bits we need"""
    super(EB_Tkinter, self).install_step()

    tmpdir = tempfile.mkdtemp(dir=self.builddir)

    pylibdir = os.path.join(self.installdir, os.path.dirname(det_pylibdir()))
    shlib_ext = get_shared_lib_ext()

    tkinter_so = os.path.join(pylibdir, 'lib-dynload', '_tkinter*.' + shlib_ext)
    tkinter_so_hits = glob.glob(tkinter_so)
    if len(tkinter_so_hits) != 1:
        raise EasyBuildError("Expected to find exactly one _tkinter*.so: %s", tkinter_so_hits)
    self.tkinter_so_basename = os.path.basename(tkinter_so_hits[0])

    if LooseVersion(self.version) >= LooseVersion('3'):
        tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)]
    else:
        tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)]

    copy([os.path.join(pylibdir, x) for x in tkparts], tmpdir)

    remove_dir(self.installdir)

    move_file(os.path.join(tmpdir, tkparts[0]), os.path.join(pylibdir, tkparts[0]))
    tkinter_so = os.path.basename(tkparts[1])
    move_file(os.path.join(tmpdir, tkinter_so), os.path.join(pylibdir, tkinter_so))
def template_module_only_test(self, easyblock, name='foo', version='1.3.2', extra_txt=''):
    """Test whether all easyblocks are compatible with --module-only."""
    tmpdir = tempfile.mkdtemp()

    class_regex = re.compile(r"^class (.*)\(.*", re.M)

    self.log.debug("easyblock: %s" % easyblock)

    # read easyblock Python module
    f = open(easyblock, "r")
    txt = f.read()
    f.close()

    # obtain easyblock class name using regex
    res = class_regex.search(txt)
    if res:
        ebname = res.group(1)
        self.log.debug("Found class name for easyblock %s: %s" % (easyblock, ebname))

        toolchain = None

        # figure out list of mandatory variables, and define with dummy values as necessary
        app_class = get_easyblock_class(ebname)

        # easyblocks deriving from IntelBase require a license file to be found for --module-only
        bases = list(app_class.__bases__)
        for base in copy.copy(bases):
            bases.extend(base.__bases__)

        if app_class == IntelBase or IntelBase in bases:
            os.environ['INTEL_LICENSE_FILE'] = os.path.join(tmpdir, 'intel.lic')
            write_file(os.environ['INTEL_LICENSE_FILE'], '# dummy license')
        elif app_class == EB_IMOD:
            # $JAVA_HOME must be set for IMOD
            os.environ['JAVA_HOME'] = tmpdir
        elif app_class == PythonBundle:
            # $EBROOTPYTHON must be set for PythonBundle easyblock
            os.environ['EBROOTPYTHON'] = '/fake/install/prefix/Python/2.7.14-foss-2018a'
        elif app_class == EB_OpenFOAM:
            # proper toolchain must be used for OpenFOAM(-Extend), to determine value to set for $WM_COMPILER
            write_file(os.path.join(tmpdir, 'GCC', '4.9.3-2.25'), '\n'.join([
                '#%Module',
                'setenv EBROOTGCC %s' % tmpdir,
                'setenv EBVERSIONGCC 4.9.3',
            ]))
            write_file(os.path.join(tmpdir, 'OpenMPI', '1.10.2-GCC-4.9.3-2.25'), '\n'.join([
                '#%Module',
                'setenv EBROOTOPENMPI %s' % tmpdir,
                'setenv EBVERSIONOPENMPI 1.10.2',
            ]))
            write_file(os.path.join(tmpdir, 'gompi', '2016a'), '\n'.join([
                '#%Module',
                'module load GCC/4.9.3-2.25',
                'module load OpenMPI/1.10.2-GCC-4.9.3-2.25',
            ]))
            os.environ['MODULEPATH'] = tmpdir
            toolchain = {'name': 'gompi', 'version': '2016a'}

        # extend easyconfig to make sure mandatory custom easyconfig parameters are defined
        extra_options = app_class.extra_options()
        for (key, val) in extra_options.items():
            if val[2] == MANDATORY:
                extra_txt += '%s = "foo"\n' % key

        # write easyconfig file
        self.writeEC(ebname, name=name, version=version, extratxt=extra_txt, toolchain=toolchain)

        # take into account that for some easyblocks, particular dependencies are hard required early on
        # (in prepare_step, for example);
        # we just set the corresponding $EBROOT* environment variables here to fool it...
        req_deps = {
            # QScintilla easyblock requires that either PyQt or PyQt5 are available as dependency
            # (PyQt is easier, since PyQt5 is only supported for sufficiently recent QScintilla versions)
            'qscintilla.py': [('PyQt', '4.12')],
            # MotionCor2 and Gctf easyblocks require CUDA as dependency
            'motioncor2.py': [('CUDA', '10.1.105')],
            'gctf.py': [('CUDA', '10.1.105')],
        }
        easyblock_fn = os.path.basename(easyblock)
        for (dep_name, dep_version) in req_deps.get(easyblock_fn, []):
            dep_root_envvar = get_software_root_env_var_name(dep_name)
            os.environ[dep_root_envvar] = '/value/should/not/matter'
            dep_version_envvar = get_software_version_env_var_name(dep_name)
            os.environ[dep_version_envvar] = dep_version

        # initialize easyblock
        # if this doesn't fail, the test succeeds
        app = app_class(EasyConfig(self.eb_file))

        # run all steps, most should be skipped
        orig_workdir = os.getcwd()
        try:
            app.run_all_steps(run_test_cases=False)
        finally:
            change_dir(orig_workdir)

        if os.path.basename(easyblock) == 'modulerc.py':
            # .modulerc must be cleaned up to avoid causing trouble (e.g. "Duplicate version symbol" errors)
            modulerc = os.path.join(TMPDIR, 'modules', 'all', name, '.modulerc')
            if os.path.exists(modulerc):
                remove_file(modulerc)

            modulerc += '.lua'
            if os.path.exists(modulerc):
                remove_file(modulerc)
        else:
            modfile = os.path.join(TMPDIR, 'modules', 'all', name, version)
            luamodfile = '%s.lua' % modfile
            self.assertTrue(os.path.exists(modfile) or os.path.exists(luamodfile),
                            "Module file %s or %s was generated" % (modfile, luamodfile))

            if os.path.exists(modfile):
                modtxt = read_file(modfile)
            else:
                modtxt = read_file(luamodfile)

            none_regex = re.compile('None')
            self.assertFalse(none_regex.search(modtxt), "None not found in module file: %s" % modtxt)

        # cleanup
        app.close_log()
        remove_file(app.logfile)
        remove_dir(tmpdir)
    else:
        self.assertTrue(False, "No class found in easyblock %s" % easyblock)
            code += "template_module_only_test(self, '%s', name='OpenMPI', version='system')" % easyblock
        elif os.path.basename(easyblock) == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', extra_txt='%s')" % (easyblock, extra_txt)
        elif os.path.basename(easyblock) == 'modulerc.py':
            # exactly one dependency is included with ModuleRC generic easyblock (and name must match)
            extra_txt = 'dependencies = [("foo", "1.2.3.4.5")]'
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', version='1.2.3.4', extra_txt='%s')" % (easyblock, extra_txt)
        else:
            code = "def innertest(self): template_module_only_test(self, '%s')" % easyblock

        exec(code, globals())

        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)


if __name__ == '__main__':
    res = TextTestRunner(verbosity=1).run(suite())
    remove_dir(TMPDIR)
    sys.exit(len(res.failures))
def cleanup_step(self):
    """Complete cleanup by also removing custom created short build directory."""
    remove_dir(self.short_start_dir)
def test_step(self):
    """Run WPS test (requires large dataset to be downloaded)."""

    wpsdir = None

    def run_wps_cmd(cmdname, mpi_cmd=True):
        """Run a WPS command, and check for success."""
        cmd = os.path.join(wpsdir, "%s.exe" % cmdname)

        if mpi_cmd:
            if build_option('mpi_tests'):
                cmd = self.toolchain.mpi_cmd_for(cmd, 1)
            else:
                self.log.info("Skipping MPI test for %s, since MPI tests are disabled", cmd)
                return

        (out, _) = run_cmd(cmd, log_all=True, simple=False)

        re_success = re.compile("Successful completion of %s" % cmdname)
        if not re_success.search(out):
            raise EasyBuildError("%s.exe failed (pattern '%s' not found)?", cmdname, re_success.pattern)

    if self.cfg['runtest']:
        if not self.cfg['testdata']:
            raise EasyBuildError("List of URLs for testdata not provided.")

        wpsdir = os.path.join(self.builddir, self.wps_subdir)

        try:
            # create temporary directory
            tmpdir = tempfile.mkdtemp()
            change_dir(tmpdir)

            # download data
            testdata_paths = []
            for testdata in self.cfg['testdata']:
                path = self.obtain_file(testdata)
                if not path:
                    raise EasyBuildError("Downloading file from %s failed?", testdata)
                testdata_paths.append(path)

            # unpack data
            for path in testdata_paths:
                extract_file(path, tmpdir)

            namelist_file = os.path.join(tmpdir, 'namelist.wps')

            # GEOGRID

            # setup directories and files
            if LooseVersion(self.version) < LooseVersion("4.0"):
                geog_data_dir = "geog"
            else:
                geog_data_dir = "WPS_GEOG"
            for dir_name in os.listdir(os.path.join(tmpdir, geog_data_dir)):
                symlink(os.path.join(tmpdir, geog_data_dir, dir_name), os.path.join(tmpdir, dir_name))

            # copy namelist.wps file and patch it for geogrid
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir)]
            apply_regex_substitutions(namelist_file, regex_subs)

            # GEOGRID.TBL
            geogrid_dir = os.path.join(tmpdir, 'geogrid')
            mkdir(geogrid_dir)
            symlink(os.path.join(wpsdir, 'geogrid', 'GEOGRID.TBL.ARW'),
                    os.path.join(geogrid_dir, 'GEOGRID.TBL'))

            # run geogrid.exe
            run_wps_cmd("geogrid")

            # UNGRIB

            # determine start and end time stamps of grib files
            grib_file_prefix = "fnl_"
            k = len(grib_file_prefix)
            fs = [f for f in sorted(os.listdir('.')) if f.startswith(grib_file_prefix)]
            start = "%s:00:00" % fs[0][k:]
            end = "%s:00:00" % fs[-1][k:]

            # copy namelist.wps file and patch it for ungrib
            copy_file(os.path.join(wpsdir, 'namelist.wps'), namelist_file)
            regex_subs = [
                (r"^(\s*start_date\s*=\s*).*$", r"\1 '%s','%s'," % (start, start)),
                (r"^(\s*end_date\s*=\s*).*$", r"\1 '%s','%s'," % (end, end)),
            ]
            apply_regex_substitutions(namelist_file, regex_subs)

            # copy correct Vtable
            vtable_dir = os.path.join(wpsdir, 'ungrib', 'Variable_Tables')
            if os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW'), os.path.join(tmpdir, 'Vtable'))
            elif os.path.exists(os.path.join(vtable_dir, 'Vtable.ARW.UPP')):
                copy_file(os.path.join(vtable_dir, 'Vtable.ARW.UPP'), os.path.join(tmpdir, 'Vtable'))
            else:
                raise EasyBuildError("Could not find Vtable file to use for testing ungrib")

            # run link_grib.csh script
            cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix)
            run_cmd(cmd, log_all=True, simple=True)

            # run ungrib.exe
            run_wps_cmd("ungrib", mpi_cmd=False)

            # METGRID.TBL
            metgrid_dir = os.path.join(tmpdir, 'metgrid')
            mkdir(metgrid_dir)
            symlink(os.path.join(wpsdir, 'metgrid', 'METGRID.TBL.ARW'),
                    os.path.join(metgrid_dir, 'METGRID.TBL'))

            # run metgrid.exe
            run_wps_cmd('metgrid')

            # clean up
            change_dir(self.builddir)
            remove_dir(tmpdir)
        except OSError as err:
            raise EasyBuildError("Failed to run WPS test: %s", err)
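
# --- illustrative sketch, not part of the easyblock above ---
# A minimal, standalone demonstration (hypothetical helper name) of the namelist patching done
# above via apply_regex_substitutions: a line such as '  geog_data_path = ...' is rewritten to
# point at the test directory. This assumes tmpdir contains no regex backreference characters.
import re

def patch_geog_data_path(namelist_text, tmpdir):
    return re.sub(r"^(\s*geog_data_path\s*=\s*).*$", r"\1 '%s'" % tmpdir, namelist_text, flags=re.M)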
def test_build_easyconfigs_in_parallel_pbs_python(self):
    """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
    # put mocked functions in place
    PbsPython__init__ = PbsPython.__init__
    PbsPython_check_version = PbsPython._check_version
    PbsPython_complete = PbsPython.complete
    PbsPython_connect_to_server = PbsPython.connect_to_server
    PbsPython_ppn = PbsPython.ppn
    pbs_python_PbsJob = pbs_python.PbsJob

    PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
    PbsPython._check_version = lambda _: True
    PbsPython.complete = mock
    PbsPython.connect_to_server = mock
    PbsPython.ppn = mock
    pbs_python.PbsJob = MockPbsJob

    topdir = os.path.dirname(os.path.abspath(__file__))

    build_options = {
        'external_modules_metadata': {},
        'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
        'valid_module_classes': config.module_classes(),
        'validate': False,
        'job_cores': 3,
    }
    init_config(args=['--job-backend=PbsPython'], build_options=build_options)

    ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
    easyconfigs = process_easyconfig(ec_file)
    ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
    jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
    # only one job submitted since foss/2018a module is already available
    self.assertEqual(len(jobs), 1)
    regex = re.compile("echo '.*/gzip-1.5-foss-2018a.eb'")
    self.assertTrue(regex.search(jobs[-1].script),
                    "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

    ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb')
    ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool, retain_all_deps=True)
    jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

    # make sure command is correct, and that --hidden is there when it needs to be
    for i, ec in enumerate(ordered_ecs):
        if ec['hidden']:
            regex = re.compile("eb %s.* --hidden" % ec['spec'])
        else:
            regex = re.compile("eb %s" % ec['spec'])
        self.assertTrue(regex.search(jobs[i].script),
                        "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

    for job in jobs:
        self.assertEqual(job.cores, build_options['job_cores'])

    # no deps for GCC/4.6.3 (toolchain) and intel/2018a (test easyconfig with 'fake' deps)
    self.assertEqual(len(jobs[0].deps), 0)
    self.assertEqual(len(jobs[1].deps), 0)

    # only dependency for toy/0.0-deps is intel/2018a (dep marked as external module is filtered out)
    self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
    self.assertEqual(len(jobs[2].deps), 1)
    self.assertTrue('intel-2018a.eb' in jobs[2].deps[0].script)

    # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/0.0-deps
    self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
    self.assertEqual(len(jobs[3].deps), 2)
    regex = re.compile(r'toy-0.0-deps.eb\s* --hidden')
    self.assertTrue(regex.search(jobs[3].deps[0].script))
    self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

    # also test use of --pre-create-installdir
    ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
    ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool)

    # installation directory doesn't exist yet before submission
    toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
    self.assertFalse(os.path.exists(toy_installdir))

    jobs = submit_jobs(ordered_ecs, '', testing=False)
    self.assertEqual(len(jobs), 1)

    # software install dir is created (by default) as part of job submission process (fetch_step is run)
    self.assertTrue(os.path.exists(toy_installdir))
    remove_dir(toy_installdir)
    remove_dir(os.path.dirname(toy_installdir))
    self.assertFalse(os.path.exists(toy_installdir))

    # installation directory does *not* get created when --pre-create-installdir is used
    build_options['pre_create_installdir'] = False
    init_config(args=['--job-backend=PbsPython'], build_options=build_options)

    jobs = submit_jobs(ordered_ecs, '', testing=False)
    self.assertEqual(len(jobs), 1)
    self.assertFalse(os.path.exists(toy_installdir))

    # restore mocked stuff
    PbsPython.__init__ = PbsPython__init__
    PbsPython._check_version = PbsPython_check_version
    PbsPython.complete = PbsPython_complete
    PbsPython.connect_to_server = PbsPython_connect_to_server
    PbsPython.ppn = PbsPython_ppn
    pbs_python.PbsJob = pbs_python_PbsJob