def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
    """Artifacts are fetched and unzipped (zip removed, contents kept);
    a failing HTTP status surfaces as a SpackError."""
    os.environ.update({
        'GITLAB_PRIVATE_TOKEN': 'faketoken',
    })

    url = 'https://www.nosuchurlexists.itsfake/artifacts.zip'
    working_dir = os.path.join(tmpdir.strpath, 'repro')
    test_artifacts_path = os.path.join(
        spack_paths.test_path, 'data', 'ci', 'gitlab', 'artifacts.zip')

    # Serve the canned artifacts zip instead of hitting the network
    with open(test_artifacts_path, 'rb') as fd:
        fake_responder = FakeWebResponder(content_to_read=[fd.read()])
    monkeypatch.setattr(ci, 'build_opener', lambda handler: fake_responder)

    ci.download_and_extract_artifacts(url, working_dir)

    # The downloaded zip is deleted after extraction...
    assert len(fs.find(working_dir, 'artifacts.zip')) == 0
    # ...but its contents are left in place
    assert len(fs.find(working_dir, 'install.sh')) == 1

    # A 400 response must raise
    fake_responder._resp_code = 400
    with pytest.raises(spack.error.SpackError):
        ci.download_and_extract_artifacts(url, working_dir)
def filter_config_file(self):
    """Rewrite the installed syclcc.json so it uses the real C++ compiler
    and carries rpaths to the llvm-provided libc++/libc++abi."""
    config_file_paths = filesystem.find(self.prefix, "syclcc.json")
    if len(config_file_paths) != 1:
        raise InstallError(
            "installed hipSYCL must provide a unique compiler driver "
            "configuration file, found: {0}".format(config_file_paths))

    config_file_path = config_file_paths[0]
    with open(config_file_path) as f:
        config = json.load(f)

    # 1. Fix compiler: use the real one in place of the Spack wrapper
    config["default-cpu-cxx"] = self.compiler.cxx

    # 2. Fix stdlib: we need to make sure cuda-enabled binaries find
    #    the libc++.so and libc++abi.so dyn linked to the sycl
    #    ptx backend
    rpaths = set()
    for lib_name in ("libc++.so", "libc++abi.so"):
        so_paths = filesystem.find(self.spec["llvm"].prefix, lib_name)
        # each library must live in exactly one directory of the llvm dep
        if len(so_paths) != 1:
            raise InstallError(
                "concretized llvm dependency must provide a unique "
                "directory containing {0}, found: {1}".format(
                    lib_name, so_paths))
        rpaths.add(path.dirname(so_paths[0]))
    config["default-cuda-link-line"] += " " + " ".join(
        "-rpath {0}".format(p) for p in rpaths)

    # Replace the installed config file
    with open(config_file_path, "w") as f:
        json.dump(config, f, indent=2)
def install(self, spec, prefix):
    """Patch the mpic* wrapper scripts in place, then copy the
    tree rooted two levels above mpicc into the prefix."""
    for wrapper in find(self.stage.source_path, 'mpic*'):
        if not os.path.isfile(wrapper):
            continue
        # the shipped scripts are read-only; add write permission first
        os.chmod(wrapper, os.stat(wrapper).st_mode | stat.S_IWRITE)
        filter_file(r'-I(.*mpiroot)', r'-isystem \1', wrapper)
    root_dir = dirname(dirname(find(self.stage.source_path, 'mpicc')[0]))
    install_tree(root_dir, prefix)
def install(self, spec, prefix):
    """Build and install via ``luarocks make`` against the first
    rockspec found, falling back to the current directory."""
    specs = find('.', '*.rockspec', recursive=False)
    # default to '.' when no rockspec ships with the sources
    rock = specs[0] if specs else '.'
    rocks_args = self.luarocks_args()
    rocks_args.append(rock)
    self.luarocks('--tree=' + prefix, 'make', *rocks_args)
def import_modules(self):
    """Names of modules that the Python package provides.

    These are used to test whether or not the installation succeeded.
    These names generally come from running:

    .. code-block:: python

       >> import setuptools
       >> setuptools.find_packages()

    in the source tarball directory. If the module names are incorrectly
    detected, this property can be overridden by the package.

    Returns:
        list: list of strings of module names
    """
    modules = []
    pkg = self.spec['python'].package

    # Packages may be installed in platform-specific or platform-independent
    # site-packages directories
    for directory in {pkg.platlib, pkg.purelib}:
        root = os.path.join(self.prefix, directory)

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files
        for init_file in find(root, '__init__.py', recursive=True):
            relative = init_file.replace(root + os.sep, '', 1)
            modules.append(
                relative.replace(os.sep + '__init__.py', '')
                        .replace('/', '.'))

        # Some Python libraries are modules: individual *.py files
        # found in the site-packages directory
        for py_file in find(root, '*.py', recursive=False):
            relative = py_file.replace(root + os.sep, '', 1)
            modules.append(relative.replace('.py', '').replace('/', '.'))

    # Keep only names that look like importable dotted identifiers
    modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]

    tty.debug('Detected the following modules: {0}'.format(modules))

    return modules
def cmake_args(self):
    """Build the hipSYCL CMake argument list, validating that the
    concretized llvm dependency provides the files hipSYCL needs."""
    spec = self.spec
    cuda_enabled = "+cuda" in spec

    args = [
        "-DWITH_CPU_BACKEND:Bool=TRUE",
        # TODO: no ROCm stuff available in spack yet
        "-DWITH_ROCM_BACKEND:Bool=FALSE",
        "-DWITH_CUDA_BACKEND:Bool={0}".format(
            "TRUE" if cuda_enabled else "FALSE"),
        # prevent hipSYCL's cmake to look for other LLVM installations
        # if the specified one isn't compatible
        "-DDISABLE_LLVM_VERSION_CHECK:Bool=TRUE",
    ]

    # LLVM directory containing all installed CMake files
    # (e.g.: configs consumed by client projects)
    llvm_cmake_dirs = filesystem.find(spec["llvm"].prefix,
                                      "LLVMExports.cmake")
    if len(llvm_cmake_dirs) != 1:
        raise InstallError("concretized llvm dependency must provide "
                           "a unique directory containing CMake client "
                           "files, found: {0}".format(llvm_cmake_dirs))
    args.append("-DLLVM_DIR:String={0}".format(
        path.dirname(llvm_cmake_dirs[0])))

    # clang internal headers directory
    llvm_clang_include_dirs = filesystem.find(
        spec["llvm"].prefix, "__clang_cuda_runtime_wrapper.h")
    if len(llvm_clang_include_dirs) != 1:
        raise InstallError(
            "concretized llvm dependency must provide a "
            "unique directory containing clang internal "
            "headers, found: {0}".format(llvm_clang_include_dirs))
    args.append("-DCLANG_INCLUDE_PATH:String={0}".format(
        path.dirname(llvm_clang_include_dirs[0])))

    # target clang++ executable
    llvm_clang_bin = path.join(spec["llvm"].prefix.bin, "clang++")
    if not filesystem.is_exe(llvm_clang_bin):
        raise InstallError("concretized llvm dependency must provide a "
                           "valid clang++ executable, found invalid: "
                           "{0}".format(llvm_clang_bin))
    args.append(
        "-DCLANG_EXECUTABLE_PATH:String={0}".format(llvm_clang_bin))

    # explicit CUDA toolkit
    if cuda_enabled:
        args.append("-DCUDA_TOOLKIT_ROOT_DIR:String={0}".format(
            spec["cuda"].prefix))

    return args
def install(self, spec, prefix):
    """Patch the mpic* wrapper scripts, then install the versioned
    MPT tree into the prefix."""
    for wrapper in find(self.stage.source_path, 'mpic*'):
        # shipped scripts are read-only; make them writable first
        os.chmod(wrapper, os.stat(wrapper).st_mode | stat.S_IWRITE)
        filter_file(r'-I(.*mpiroot)', r'-isystem\1', wrapper)
    mpt_root = join_path(
        self.stage.source_path,
        'opt/hpe/hpc/mpt/mpt-' + str(self.spec.version))
    install_tree(mpt_root, prefix)
def import_modules(self):
    """Names of modules that the Python package provides.

    These are used to test whether or not the installation succeeded.
    These names generally come from running:

    .. code-block:: python

       >> import setuptools
       >> setuptools.find_packages()

    in the source tarball directory. If the module names are incorrectly
    detected, this property can be overridden by the package.

    Returns:
        list: list of strings of module names
    """
    modules = []
    root = os.path.join(
        self.prefix,
        self.spec['python'].package.config_vars['python_lib']['false']
        ['false'],
    )

    # Some Python libraries are packages: collections of modules
    # distributed in directories containing __init__.py files
    for init_file in find(root, '__init__.py', recursive=True):
        relative = init_file.replace(root + os.sep, '', 1)
        modules.append(
            relative.replace(os.sep + '__init__.py', '').replace('/', '.'))

    # Some Python libraries are modules: individual *.py files
    # found in the site-packages directory
    for py_file in find(root, '*.py', recursive=False):
        relative = py_file.replace(root + os.sep, '', 1)
        modules.append(relative.replace('.py', '').replace('/', '.'))

    tty.debug('Detected the following modules: {0}'.format(modules))

    return modules
def _do_patch_libtool(self):
    """If configure generates a "libtool" script that does not correctly
    detect the compiler (and patch_libtool is set), patch in the correct
    flags for the Arm, Clang/Flang, and Fujitsu compilers."""
    # Nothing to do when the package opted out of libtool patching
    if not self.patch_libtool:
        return

    scripts = fs.find(self.build_directory, 'libtool', recursive=True)
    for libtool_path in scripts:
        self._patch_libtool(libtool_path)
def unpack(self):
    """Extract every ``*.rpm`` in the stage source path by piping it
    through ``rpm2cpio`` into ``cpio``, then make the extracted tree
    user-writable.

    Requires ``rpm2cpio``, ``cpio`` and ``chmod`` to be available on PATH.
    """
    rpm2cpio = spack.util.executable.which('rpm2cpio')
    cpio = spack.util.executable.which('cpio')
    chmod = spack.util.executable.which('chmod')
    # NOTE: removed a stray debug leftover `print(self.stage)` here
    for rpm_filename in find(self.stage.source_path, '*.rpm'):
        # rpm2cpio emits a cpio archive; buffer it in a temp file so
        # cpio can consume it from the beginning
        with TemporaryFile() as tmpf:
            rpm2cpio(rpm_filename, output=tmpf)
            tmpf.seek(0)
            cpio('-dium', input=tmpf)
    # rpm payloads may be read-only; ensure the tree can be patched/installed
    chmod('-R', 'u+w', self.stage.source_path)
def remove_libtool_archives(self):
    """Remove all .la files in prefix sub-folders if the package sets
    ``install_libtool_archives`` to be False, logging what was removed."""
    # Nothing to do when the package wants the .la files installed
    if self.install_libtool_archives:
        return

    # Collect the archives, delete them, and keep a record of the deletion
    la_files = fs.find(str(self.prefix), '*.la', recursive=True)
    with fs.safe_remove(*la_files):
        fs.mkdirp(os.path.dirname(self._removed_la_files_log))
        with open(self._removed_la_files_log, mode='w') as log_file:
            log_file.write('\n'.join(la_files))
def test_libtool_archive_files_are_deleted_by_default(
        self, mutable_database):
    """Installing a package that creates a libtool archive must delete
    the archive and record the deletion in a log file."""
    # Install a package that creates a mock libtool archive
    spec = Spec('libtool-deletion').concretized()
    spec.package.do_install(explicit=True)

    # The archive itself must be gone...
    assert not os.path.exists(spec.package.libtool_archive_file)

    # ...and a log of the removed files must exist under .spack
    log_dir = os.path.join(spec.prefix, '.spack')
    deletion_log = fs.find(log_dir, 'removed_la_files.txt', recursive=True)
    assert deletion_log
def install(self, spec, prefix):
    """Build the platform-specific oras target and install the binary."""
    # Ordered (constraint, target) pairs: the first matching constraint
    # wins, mirroring an if/elif chain
    build_targets = (
        ('platform=linux target=aarch64:', 'build-linux-arm64'),
        ('platform=linux', 'build-linux'),
        ('platform=darwin target=aarch64:', 'build-mac-arm64'),
        ('platform=darwin', 'build-mac'),
        ('platform=windows', 'build-windows'),
    )
    for constraint, target in build_targets:
        if self.spec.satisfies(constraint):
            make(target)
            break

    mkdirp(prefix.bin)
    oras = find("bin", "oras")
    if not oras:
        tty.die("Oras executable missing in bin.")
    tty.debug("Found oras executable %s to move into install bin" % oras[0])
    install(oras[0], prefix.bin)
def test_find_with_globbing(root, search_list, kwargs, expected):
    """find() with glob patterns returns exactly the expected files."""
    found = find(root, search_list, **kwargs)
    assert sorted(found) == sorted(expected)
def get_executable(exe, spec=None, install=False):
    """Find an executable named exe, either in PATH or in Spack

    Args:
        exe (str): needed executable name
        spec (Spec or str): spec to search for exe in (default exe)
        install (bool): install spec if not available

    When ``install`` is True, Spack will use the python used to run Spack
    as an external. The ``install`` option should only be used with
    packages that install quickly (when using external python) or are
    guaranteed by Spack organization to be in a binary mirror (clingo)."""

    def _executable_in_prefix(query_spec):
        """Return an Executable for ``exe`` found in ``query_spec.prefix``
        with the spec's environment modifications attached, or None."""
        # filter out directories of the same name as the executable
        exe_path = [exe_p for exe_p in fs.find(query_spec.prefix, exe)
                    if fs.is_exe(exe_p)]
        if not exe_path:
            return None
        ret = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in query_spec.traverse(root=True, order='post'):
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        ret.add_default_envmod(envmod)
        return ret

    def _raise_error(executable, exe_spec):
        error_msg = 'cannot find the executable "{0}"'.format(executable)
        if exe_spec:
            # fixed: the original message was missing the closing quote
            error_msg += ' from spec "{0}"'.format(exe_spec)
        raise RuntimeError(error_msg)

    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
        return runner

    # Check whether it's already installed
    spec = spack.spec.Spec(spec or exe)
    installed_specs = spack.store.db.query(spec, installed=True)
    for ispec in installed_specs:
        ret = _executable_in_prefix(ispec)
        if ret is not None:
            return ret
        tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))

    # If we're not allowed to install this for ourselves, we can't find it
    if not install:
        _raise_error(exe, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()

    spec.package.do_install()
    ret = _executable_in_prefix(spec)
    if ret is not None:
        return ret

    _raise_error(exe, spec)
def test_find_with_globbing(root, search_list, kwargs, expected):
    """Globbing searches locate exactly the expected set of matches."""
    result = sorted(find(root, search_list, **kwargs))
    assert result == sorted(expected)
def _fix_ext_suffix(candidate_spec):
    """Fix the external suffixes of Python extensions on the fly for
    platforms that may need it

    Args:
        candidate_spec (Spec): installed spec with a
            Python module to be checked.
    """
    # Here we map target families to the patterns expected
    # by pristine CPython. Only architectures with known issues
    # are included. Known issues:
    #
    # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
    #
    _suffix_to_be_checked = {
        'ppc64le': {
            'glob': '*.cpython-*-powerpc64le-linux-gnu.so',
            're': r'.cpython-[\w]*-powerpc64le-linux-gnu.so',
            'fmt': r'{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so'
        }
    }

    # If the current architecture is not problematic return
    generic_target = archspec.cpu.host().family
    if str(generic_target) not in _suffix_to_be_checked:
        return

    # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
    # the expectations, return since the package is surely good
    ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
    if ext_suffix is None:
        return

    expected = _suffix_to_be_checked[str(generic_target)]
    if fnmatch.fnmatch(ext_suffix, expected['glob']):
        return

    # If we are here it means the current interpreter expects different names
    # than pristine CPython. So:
    # 1. Find what we have installed
    # 2. Create symbolic links for the other names, if they're not
    #    there already

    # Check if standard names are installed and if we have to create
    # links for this interpreter
    standard_extensions = fs.find(candidate_spec.prefix, expected['glob'])
    # For each standard-named extension compute the name this interpreter
    # expects, by swapping in the current EXT_SUFFIX
    link_names = [
        re.sub(expected['re'], ext_suffix, s) for s in standard_extensions
    ]
    for file_name, link_name in zip(standard_extensions, link_names):
        if os.path.exists(link_name):
            # a file/link with the expected name is already there
            continue
        os.symlink(file_name, link_name)

    # Check if this interpreter installed something and we have to create
    # links for a standard CPython interpreter
    non_standard_extensions = fs.find(candidate_spec.prefix,
                                      '*' + ext_suffix)
    for abs_path in non_standard_extensions:
        directory, filename = os.path.split(abs_path)
        # module name = everything before the first '.' in the filename
        module = filename.split('.')[0]
        link_name = os.path.join(
            directory,
            expected['fmt'].format(module=module,
                                   major=sys.version_info[0],
                                   minor=sys.version_info[1]))
        if os.path.exists(link_name):
            continue
        os.symlink(abs_path, link_name)
def _do_patch_config_files(self):
    """Some packages ship with older config.guess/config.sub files and
    need to have these updated when installed on a newer architecture.

    In particular, config.guess fails for PPC64LE for version prior
    to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64).
    """
    # Patch only when enabled AND when targeting one of the architectures
    # known to need newer config files
    if not self.patch_config_files or (
            not self.spec.satisfies('target=ppc64le:') and
            not self.spec.satisfies('target=aarch64:')):
        return

    # TODO: Expand this to select the 'config.sub'-compatible architecture
    # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
    # does accept 'ppc64le').
    if self.spec.satisfies('target=ppc64le:'):
        config_arch = 'ppc64le'
    elif self.spec.satisfies('target=aarch64:'):
        config_arch = 'aarch64'
    else:
        config_arch = 'local'

    def runs_ok(script_abs_path):
        """Return True when the given config script runs successfully
        (config.sub is exercised with the chosen architecture)."""
        # Construct the list of arguments for the call
        additional_args = {'config.sub': [config_arch]}
        script_name = os.path.basename(script_abs_path)
        args = [script_abs_path] + additional_args.get(script_name, [])

        try:
            check_call(args, stdout=PIPE, stderr=PIPE)
        except Exception as e:
            # Any failure (non-zero exit, not executable, ...) marks the
            # script as broken; details only shown at debug verbosity
            tty.debug(e)
            return False

        return True

    # Compute the list of files that needs to be patched
    search_dir = self.stage.path
    to_be_patched = fs.find(
        search_dir, files=['config.sub', 'config.guess'], recursive=True)
    to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

    # If there are no files to be patched, return early
    if not to_be_patched:
        return

    # Directories where to search for files to be copied
    # over the failing ones
    good_file_dirs = ['/usr/share']
    if 'automake' in self.spec:
        # Prefer the automake dependency's copies over the system ones
        good_file_dirs.insert(0, self.spec['automake'].prefix)

    # List of files to be found in the directories above
    to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
    substitutes = {}
    for directory in good_file_dirs:
        candidates = fs.find(directory, files=to_be_found, recursive=True)
        candidates = [f for f in candidates if runs_ok(f)]
        # NOTE(review): itertools.groupby only merges *adjacent* equal
        # keys, and candidates are not sorted by basename here; if a name
        # showed up in two non-adjacent groups, the second
        # to_be_found.remove(name) would raise ValueError. Presumably
        # benign with the inputs seen in practice -- confirm.
        for name, good_files in itertools.groupby(
                candidates, key=os.path.basename):
            substitutes[name] = next(good_files)
            to_be_found.remove(name)

    # Check that we found everything we needed
    if to_be_found:
        msg = 'Failed to find suitable substitutes for {0}'
        raise RuntimeError(msg.format(', '.join(to_be_found)))

    # Copy the good files over the bad ones, restoring the original mode
    for abs_path in to_be_patched:
        name = os.path.basename(abs_path)
        mode = os.stat(abs_path).st_mode
        # Temporarily make the file writable so it can be overwritten
        os.chmod(abs_path, stat.S_IWUSR)
        fs.copy(substitutes[name], abs_path)
        os.chmod(abs_path, mode)
def _do_patch_config_files(self):
    """Some packages ship with older config.guess/config.sub files and
    need to have these updated when installed on a newer architecture.

    In particular, config.guess fails for PPC64LE for version prior
    to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and
    RISC-V (riscv64).
    """
    # Packages may opt out of config-file substitution entirely
    if not self.patch_config_files:
        return

    # TODO: Expand this to select the 'config.sub'-compatible architecture
    # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
    # does accept 'ppc64le').
    if self.spec.satisfies('target=ppc64le:'):
        config_arch = 'ppc64le'
    elif self.spec.satisfies('target=aarch64:'):
        config_arch = 'aarch64'
    elif self.spec.satisfies('target=riscv64:'):
        config_arch = 'riscv64'
    else:
        config_arch = 'local'

    def runs_ok(script_abs_path):
        """Return True when the given config script runs successfully
        (config.sub is exercised with the chosen architecture)."""
        # Construct the list of arguments for the call
        additional_args = {'config.sub': [config_arch]}
        script_name = os.path.basename(script_abs_path)
        args = [script_abs_path] + additional_args.get(script_name, [])

        try:
            check_call(args, stdout=PIPE, stderr=PIPE)
        except Exception as e:
            # Any failure (non-zero exit, not executable, ...) marks the
            # script as broken; details only shown at debug verbosity
            tty.debug(e)
            return False

        return True

    # Get the list of files that needs to be patched
    to_be_patched = fs.find(self.stage.path,
                            files=['config.sub', 'config.guess'])
    to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

    # If there are no files to be patched, return early
    if not to_be_patched:
        return

    # Otherwise, require `gnuconfig` to be a build dependency
    self._require_build_deps(
        pkgs=['gnuconfig'],
        spec=self.spec,
        err="Cannot patch config files")

    # Get the config files we need to patch (config.sub / config.guess).
    to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
    gnuconfig = self.spec['gnuconfig']
    gnuconfig_dir = gnuconfig.prefix

    # An external gnuconfig may not have a prefix.
    if gnuconfig_dir is None:
        raise InstallError("Spack could not find substitutes for GNU config "
                           "files because no prefix is available for the "
                           "`gnuconfig` package. Make sure you set a prefix "
                           "path instead of modules for external `gnuconfig`.")

    candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)

    # For external packages the user may have specified an incorrect prefix.
    # otherwise the installation is just corrupt.
    if not candidates:
        msg = ("Spack could not find `config.guess` and `config.sub` "
               "files in the `gnuconfig` prefix `{0}`. This means the "
               "`gnuconfig` package is broken").format(gnuconfig_dir)
        if gnuconfig.external:
            msg += (" or the `gnuconfig` package prefix is misconfigured as"
                    " an external package")
        raise InstallError(msg)

    # Filter working substitutes
    candidates = [f for f in candidates if runs_ok(f)]
    substitutes = {}
    for candidate in candidates:
        config_file = os.path.basename(candidate)
        substitutes[config_file] = candidate
        to_be_found.remove(config_file)

    # Check that we found everything we needed
    if to_be_found:
        msg = """\
Spack could not find working replacements for the following autotools config
files: {0}.

To resolve this problem, please try the following:
1. Try to rebuild with `patch_config_files = False` in the package `{1}`, to
   rule out that Spack tries to replace config files not used by the build.
2. Verify that the `gnuconfig` package is up-to-date.
3. On some systems you need to use system-provided `config.guess` and `config.sub`
   files. In this case, mark `gnuconfig` as an non-buildable external package,
   and set the prefix to the directory containing the `config.guess` and
   `config.sub` files.
"""
        raise InstallError(msg.format(', '.join(to_be_found), self.name))

    # Copy the good files over the bad ones, restoring the original mode
    for abs_path in to_be_patched:
        name = os.path.basename(abs_path)
        mode = os.stat(abs_path).st_mode
        # Temporarily make the file writable so it can be overwritten
        os.chmod(abs_path, stat.S_IWUSR)
        fs.copy(substitutes[name], abs_path)
        os.chmod(abs_path, mode)
def patch(self):
    """Rename the deprecated PYTHON_LIBRARIES CMake variable to
    Python_LIBRARIES in every CMakeLists.txt."""
    for cmake_file in find('.', 'CMakeLists.txt'):
        filter_file('PYTHON_LIBRARIES', 'Python_LIBRARIES',
                    cmake_file, string=True)