def make_fujitsu_rules(self):
    """Create Fujitsu rules (clang variant) unless supplied upstream.
    Implemented for 1906 and later (older rules are too messy to edit).
    Already included after 1912.
    """
    general_rules = 'wmake/rules/General'
    arch_rules = 'wmake/rules/linuxARM64'  # self.arch
    src = arch_rules + 'Clang'
    dst = arch_rules + 'Fujitsu'  # self.compiler
    if os.path.exists(dst):
        return

    # Handle rules/<ARCH><COMP> or rules/<ARCH>/<COMP>
    if not os.path.exists(src):
        src = join_path(arch_rules, 'Clang')
        dst = join_path(arch_rules, 'Fujitsu')  # self.compiler
        if os.path.exists(dst):
            return

    tty.info('Add Fujitsu wmake rules')
    copy_tree(src, dst)

    for cfg in ['c', 'c++', 'general']:
        rule = join_path(dst, cfg)
        filter_file('Clang', 'Fujitsu', rule, backup=False)

    src = join_path(general_rules, 'Clang')
    dst = join_path(general_rules, 'Fujitsu')  # self.compiler
    copy_tree(src, dst)
    filter_file('clang', spack_cc, join_path(dst, 'c'),
                backup=False, string=True)
    filter_file('clang++', spack_cxx, join_path(dst, 'c++'),
                backup=False, string=True)

def patch(self):
    """Copy additional files or other patching."""
    add_extra_files(self, self.common, self.assets)
    # Emit openfoam version immediately, if we resolved the wrong version
    # it takes a very long time to rebuild!
    tty.info('Build for ' + self.spec['openfoam'].format(
        '{name}{@version}{%compiler}{compiler_flags}{variants}'))

def _add_to_root_stage(self):
    """Move the extracted resource to the root stage (according to
    placement).
    """
    root_stage = self.root_stage
    resource = self.resource
    placement = os.path.basename(self.source_path) \
        if resource.placement is None \
        else resource.placement
    if not isinstance(placement, dict):
        placement = {'': placement}
    target_path = join_path(root_stage.source_path, resource.destination)

    try:
        os.makedirs(target_path)
    except OSError as err:
        if err.errno == errno.EEXIST and os.path.isdir(target_path):
            pass
        else:
            raise

    for key, value in iteritems(placement):
        destination_path = join_path(target_path, value)
        source_path = join_path(self.source_path, key)

        if not os.path.exists(destination_path):
            tty.info('Moving resource stage\n\tsource : '
                     '{stage}\n\tdestination : {destination}'.format(
                         stage=source_path, destination=destination_path))
            shutil.move(os.path.realpath(source_path), destination_path)

def test(self):
    """Perform stand-alone checks on the installed package."""
    if self.spec.satisfies('@:1') or \
            not os.path.isdir(self.prefix.bin):
        tty.info('Skipping: checks not installed in bin for v{0}'.format(
            self.version))
        return

    # Run a subset of the examples and tutorials, PROVIDED they are
    # installed and have readily checkable outputs.
    checks = {
        'malloc': ['99 should be 99'],
        'recipe_dynamic_pool_heuristic': ['in the pool', 'releas'],
        'recipe_no_introspection': ['has allocated', 'used'],
        'strategy_example': ['Available allocators', 'HOST'],
        'tut_copy': ['Copied source data'],
        'tut_introspection':
            ['Allocator used is HOST', 'size of the allocation'],
        'tut_memset': ['Set data from HOST'],
        'tut_move': ['Moved source data', 'HOST'],
        'tut_reallocate': ['Reallocated data'],
        'vector_allocator': [''],
    }

    for exe in checks:
        expected = checks[exe]
        reason = 'test: checking output from {0}'.format(exe)
        self.run_test(exe, [], expected, 0, installed=False,
                      purpose=reason, skip_missing=True,
                      work_dir=self.prefix.bin)

def link_one(spec, path, link=os.symlink, verbose=False):
    'Link all files in `spec` into directory `path`.'
    dotspack = transform_path(spec, '.spack', path)
    if os.path.exists(dotspack):
        tty.warn('Skipping existing package: "%s"' % spec.name)
        return

    if verbose:
        tty.info('Linking package: "%s"' % spec.name)

    for dirpath, dirnames, filenames in os.walk(spec.prefix):
        if not filenames:
            continue  # avoid explicitly making empty dirs

        targdir = transform_path(spec, dirpath, path)
        assuredir(targdir)

        for fname in filenames:
            src = os.path.join(dirpath, fname)
            dst = os.path.join(targdir, fname)
            if os.path.exists(dst):
                if '.spack' in dst.split(os.path.sep):
                    continue  # silence these
                tty.warn("Skipping existing file: %s" % dst)
                continue
            link(src, dst)

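# link_one above (and remove_one / check_one further below) rely on two
# helpers, transform_path and assuredir, that are not shown in this
# section. A minimal sketch of plausible implementations, inferred purely
# from how they are called here (hypothetical, not the actual
# definitions):

import os


def transform_path(spec, path, prefix=None):
    """Map `path` (typically inside spec.prefix) to its location under
    the view root `prefix`; the `.spack` metadata dir is namespaced per
    package, matching the check in check_one below."""
    if os.path.isabs(path):
        path = os.path.relpath(path, spec.prefix)
    if path.startswith('.spack'):
        path = path.replace('.spack', os.path.join('.spack', spec.name), 1)
    return os.path.join(prefix or '', path)


def assuredir(path):
    """Create `path` (including parents) if it does not already exist."""
    if not os.path.isdir(path):
        os.makedirs(path)
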
def expand_archive(self):
    super(ResourceStage, self).expand_archive()
    root_stage = self.root_stage
    resource = self.resource
    placement = os.path.basename(self.source_path) \
        if resource.placement is None \
        else resource.placement
    if not isinstance(placement, dict):
        placement = {'': placement}
    # Make the paths in the dictionary absolute and move the resources
    for key, value in iteritems(placement):
        target_path = join_path(
            root_stage.source_path, resource.destination)
        destination_path = join_path(target_path, value)
        source_path = join_path(self.source_path, key)

        try:
            os.makedirs(target_path)
        except OSError as err:
            if err.errno == errno.EEXIST and os.path.isdir(target_path):
                pass
            else:
                raise

        if not os.path.exists(destination_path):
            # Move the extracted resource into place
            tty.info('Moving resource stage\n\tsource : '
                     '{stage}\n\tdestination : {destination}'.format(
                         stage=source_path,
                         destination=destination_path))
            shutil.move(source_path, destination_path)

def _run_example_checks(self):
    """Run the example smoke test checks."""
    tty.info('Running example checks')

    dirs = []
    if self.spec.satisfies('@0.1.3:0.3.1'):
        dirs.append(self._extra_tests_path)
    elif self.spec.satisfies('@0.3.3:1.0.1'):
        dirs.append(join_path(self._extra_tests_path, 'examples'))
    elif self.spec.satisfies('@1.1.0'):
        dirs.append(join_path(self.prefix.bin, 'examples'))
    elif self.spec.satisfies('@2.0.0:'):
        dirs.append(self.prefix.bin)

    # Check the results from a subset of the (potentially) available
    # executables
    checks = {
        # Versions 0.1.3:0.3.1 (spack-build/bin)
        # Versions 0.3.3:1.0.1 (spack-build/bin/examples)
        # Versions 2.0.0:2.1.0 (spack-build/bin)
        # Version 1.1.0 (prefix.bin/examples)
        # Versions 2.0.0:2.1.0 (prefix.bin)
        'malloc': (['99 should be 99'], 0),
        'strategy_example': (['Available allocators', 'HOST'], 0),
        'vector_allocator': ([''], 0),
    }

    self._run_checks(dirs, checks)

def try_search_path(self, executables, abstract_spec_str):
    info = {}
    if _executables_in_store(executables, abstract_spec_str,
                             query_info=info):
        self.last_search = info
        return True

    tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))

    # If we compile code from sources detecting a few build tools
    # might reduce compilation time by a fair amount
    _add_externals_if_missing()

    concrete_spec = spack.spec.Spec(abstract_spec_str)
    if concrete_spec.name == 'patchelf':
        concrete_spec._old_concretize(deprecation_warning=False)
    else:
        concrete_spec.concretize()

    msg = "[BOOTSTRAP] Try installing '{0}' from sources"
    tty.debug(msg.format(abstract_spec_str))
    with spack.config.override(self.mirror_scope):
        concrete_spec.package.do_install()

    if _executables_in_store(executables, concrete_spec, query_info=info):
        self.last_search = info
        return True
    return False

def try_import(module, abstract_spec_str):
    if _try_import_from_store(module, abstract_spec_str):
        return True

    # Try to build and install from sources
    with spack_python_interpreter():
        # Add hint to use frontend operating system on Cray
        if str(spack.architecture.platform()) == 'cray':
            abstract_spec_str += ' os=fe'

        concrete_spec = spack.spec.Spec(
            abstract_spec_str + ' ^' + spec_for_current_python())

        if module == 'clingo':
            # TODO: remove when the old concretizer is deprecated
            concrete_spec._old_concretize()
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))
        tty.info("Bootstrapping {0} from sources".format(module))

        # Install the spec that should make the module importable
        concrete_spec.package.do_install()

    return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)

def save_result(self, result, overwrite=False):
    """Read saved ABI results and upload to monitor server.

    ABI results are saved to individual files, so each one needs to be
    read and uploaded. Result here should be the lookup generated in
    run(), the key is the analyzer name, and each value is the result
    file. We currently upload the entire xml as text because libabigail
    can't easily read gzipped xml, but this will be updated when it can.
    """
    if not spack.monitor.cli:
        return

    name = self.spec.package.name

    for obj, filename in result.get(self.name, {}).items():

        # Don't include the prefix
        rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

        # We've already saved the results to file during run
        content = spack.monitor.read_file(filename)

        # A result needs an analyzer, value or binary_value, and name
        data = {"value": content, "install_file": rel_path,
                "name": "abidw-xml"}
        tty.info("Sending result for %s %s to monitor." % (name, rel_path))
        spack.hooks.on_analyzer_save(self.spec.package,
                                     {"libabigail": [data]})

def save_result(self, result, overwrite=False):
    """Save a result to the associated spack monitor, if defined.

    This function is on the level of the analyzer because it might be
    the case that the result is large (appropriate for a single request)
    or that the data is organized differently (e.g., more than one
    request per result). If an analyzer subclass needs to override this
    function with a custom save, that is appropriate to do (see abi).
    """
    # We maintain the structure in json with the analyzer as key so
    # that in the future, we could upload to a monitor server
    if result[self.name]:

        outfile = os.path.join(self.output_dir, self.outfile)

        # Only try to create the results directory if we have a result
        if not os.path.exists(self._output_dir):
            os.makedirs(self._output_dir)

        # Don't overwrite an existing result if overwrite is False
        if os.path.exists(outfile) and not overwrite:
            tty.info("%s exists and overwrite is False, skipping." % outfile)
        else:
            tty.info("Writing result to %s" % outfile)
            spack.monitor.write_json(result[self.name], outfile)

    # This hook runs after a save result
    spack.hooks.on_analyzer_save(self.spec.package, result)

def test(self):
    if not self.spec.satisfies('@develop') or \
            not os.path.isdir(self.prefix.bin):
        tty.info('Skipping: checks not installed in bin for v{0}'.format(
            self.version))
        return

    tests = [
        ['NlpMdsEx1.exe', '400', '100', '0', '-selfcheck'],
        ['NlpMdsEx1.exe', '400', '100', '1', '-selfcheck'],
        ['NlpMdsEx1.exe', '400', '100', '0', '-empty_sp_row', '-selfcheck'],
    ]
    if '+raja' in self.spec:
        tests.extend([
            ['NlpMdsEx1Raja.exe', '400', '100', '0', '-selfcheck'],
            ['NlpMdsEx1Raja.exe', '400', '100', '1', '-selfcheck'],
            ['NlpMdsEx1Raja.exe', '400', '100', '0', '-empty_sp_row',
             '-selfcheck'],
        ])

    for i, test in enumerate(tests):
        exe = os.path.join(self.prefix.bin, test[0])
        args = test[1:]
        reason = 'test {0}: "{1}"'.format(i, ' '.join(test))
        self.run_test(exe, args, [], 0, installed=False,
                      purpose=reason, skip_missing=True,
                      work_dir=self.prefix.bin)

def expand_archive(self):
    super(ResourceStage, self).expand_archive()
    root_stage = self.root_stage
    resource = self.resource
    placement = os.path.basename(
        self.source_path
    ) if resource.placement is None else resource.placement
    if not isinstance(placement, dict):
        placement = {'': placement}
    # Make the paths in the dictionary absolute and move the resources
    for key, value in placement.items():
        target_path = join_path(root_stage.source_path,
                                resource.destination)
        destination_path = join_path(target_path, value)
        source_path = join_path(self.source_path, key)

        try:
            os.makedirs(target_path)
        except OSError as err:
            if err.errno == errno.EEXIST and os.path.isdir(target_path):
                pass
            else:
                raise

        if not os.path.exists(destination_path):
            # Move the extracted resource into place
            tty.info(
                'Moving resource stage\n\tsource : {stage}\n\t'
                'destination : {destination}'.format(
                    stage=source_path, destination=destination_path))
            shutil.move(source_path, destination_path)

def try_import(module, abstract_spec_str):
    if _try_import_from_store(module, abstract_spec_str):
        return True

    tty.info("Bootstrapping {0} from sources".format(module))

    # If we compile code from sources detecting a few build tools
    # might reduce compilation time by a fair amount
    _add_externals_if_missing()

    # Try to build and install from sources
    with spack_python_interpreter():
        # Add hint to use frontend operating system on Cray
        if str(spack.platforms.host()) == 'cray':
            abstract_spec_str += ' os=fe'

        concrete_spec = spack.spec.Spec(
            abstract_spec_str + ' ^' + spec_for_current_python())

        if module == 'clingo':
            # TODO: remove when the old concretizer is deprecated
            concrete_spec._old_concretize(deprecation_warning=False)
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        concrete_spec.package.do_install(fail_fast=True)

    return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)

def _add_to_root_stage(self):
    """Move the extracted resource to the root stage (according to
    placement).
    """
    root_stage = self.root_stage
    resource = self.resource
    placement = os.path.basename(self.source_path) \
        if resource.placement is None \
        else resource.placement
    if not isinstance(placement, dict):
        placement = {'': placement}
    target_path = os.path.join(
        root_stage.source_path, resource.destination)

    try:
        os.makedirs(target_path)
    except OSError as err:
        if err.errno == errno.EEXIST and os.path.isdir(target_path):
            pass
        else:
            raise

    for key, value in iteritems(placement):
        destination_path = os.path.join(target_path, value)
        source_path = os.path.join(self.source_path, key)

        if not os.path.exists(destination_path):
            tty.info('Moving resource stage\n\tsource : '
                     '{stage}\n\tdestination : {destination}'.format(
                         stage=source_path,
                         destination=destination_path))
            shutil.move(os.path.realpath(source_path), destination_path)

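# For orientation, the placement normalization shared by the staging
# helpers above (the values here are illustrative, not from the source):
#
#   placement = 'renamed'        -> {'': 'renamed'}
#       i.e. move the whole extracted tree to <target>/renamed
#   placement = {'bin': 'tools'} -> left unchanged
#       i.e. move <resource-stage>/bin to <target>/tools
#
# Wrapping a bare string as {'': value} lets a single key/value loop
# handle both forms.
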
def _run_tut_checks(self):
    """Run the tutorial smoke test checks."""
    tty.info('Running tutorials checks')

    dirs = []
    tut_subdir = join_path('examples', 'tutorial')
    if self.spec.satisfies('@0.2.4:0.3.1'):
        dirs.append(self._extra_tests_path)
    elif self.spec.satisfies('@0.3.3:1.0.1'):
        dirs.append(join_path(self._extra_tests_path, tut_subdir))
    elif self.spec.satisfies('@1.1.0'):
        dirs.append(join_path(self.prefix.bin, tut_subdir))
    elif self.spec.satisfies('@2.0.0:'):
        dirs.append(self.prefix.bin)

    checks = {
        # Versions 0.2.4:0.3.1 (spack-build/bin)
        # Versions 0.3.3:1.0.1 (spack-build/bin/examples/tutorial)
        # Versions 2.0.0:2.1.0 (spack-build/bin)
        # Version 1.1.0 (prefix.bin/examples/tutorial)
        # Versions 2.0.0:2.1.0 (prefix.bin)
        'tut_copy': (['Copied source data'], 0),
        'tut_introspection':
            (['Allocator used is HOST', 'size of the allocation'], 0),
        'tut_memset': (['Set data from HOST'], 0),
        'tut_move': (['Moved source data', 'HOST'], 0),
        'tut_reallocate': (['Reallocated data'], 0),
    }

    self._run_checks(dirs, checks)

def patch(self):
    """Adjust OpenFOAM build for spack.
    Where needed, apply filter as an alternative to normal patching."""
    add_extra_files(self, self.common, self.assets)

    # Prior to 1812, required OpenFOAM-v{VER} directory when sourcing
    projdir = "OpenFOAM-v{0}".format(self.version)
    if not os.path.exists(join_path(self.stage.path, projdir)):
        tty.info('Added directory link {0}'.format(projdir))
        os.symlink(
            os.path.relpath(self.stage.source_path, self.stage.path),
            join_path(self.stage.path, projdir))

    # Avoid WM_PROJECT_INST_DIR for ThirdParty
    # This modification is non-critical
    edits = {
        'WM_THIRD_PARTY_DIR':
            r'$WM_PROJECT_DIR/ThirdParty #SPACK: No separate third-party',
    }
    rewrite_environ_files(  # etc/{bashrc,cshrc}
        edits,
        posix=join_path('etc', 'bashrc'),
        cshell=join_path('etc', 'cshrc'))

    # The following filtering is non-critical.
    # It simply prevents 'site' dirs at the wrong level
    # (likely non-existent anyhow) from being added to
    # PATH, LD_LIBRARY_PATH.
    for rcdir in ['config.sh', 'config.csh']:
        rcfile = join_path('etc', rcdir, 'settings')
        if os.path.isfile(rcfile):
            filter_file('WM_PROJECT_INST_DIR/', 'WM_PROJECT_DIR/',
                        rcfile, backup=False)

def view(parser, args):
    'Produce a view of a set of packages.'

    specs = spack.cmd.parse_specs(args.specs)
    path = args.path[0]

    view = YamlFilesystemView(
        path, spack.store.layout,
        ignore_conflicts=getattr(args, "ignore_conflicts", False),
        link=os.link if args.action in ["hardlink", "hard"]
        else os.symlink,
        verbose=args.verbose)

    # Process common args and specs
    if getattr(args, "all", False):
        specs = view.get_all_specs()
        if len(specs) == 0:
            tty.warn("Found no specs in %s" % path)

    elif args.action in actions_link:
        # only link commands need to disambiguate specs
        specs = [spack.cmd.disambiguate_spec(s) for s in specs]

    elif args.action in actions_status:
        # no specs implies all
        if len(specs) == 0:
            specs = view.get_all_specs()
        else:
            specs = relaxed_disambiguate(specs, view)

    else:
        # status and remove can map the name to packages in view
        specs = relaxed_disambiguate(specs, view)

    with_dependencies = args.dependencies.lower() in ['true', 'yes']

    # Map action to corresponding functionality
    if args.action in actions_link:
        try:
            view.add_specs(*specs,
                           with_dependencies=with_dependencies,
                           exclude=args.exclude)
        except MergeConflictError:
            tty.info("Some file blocked the merge, adding the '-i' flag will "
                     "ignore this conflict. For more information see e.g. "
                     "https://github.com/spack/spack/issues/9029")
            raise

    elif args.action in actions_remove:
        view.remove_specs(*specs,
                          with_dependencies=with_dependencies,
                          exclude=args.exclude,
                          with_dependents=not args.no_remove_dependents)

    elif args.action in actions_status:
        view.print_status(*specs, with_dependencies=with_dependencies)

    else:
        tty.error('Unknown action: "%s"' % args.action)

def check_install(self):
    spec = self.spec
    test_env = {}

    # Make sure the correct config is found
    test_env["BH_CONFIG"] = self.config_file

    # Remove the lib/spackenv directory from the PATH variable when
    # executing the tests, because it messes with the JIT compilation
    # inside Bohrium
    paths = os.environ['PATH'].split(':')
    paths = [p for p in paths if "spack/env" not in p]
    test_env["PATH"] = ":".join(paths)

    # Add bohrium's site-packages path to the PYTHONPATH environment
    pythonpaths = [p for p in os.environ["PYTHONPATH"].split(":")]
    pythonpaths.append(
        join_path(self.prefix, spec['python'].package.site_packages_dir))
    test_env["PYTHONPATH"] = ":".join(pythonpaths)

    # Collect the stacks which should be available:
    stacks = ["default"]
    if "+openmp" in spec:
        stacks.append("openmp")
    if "+cuda" in spec:
        stacks.append("cuda")
    if "+opencl" in spec:
        stacks.append("opencl")

    # C++ compiler and compiler flags
    cxx = Executable(self.compiler.cxx)
    cxx_flags = [
        "-I", self.prefix.include,
        "-I", self.prefix.include.bohrium,
        "-L", self.prefix.lib,
        "-lbh", "-lbhxx"
    ]

    # Compile C++ test program
    file_cxxadd = join_path(os.path.dirname(self.module.__file__),
                            "cxxadd.cpp")
    cxx("-o", "test_cxxadd", file_cxxadd, *cxx_flags)
    test_cxxadd = Executable("./test_cxxadd")

    # Build python test commandline
    file_pyadd = join_path(os.path.dirname(self.module.__file__),
                           "pyadd.py")
    test_pyadd = Executable(spec['python'].command.path + " " + file_pyadd)

    # Run tests for each available stack
    for bh_stack in stacks:
        tty.info("Testing with bohrium stack '" + bh_stack + "'")
        test_env["BH_STACK"] = bh_stack

        cpp_output = test_cxxadd(output=str, env=test_env)
        compare_output(cpp_output, "Success!\n")

        # Python test (if +python)
        if "+python" in spec:
            py_output = test_pyadd(output=str, env=test_env)
            compare_output(py_output, "Success!\n")

def tutorial(parser, args):
    if args.action == "basic":
        tty.info("Setting up Modulecmd.py's basic mock tutorial MODULEPATH")
        pymod.tutorial.basic_usage()
    elif args.action == "teardown":
        tty.info("Removing Modulecmd.py's mock tutorial MODULEPATH")
        pymod.tutorial.teardown()
    pymod.mc.dump()

def install_tree(src, dest, **kwargs):
    """Manually install a directory tree to a particular location."""
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copytree(src, dest, **kwargs)

    for s, d in traverse_tree(src, dest, follow_nonexisting=False):
        set_install_permissions(d)
        copy_mode(s, d)

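# Hedged usage sketch for install_tree (the paths are illustrative, not
# from the source):
#
#   install_tree('examples', join_path(prefix.share, 'examples'))
#
# After the copy, traverse_tree revisits each (source, destination) pair
# so set_install_permissions/copy_mode can normalize permissions on every
# installed file.
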
def check_one(spec, path, verbose=False):
    'Check status of view in path against spec'
    dotspack = os.path.join(path, '.spack', spec.name)
    if os.path.exists(dotspack):
        tty.info('Package in view: "%s"' % spec.name)
        return
    tty.info('Package not in view: "%s"' % spec.name)

def iter_read(self, pattern):
    """A helper to read json from a directory glob and return it loaded."""
    for filename in glob(pattern):
        basename = os.path.basename(filename)
        tty.info("Reading %s" % basename)
        yield read_json(filename)

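# Hedged usage sketch for iter_read (the directory and variable names are
# illustrative, not taken from the source):
#
#   for result in monitor.iter_read(os.path.join(results_dir, '*.json')):
#       process(result)
#
# Each matching file is announced via tty.info before its parsed json is
# yielded.
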
def setup_minimal_environment(self, env):
    """Sets a minimal openfoam environment."""
    tty.info('OpenFOAM minimal env {0}'.format(self.prefix))
    env.set('FOAM_PROJECT_DIR', self.projectdir)
    env.set('WM_PROJECT_DIR', self.projectdir)
    for d in ['wmake', self.archbin]:  # bin added automatically
        env.prepend_path('PATH', join_path(self.projectdir, d))

def patch(self):
    """Add binary .so files that are missing. Horrible hack."""
    outdir = os.path.join(self.stage.source_path, 'bin')
    indir = join_path(os.path.dirname(__file__), 'binary', 'linux')
    for f in os.listdir(indir):
        tty.info('Added file {0}'.format(f))
        install(join_path(indir, f), join_path(outdir, f))

def _run_tools_checks(self):
    """Run the tools smoke test checks."""
    tty.info('Running tools checks')

    dirs = [self.prefix.bin] if self.spec.satisfies('@0.3.3:0.3.5') else []

    checks = {
        # Versions 0.3.3:0.3.5 (spack-build/bin/tools)
        'replay': (['No input file'], 0),
    }

    self._run_checks(dirs, checks)

def _run_plots_checks(self):
    """Run the plots smoke test checks."""
    tty.info('Running plots checks')

    dirs = [self.prefix.bin] if self.spec.satisfies('@0.3.3:0.3.5') else []

    checks = {
        # Versions 0.3.3:0.3.5 (prefix.bin)
        'plot_allocations': ([''], 0),
    }

    self._run_checks(dirs, checks)

def build(self):
    """Build the cache"""
    tty.info("Building the MODULEPATH cache")
    self._data = {}
    self._data["version"] = cache_version_info
    # Build the modulepath cache
    for path in pymod.modulepath.walk():
        path.find_modules()
    self.write()

def setup_run_environment(self, env):
    """Sets the run environment (post-installation).
    The environment comes from running:

    .. code-block:: console

       $ . $WM_PROJECT_DIR/etc/bashrc
    """
    bashrc = join_path(self.projectdir, 'etc', 'bashrc')
    minimal = True
    if os.path.isfile(bashrc):
        # post-install: source the installed bashrc
        try:
            mods = EnvironmentModifications.from_sourcing_file(
                bashrc,
                clean=True,  # Remove duplicate entries
                blacklist=[  # Blacklist these
                    # Inadvertent changes
                    # -------------------
                    'PS1',              # Leave untouched
                    'MANPATH',          # Leave untouched

                    # Unneeded bits
                    # -------------
                    # 'FOAM_SETTINGS',  # Do not use with modules
                    # 'FOAM_INST_DIR',  # Old
                    # 'FOAM_(APP|ETC|SRC|SOLVERS|UTILITIES)',
                    # 'FOAM_TUTORIALS',  # May be useful
                    # 'WM_OSTYPE',      # Purely optional value

                    # Third-party cruft - only used for orig compilation
                    # -----------------
                    '[A-Z].*_ARCH_PATH',
                    # '(KAHIP|METIS|SCOTCH)_VERSION',

                    # User-specific
                    # -------------
                    'FOAM_RUN',
                    '(FOAM|WM)_.*USER_.*',
                ],
                whitelist=[  # Whitelist these
                    'MPI_ARCH_PATH',  # Can be required for compilation
                ])

            env.extend(mods)
            minimal = False
            tty.info('OpenFOAM bashrc env: {0}'.format(bashrc))
        except Exception:
            minimal = True

    if minimal:
        # pre-build or minimal environment
        self.setup_minimal_environment(env)

def install(self, spec, prefix):
    #
    # we need to follow TBB's compiler selection logic to get the
    # proper build + link flags, but we still need to use spack's
    # compiler wrappers
    #
    # to accomplish this, we do two things:
    #
    # * Look at the spack spec to determine which compiler we should
    #   pass to tbb's Makefile
    #
    # * patch tbb's build system to use the compiler wrappers (CC, CXX)
    #   for icc, gcc, clang (see coerce_to_spack())
    #
    self.coerce_to_spack("build")

    if spec.satisfies('%clang'):
        tbb_compiler = "clang"
    elif spec.satisfies('%intel'):
        tbb_compiler = "icc"
    else:
        tbb_compiler = "gcc"

    mkdirp(prefix)
    mkdirp(prefix.lib)

    #
    # tbb does not have a configure script or make install target
    # we simply call make, and try to put the pieces together
    #
    make("compiler=%s" % (tbb_compiler))
    # Note: the tbb_build_dir option was also tried, but it quickly
    # errored out:
    # make("compiler=%s tbb_build_dir=%s" % (tbb_compiler, prefix.lib))

    # install headers to {prefix}/include
    install_tree('include', prefix.include)

    # install libs to {prefix}/lib
    tbb_lib_names = ["libtbb", "libtbbmalloc", "libtbbmalloc_proxy"]

    for lib_name in tbb_lib_names:
        # install release libs
        fs = glob.glob(join_path("build", "*release", lib_name + ".*"))
        if len(fs) < 1:
            tty.error("Failed to find release lib: " + lib_name)
        for f in fs:
            tty.info("installing: " + f)
            install(f, prefix.lib)
        # install debug libs if they exist
        fs = glob.glob(join_path("build", "*debug", lib_name + "_debug.*"))
        for f in fs:
            tty.info("installing: " + f)
            install(f, prefix.lib)

def log_info(module, mode, string, **fmt_kwds):
    """Log an information message to the user

    Arguments:
        module (Module): The module being executed
        mode (Mode): The mode of execution
        string (str): The informational message
    """
    pymod.modes.assert_known_mode(mode)
    tty.info(string.format(**fmt_kwds), reported_by=module.fullname)

def try_import(self, module, abstract_spec_str):
    test_fn, info = functools.partial(_try_import_from_store, module), {}
    if test_fn(query_spec=abstract_spec_str, query_info=info):
        return True

    tty.info("Bootstrapping {0} from pre-built binaries".format(module))
    abstract_spec, bincache_platform = self._spec_and_platform(
        abstract_spec_str + ' ^' + spec_for_current_python())
    data = self._read_metadata(module)
    return self._install_and_test(
        abstract_spec, bincache_platform, data, test_fn)

def add_standalone(self, spec):
    if spec.package.is_extension:
        tty.error(self._croot + 'Package %s is an extension.' % spec.name)
        return False

    if spec.external:
        tty.warn(self._croot + 'Skipping external package: %s'
                 % colorize_spec(spec))
        return True

    if self.check_added(spec):
        tty.warn(self._croot + 'Skipping already linked package: %s'
                 % colorize_spec(spec))
        return True

    if spec.package.extendable:
        # Check for globally activated extensions in the extendee that
        # we're looking at.
        activated = [
            p.spec for p in spack.store.db.activated_extensions_for(spec)
        ]
        if activated:
            tty.error("Globally activated extensions cannot be used in "
                      "conjunction with filesystem views. "
                      "Please deactivate the following specs: ")
            spack.cmd.display_specs(activated, flags=True, variants=True,
                                    long=False)
            return False

    tree = LinkTree(spec.prefix)

    if not self.ignore_conflicts:
        conflict = tree.find_conflict(self.root)
        if conflict is not None:
            tty.error(self._croot +
                      "Cannot link package %s, file already exists: %s"
                      % (spec.name, conflict))
            return False

    conflicts = tree.merge(self.root, link=self.link,
                           ignore=ignore_metadata_dir,
                           ignore_conflicts=self.ignore_conflicts)
    self.link_meta_folder(spec)

    if self.ignore_conflicts:
        for c in conflicts:
            tty.warn(self._croot + "Could not link: %s" % c)

    if self.verbose:
        tty.info(self._croot + 'Linked package: %s' % colorize_spec(spec))
    return True

def remove_standalone(self, spec):
    """Remove (unlink) a standalone package from this view."""
    if not self.check_added(spec):
        tty.warn(self._croot +
                 'Skipping package not linked in view: %s' % spec.name)
        return

    self.unmerge(spec)
    self.unlink_meta_folder(spec)

    if self.verbose:
        tty.info(self._croot + 'Removed package: %s' % colorize_spec(spec))

def add_extra_files(foam_pkg, common, local, **kwargs):
    """Copy additional common and local files into the stage.source_path
    from the openfoam-com/common and the package/assets directories,
    respectively.
    """
    outdir = foam_pkg.stage.source_path

    indir = join_path(os.path.dirname(__file__), 'common')
    for f in common:
        tty.info('Added file {0}'.format(f))
        install(join_path(indir, f), join_path(outdir, f))

    indir = join_path(foam_pkg.package_dir, 'assets')
    for f in local:
        tty.info('Added file {0}'.format(f))
        install(join_path(indir, f), join_path(outdir, f))

def run(names, verbose=False):
    """Run tests with the supplied names.  Names should be a list.
    If it's empty, run ALL of the tests."""
    verbosity = 1 if not verbose else 2

    # If they didn't provide names of tests to run, then use the default
    # list above.
    if not names:
        names = _test_names
    else:
        for test in names:
            if test not in _test_names:
                tty.error("%s is not a valid test name." % test,
                          "Valid names are:")
                colify(_test_names, indent=4)
                sys.exit(1)

    runner = unittest.TextTestRunner(verbosity=verbosity)

    testsRun = errors = failures = skipped = 0
    for test in names:
        module = _test_module + '.' + test
        print(module, test)

        suite = unittest.defaultTestLoader.loadTestsFromName(module)

        tty.msg("Running test: %s" % test)
        result = runner.run(suite)
        testsRun += result.testsRun
        errors += len(result.errors)
        failures += len(result.failures)
        skipped += len(result.skipped)

    succeeded = not errors and not failures
    tty.msg("Tests Complete.",
            "%5d tests run" % testsRun,
            "%5d skipped" % skipped,
            "%5d failures" % failures,
            "%5d errors" % errors)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)

def run(names, verbose=False):
    """Run tests with the supplied names.  Names should be a list.
    If it's empty, run ALL of Spack's tests."""
    verbosity = 1 if not verbose else 2

    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(test_names, indent=4)
                sys.exit(1)

    runner = unittest.TextTestRunner(verbosity=verbosity)

    testsRun = errors = failures = 0
    for test in names:
        module = 'spack.test.' + test
        print(module)

        suite = unittest.defaultTestLoader.loadTestsFromName(module)

        tty.msg("Running test: %s" % test)
        result = runner.run(suite)
        testsRun += result.testsRun
        errors += len(result.errors)
        failures += len(result.failures)

    succeeded = not errors and not failures
    tty.msg("Tests Complete.",
            "%5d tests run" % testsRun,
            "%5d failures" % failures,
            "%5d errors" % errors)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)

def run(names, outputDir, verbose=False):
    """Run tests with the supplied names.  Names should be a list.
    If it's empty, run ALL of Spack's tests."""
    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(sorted(test_names), indent=4)
                sys.exit(1)

    tally = Tally()
    for test in names:
        module = 'spack.test.' + test
        print(module)

        tty.msg("Running test: %s" % test)

        runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]

        if outputDir:
            xmlOutputFname = "unittests-{0}.xml".format(test)
            xmlOutputPath = join_path(outputDir, xmlOutputFname)
            runOpts += ["--with-xunit",
                        "--xunit-file={0}".format(xmlOutputPath)]
        argv = [""] + runOpts + [module]
        nose.run(argv=argv, addplugins=[tally])

    succeeded = not tally.failCount and not tally.errorCount
    tty.msg("Tests Complete.",
            "%5d tests run" % tally.numberOfTestsRun,
            "%5d failures" % tally.failCount,
            "%5d errors" % tally.errorCount)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)

def remove_one(spec, path, verbose=False):
    'Remove any files found in `spec` from `path` and purge empty directories.'

    if not os.path.exists(path):
        return  # done, short circuit

    dotspack = transform_path(spec, '.spack', path)
    if not os.path.exists(dotspack):
        if verbose:
            tty.info('Skipping nonexistent package: "%s"' % spec.name)
        return

    if verbose:
        tty.info('Removing package: "%s"' % spec.name)

    for dirpath, dirnames, filenames in os.walk(spec.prefix):
        if not filenames:
            continue
        targdir = transform_path(spec, dirpath, path)
        for fname in filenames:
            dst = os.path.join(targdir, fname)
            if not os.path.exists(dst):
                continue
            os.unlink(dst)

def add_standalone(self, spec):
    if spec.package.is_extension:
        tty.error(self._croot + 'Package %s is an extension.' % spec.name)
        return False

    if spec.external:
        tty.warn(self._croot + 'Skipping external package: %s'
                 % colorize_spec(spec))
        return True

    if self.check_added(spec):
        tty.warn(self._croot + 'Skipping already linked package: %s'
                 % colorize_spec(spec))
        return True

    if spec.package.extendable:
        # Check for globally activated extensions in the extendee that
        # we're looking at.
        activated = [p.spec for p in
                     spack.store.db.activated_extensions_for(spec)]
        if activated:
            tty.error("Globally activated extensions cannot be used in "
                      "conjunction with filesystem views. "
                      "Please deactivate the following specs: ")
            spack.cmd.display_specs(activated, flags=True, variants=True,
                                    long=False)
            return False

    self.merge(spec)
    self.link_meta_folder(spec)

    if self.verbose:
        tty.info(self._croot + 'Linked package: %s' % colorize_spec(spec))
    return True

def install(src, dest):
    """Manually install a file to a particular location."""
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copy(src, dest)
    set_install_permissions(dest)
    copy_mode(src, dest)

def setup_environment(self, spack_env, run_env):
    """Add environment variables to the generated module file.

    These environment variables come from running:

    .. code-block:: console

       $ . $WM_PROJECT_DIR/etc/bashrc
    """
    # NOTE: Spack runs setup_environment twice.
    # 1) pre-build to set up the build environment
    # 2) post-install to determine runtime environment variables
    # The etc/bashrc is only available (with correct content)
    # post-installation.
    bashrc = join_path(self.projectdir, 'etc', 'bashrc')
    minimal = True
    if os.path.isfile(bashrc):
        # post-install: source the installed bashrc
        try:
            mods = EnvironmentModifications.from_sourcing_file(
                bashrc,
                clean=True,  # Remove duplicate entries
                blacklist=[  # Blacklist these
                    # Inadvertent changes
                    # -------------------
                    'PS1',            # Leave unaffected
                    'MANPATH',        # Leave unaffected

                    # Unneeded bits
                    # -------------
                    'FOAM_INST_DIR',  # Possibly incorrect
                    'FOAM_(APP|ETC|SRC|SOLVERS|UTILITIES)',
                    'FOAM_TEST_.*_DIR',
                    'WM_NCOMPPROCS',
                    # 'FOAM_TUTORIALS',  # can be useful

                    # Lots of third-party cruft
                    # -------------------------
                    '[A-Z].*_(BIN|LIB|INCLUDE)_DIR',
                    '[A-Z].*_SYSTEM',
                    'WM_THIRD_PARTY_.*',
                    '(BISON|FLEX|CMAKE|ZLIB)_DIR',
                    '(METIS|PARMETIS|PARMGRIDGEN|SCOTCH)_DIR',

                    # User-specific
                    # -------------
                    'FOAM_RUN',
                    '(FOAM|WM)_.*USER_.*',
                ],
                whitelist=[  # Whitelist these
                    'MPI_ARCH_PATH',  # Can be needed for compilation
                    'PYTHON_BIN_DIR',
                ])

            run_env.extend(mods)
            minimal = False
            tty.info('foam-extend env: {0}'.format(bashrc))
        except Exception:
            minimal = True

    if minimal:
        # pre-build or minimal environment
        tty.info('foam-extend minimal env {0}'.format(self.prefix))
        run_env.set('FOAM_INST_DIR', os.path.dirname(self.projectdir))
        run_env.set('FOAM_PROJECT_DIR', self.projectdir)
        run_env.set('WM_PROJECT_DIR', self.projectdir)
        for d in ['wmake', self.archbin]:  # bin added automatically
            run_env.prepend_path('PATH', join_path(self.projectdir, d))

def get_uninstall_list(args, specs, env):
    # Gets the list of installed specs that match the ones given via cli
    # args.all takes care of the case where '-a' is given in the cli
    uninstall_list = find_matching_specs(env, specs, args.all, args.force)

    # Takes care of '-R'
    active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)

    # if we are in the global scope, we complain if you try to remove a
    # spec that's in an environment.  If we're in an environment, we'll
    # just *remove* it from the environment, so we ignore this
    # error when *in* an environment
    spec_envs = dependent_environments(uninstall_list)
    spec_envs = inactive_dependent_environments(spec_envs)

    # Process spec_dependents and update uninstall_list
    has_error = not args.force and (
        (active_dpts and not args.dependents)  # dependents in the current env
        or (not env and spec_envs)  # there are environments that need specs
    )

    # say why each problem spec is needed
    if has_error:
        specs = set(active_dpts)
        if not env:
            specs.update(set(spec_envs))  # environments depend on this

        for i, spec in enumerate(sorted(specs)):
            # space out blocks of reasons
            if i > 0:
                print()

            tty.info("Will not uninstall %s" % spec.cformat("$_$@$%@$/"),
                     format='*r')

            dependents = active_dpts.get(spec)
            if dependents:
                print('The following packages depend on it:')
                spack.cmd.display_specs(dependents, **display_args)

            if not env:
                envs = spec_envs.get(spec)
                if envs:
                    print('It is used by the following environments:')
                    colify([e.name for e in envs], indent=4)

        msgs = []
        if active_dpts:
            msgs.append(
                'use `spack uninstall --dependents` to remove dependents too')
        if spec_envs:
            msgs.append('use `spack env remove` to remove from environments')
        print()
        tty.die('There are still dependents.', *msgs)

    elif args.dependents:
        for spec, lst in active_dpts.items():
            uninstall_list.extend(lst)
        uninstall_list = list(set(uninstall_list))

    # only force-remove (don't completely uninstall) specs that still
    # have external dependent envs or pkgs
    removes = set(inactive_dpts)
    if env:
        removes.update(spec_envs)

    # remove anything in removes from the uninstall list
    uninstall_list = set(uninstall_list) - removes

    return uninstall_list, removes

def setup_environment(self, spack_env, run_env):
    """Add environment variables to the generated module file.

    These environment variables come from running:

    .. code-block:: console

       $ . $WM_PROJECT_DIR/etc/bashrc
    """
    # NOTE: Spack runs setup_environment twice.
    # 1) pre-build to set up the build environment
    # 2) post-install to determine runtime environment variables
    # The etc/bashrc is only available (with correct content)
    # post-installation.
    bashrc = join_path(self.projectdir, 'etc', 'bashrc')
    minimal = True
    if os.path.isfile(bashrc):
        # post-install: source the installed bashrc
        try:
            mods = EnvironmentModifications.from_sourcing_file(
                bashrc,
                clean=True,  # Remove duplicate entries
                blacklist=[  # Blacklist these
                    # Inadvertent changes
                    # -------------------
                    'PS1',            # Leave unaffected
                    'MANPATH',        # Leave unaffected

                    # Unneeded bits
                    # -------------
                    'FOAM_SETTINGS',  # Do not use with modules
                    'FOAM_INST_DIR',  # Old
                    'FOAM_(APP|ETC|SRC|SOLVERS|UTILITIES)',
                    # 'FOAM_TUTORIALS',  # can be useful
                    'WM_OSTYPE',      # Purely optional value

                    # Third-party cruft - only used for orig compilation
                    # -----------------
                    '[A-Z].*_ARCH_PATH',
                    '(KAHIP|METIS|SCOTCH)_VERSION',

                    # User-specific
                    # -------------
                    'FOAM_RUN',
                    '(FOAM|WM)_.*USER_.*',
                ],
                whitelist=[  # Whitelist these
                    'MPI_ARCH_PATH',  # Can be needed for compilation
                ])

            run_env.extend(mods)
            minimal = False
            tty.info('OpenFOAM bashrc env: {0}'.format(bashrc))
        except Exception:
            minimal = True

    if minimal:
        # pre-build or minimal environment
        tty.info('OpenFOAM minimal env {0}'.format(self.prefix))
        run_env.set('FOAM_PROJECT_DIR', self.projectdir)
        run_env.set('WM_PROJECT_DIR', self.projectdir)
        for d in ['wmake', self.archbin]:  # bin added automatically
            run_env.prepend_path('PATH', join_path(self.projectdir, d))

def create_host_config(self, spec, prefix):
    """This method creates a 'host-config' file that specifies all of
    the options used to configure and build conduit.

    For more details about 'host-config' files see:
        http://software.llnl.gov/conduit/building.html
    """

    #######################
    # Compiler Info
    #######################
    c_compiler = env["SPACK_CC"]
    cpp_compiler = env["SPACK_CXX"]
    f_compiler = None
    if self.compiler.fc:
        # even if this is set, it may not exist so do one more sanity check
        if os.path.isfile(env["SPACK_FC"]):
            f_compiler = env["SPACK_FC"]

    #######################################################################
    # By directly fetching the names of the actual compilers we appear
    # to be doing something evil here, but this is necessary to create a
    # 'host config' file that works outside of the spack install env.
    #######################################################################

    sys_type = spec.architecture
    # if on llnl systems, we can use the SYS_TYPE
    if "SYS_TYPE" in env:
        sys_type = env["SYS_TYPE"]

    ##############################################
    # Find and record what CMake is used
    ##############################################
    if "+cmake" in spec:
        cmake_exe = join_path(spec['cmake'].prefix.bin, "cmake")
    else:
        cmake_exe = which("cmake")
        if cmake_exe is None:
            msg = 'failed to find CMake (and cmake variant is off)'
            raise RuntimeError(msg)
        cmake_exe = cmake_exe.command

    host_cfg_fname = "%s-%s-%s.cmake" % (socket.gethostname(),
                                         sys_type,
                                         spec.compiler)

    cfg = open(host_cfg_fname, "w")
    cfg.write("##################################\n")
    cfg.write("# spack generated host-config\n")
    cfg.write("##################################\n")
    cfg.write("# {0}-{1}\n".format(sys_type, spec.compiler))
    cfg.write("##################################\n\n")

    # Include path to cmake for reference
    cfg.write("# cmake from spack \n")
    cfg.write("# cmake executable path: %s\n\n" % cmake_exe)

    #######################
    # Compiler Settings
    #######################
    cfg.write("#######\n")
    cfg.write("# using %s compiler spec\n" % spec.compiler)
    cfg.write("#######\n\n")
    cfg.write("# c compiler used by spack\n")
    cfg.write(cmake_cache_entry("CMAKE_C_COMPILER", c_compiler))
    cfg.write("# cpp compiler used by spack\n")
    cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER", cpp_compiler))

    cfg.write("# fortran compiler used by spack\n")
    if f_compiler is not None:
        cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "ON"))
        cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER", f_compiler))
    else:
        cfg.write("# no fortran compiler found\n\n")
        cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "OFF"))

    #######################
    # Python
    #######################
    cfg.write("# Python Support\n")
    if "+python" in spec:
        python_exe = join_path(spec['python'].prefix.bin, "python")
        cfg.write("# Enable python module builds\n")
        cfg.write(cmake_cache_entry("ENABLE_PYTHON", "ON"))
        cfg.write("# python from spack \n")
        cfg.write(cmake_cache_entry("PYTHON_EXECUTABLE", python_exe))
        # install module to standard style site packages dir
        # so we can support spack activate
        py_ver_short = "python{0}".format(spec["python"].version.up_to(2))
        pym_prefix = join_path("${CMAKE_INSTALL_PREFIX}",
                               "lib", py_ver_short, "site-packages")
        # use pym_prefix as the install path
        cfg.write(cmake_cache_entry("PYTHON_MODULE_INSTALL_PREFIX",
                                    pym_prefix))
    else:
        cfg.write(cmake_cache_entry("ENABLE_PYTHON", "OFF"))

    if "+doc" in spec:
        cfg.write(cmake_cache_entry("ENABLE_DOCS", "ON"))

        cfg.write("# sphinx from spack \n")
        sphinx_build_exe = join_path(spec['py-sphinx'].prefix.bin,
                                     "sphinx-build")
        cfg.write(cmake_cache_entry("SPHINX_EXECUTABLE", sphinx_build_exe))

        cfg.write("# doxygen from uberenv\n")
        doxygen_exe = join_path(spec['doxygen'].prefix.bin, "doxygen")
        cfg.write(cmake_cache_entry("DOXYGEN_EXECUTABLE", doxygen_exe))
    else:
        cfg.write(cmake_cache_entry("ENABLE_DOCS", "OFF"))

    #######################
    # MPI
    #######################
    cfg.write("# MPI Support\n")
    if "+mpi" in spec:
        cfg.write(cmake_cache_entry("ENABLE_MPI", "ON"))
        cfg.write(cmake_cache_entry("MPI_C_COMPILER", spec['mpi'].mpicc))
        # we use `mpicc` as `MPI_CXX_COMPILER` b/c we don't want to
        # introduce linking deps to the MPI C++ libs (we aren't using
        # C++ features of MPI) -- this happens with some versions of
        # OpenMPI
        cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", spec['mpi'].mpicc))
        cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER",
                                    spec['mpi'].mpifc))
    else:
        cfg.write(cmake_cache_entry("ENABLE_MPI", "OFF"))

    #######################################################################
    # I/O Packages
    #######################################################################
    cfg.write("# I/O Packages\n\n")

    #######################
    # HDF5
    #######################
    cfg.write("# hdf5 from spack \n")
    if "+hdf5" in spec:
        cfg.write(cmake_cache_entry("HDF5_DIR", spec['hdf5'].prefix))
    else:
        cfg.write("# hdf5 not built by spack \n")

    #######################
    # Silo
    #######################
    cfg.write("# silo from spack \n")
    if "+silo" in spec:
        cfg.write(cmake_cache_entry("SILO_DIR", spec['silo'].prefix))
    else:
        cfg.write("# silo not built by spack \n")

    cfg.write("##################################\n")
    cfg.write("# end spack generated host-config\n")
    cfg.write("##################################\n")
    cfg.close()

    host_cfg_fname = os.path.abspath(host_cfg_fname)
    tty.info("spack generated conduit host-config file: " + host_cfg_fname)
    return host_cfg_fname

def create_host_config(self, spec, prefix, py_site_pkgs_dir=None):
    """This method creates a 'host-config' file that specifies all of
    the options used to configure and build conduit.

    For more details about 'host-config' files see:
        http://software.llnl.gov/conduit/building.html

    Note:
      The `py_site_pkgs_dir` arg exists to allow a package that
      subclasses this package to provide a specific site packages dir
      when calling this function. `py_site_pkgs_dir` should be an
      absolute path or `None`.

      This is necessary because the spack `site_packages_dir` var will
      not exist in the base class. For more details on this issue see:
      https://github.com/spack/spack/issues/6261
    """

    #######################
    # Compiler Info
    #######################
    c_compiler = env["SPACK_CC"]
    cpp_compiler = env["SPACK_CXX"]
    f_compiler = None
    if self.compiler.fc:
        # even if this is set, it may not exist so do one more sanity check
        if os.path.isfile(env["SPACK_FC"]):
            f_compiler = env["SPACK_FC"]

    #######################################################################
    # By directly fetching the names of the actual compilers we appear
    # to be doing something evil here, but this is necessary to create a
    # 'host config' file that works outside of the spack install env.
    #######################################################################

    sys_type = spec.architecture
    # if on llnl systems, we can use the SYS_TYPE
    if "SYS_TYPE" in env:
        sys_type = env["SYS_TYPE"]

    ##############################################
    # Find and record what CMake is used
    ##############################################
    if "+cmake" in spec:
        cmake_exe = spec['cmake'].command.path
    else:
        cmake_exe = which("cmake")
        if cmake_exe is None:
            msg = 'failed to find CMake (and cmake variant is off)'
            raise RuntimeError(msg)
        cmake_exe = cmake_exe.path

    host_cfg_fname = "%s-%s-%s-conduit.cmake" % (socket.gethostname(),
                                                 sys_type,
                                                 spec.compiler)

    cfg = open(host_cfg_fname, "w")
    cfg.write("##################################\n")
    cfg.write("# spack generated host-config\n")
    cfg.write("##################################\n")
    cfg.write("# {0}-{1}\n".format(sys_type, spec.compiler))
    cfg.write("##################################\n\n")

    # Include path to cmake for reference
    cfg.write("# cmake from spack \n")
    cfg.write("# cmake executable path: %s\n\n" % cmake_exe)

    #######################
    # Compiler Settings
    #######################
    cfg.write("#######\n")
    cfg.write("# using %s compiler spec\n" % spec.compiler)
    cfg.write("#######\n\n")
    cfg.write("# c compiler used by spack\n")
    cfg.write(cmake_cache_entry("CMAKE_C_COMPILER", c_compiler))
    cfg.write("# cpp compiler used by spack\n")
    cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER", cpp_compiler))

    cfg.write("# fortran compiler used by spack\n")
    if f_compiler is not None:
        cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "ON"))
        cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER", f_compiler))
    else:
        cfg.write("# no fortran compiler found\n\n")
        cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "OFF"))

    #######################
    # Python
    #######################
    cfg.write("# Python Support\n")
    if "+python" in spec:
        cfg.write("# Enable python module builds\n")
        cfg.write(cmake_cache_entry("ENABLE_PYTHON", "ON"))
        cfg.write("# python from spack \n")
        cfg.write(cmake_cache_entry("PYTHON_EXECUTABLE",
                                    spec['python'].command.path))
        # only set dest python site packages dir if passed
        if py_site_pkgs_dir:
            cfg.write(cmake_cache_entry("PYTHON_MODULE_INSTALL_PREFIX",
                                        py_site_pkgs_dir))
    else:
        cfg.write(cmake_cache_entry("ENABLE_PYTHON", "OFF"))

    if "+doc" in spec:
        cfg.write(cmake_cache_entry("ENABLE_DOCS", "ON"))

        cfg.write("# sphinx from spack \n")
        sphinx_build_exe = join_path(spec['py-sphinx'].prefix.bin,
                                     "sphinx-build")
        cfg.write(cmake_cache_entry("SPHINX_EXECUTABLE", sphinx_build_exe))

        if "+doxygen" in spec:
            cfg.write("# doxygen from uberenv\n")
            doxygen_exe = spec['doxygen'].command.path
            cfg.write(cmake_cache_entry("DOXYGEN_EXECUTABLE", doxygen_exe))
    else:
        cfg.write(cmake_cache_entry("ENABLE_DOCS", "OFF"))

    #######################
    # MPI
    #######################
    cfg.write("# MPI Support\n")
    if "+mpi" in spec:
        cfg.write(cmake_cache_entry("ENABLE_MPI", "ON"))
        cfg.write(cmake_cache_entry("MPI_C_COMPILER", spec['mpi'].mpicc))
        cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", spec['mpi'].mpicxx))
        cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER",
                                    spec['mpi'].mpifc))
        mpiexe_bin = join_path(spec['mpi'].prefix.bin, 'mpiexec')
        if os.path.isfile(mpiexe_bin):
            # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
            # vs the older versions which expect MPIEXEC
            if self.spec["cmake"].satisfies('@3.10:'):
                cfg.write(cmake_cache_entry("MPIEXEC_EXECUTABLE",
                                            mpiexe_bin))
            else:
                cfg.write(cmake_cache_entry("MPIEXEC", mpiexe_bin))
    else:
        cfg.write(cmake_cache_entry("ENABLE_MPI", "OFF"))

    #######################################################################
    # I/O Packages
    #######################################################################
    cfg.write("# I/O Packages\n\n")

    #######################
    # HDF5
    #######################
    cfg.write("# hdf5 from spack \n")
    if "+hdf5" in spec:
        cfg.write(cmake_cache_entry("HDF5_DIR", spec['hdf5'].prefix))
    else:
        cfg.write("# hdf5 not built by spack \n")

    #######################
    # Silo
    #######################
    cfg.write("# silo from spack \n")
    if "+silo" in spec:
        cfg.write(cmake_cache_entry("SILO_DIR", spec['silo'].prefix))
    else:
        cfg.write("# silo not built by spack \n")

    cfg.write("##################################\n")
    cfg.write("# end spack generated host-config\n")
    cfg.write("##################################\n")
    cfg.close()

    host_cfg_fname = os.path.abspath(host_cfg_fname)
    tty.info("spack generated conduit host-config file: " + host_cfg_fname)
    return host_cfg_fname

def patch(self):
    """Copy additional files or other patching."""
    add_extra_files(self, self.common, self.assets)
    # Emit openfoam version immediately, if we resolved the wrong version
    # it takes a very long time to rebuild!
    tty.info('Build for ' + self.spec['openfoam'].format('$_$@$%@+$+'))

def install(src, dest):
    """Manually install a file to a particular location."""
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copy(src, dest)