def test_report():
    out = debug('report')
    arch = architecture.Arch(architecture.platform(), 'frontend', 'frontend')

    assert get_version() in out
    assert platform.python_version() in out
    assert str(arch) in out
    assert spack.config.get('config:concretizer') in out
def arch(parser, args):
    if args.frontend:
        arch = architecture.Arch(architecture.platform(),
                                 'frontend', 'frontend')
    elif args.backend:
        arch = architecture.Arch(architecture.platform(),
                                 'backend', 'backend')
    else:
        arch = architecture.Arch(architecture.platform(),
                                 'default_os', 'default_target')

    if args.platform:
        print(arch.platform)
    elif args.operating_system:
        print(arch.os)
    elif args.target:
        print(arch.target)
    else:
        print(arch)
def arch(parser, args):
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')

    if args.platform:
        print(arch.platform)
    elif args.operating_system:
        print(arch.platform_os)
    elif args.target:
        print(arch.target)
    else:
        print(arch)
def arch(parser, args):
    if args.known_targets:
        display_targets(archspec.cpu.TARGETS)
        return

    if args.frontend:
        arch = architecture.Arch(architecture.platform(),
                                 'frontend', 'frontend')
    elif args.backend:
        arch = architecture.Arch(architecture.platform(),
                                 'backend', 'backend')
    else:
        arch = architecture.Arch(architecture.platform(),
                                 'default_os', 'default_target')

    if args.platform:
        print(arch.platform)
    elif args.operating_system:
        print(arch.os)
    elif args.target:
        print(arch.target)
    else:
        print(arch)
def get_specs(allarch=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')

    if not spack.mirror.MirrorCollection():
        tty.debug("No Spack mirrors are currently configured")
        return {}

    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)

        tty.debug('Finding buildcaches at {0}'.format(
            url_util.format(fetch_url_build_cache)))

        index_url = url_util.join(fetch_url_build_cache, 'index.json')

        try:
            _, _, file_stream = web_util.read_from_url(
                index_url, 'application/json')
            index_object = codecs.getreader('utf-8')(file_stream).read()
        except (URLError, web_util.SpackWebError) as url_err:
            tty.error('Failed to read index {0}'.format(index_url))
            tty.debug(url_err)
            # Continue on to the next mirror
            continue

        tmpdir = tempfile.mkdtemp()
        index_file_path = os.path.join(tmpdir, 'index.json')
        with open(index_file_path, 'w') as fd:
            fd.write(index_object)

        db_root_dir = os.path.join(tmpdir, 'db_root')
        db = spack_db.Database(None, db_dir=db_root_dir,
                               enable_transaction_locking=False)

        db._read_from_file(index_file_path)
        spec_list = db.query_local(installed=False)

        for indexed_spec in spec_list:
            spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
            if (allarch is True or spec_arch == arch):
                _cached_specs.add(indexed_spec)

    return _cached_specs
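# Hypothetical usage sketch for the get_specs() variant above. It assumes the
# same module-level imports and at least one configured Spack mirror; the
# 'zlib' name filter is purely illustrative.
cached = get_specs(allarch=True)  # all specs indexed in the buildcaches
zlib_specs = sorted(str(s) for s in cached if s.name == 'zlib')
for entry in zlib_specs:
    print(entry)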
def install(self, spec, prefix):
    """ Create bash setup script in prefix."""
    # first, log spack version to build-out
    tty.msg('* **Spack:**', get_version())
    tty.msg('* **Python:**', platform.python_version())
    tty.msg('* **Platform:**',
            architecture.Arch(architecture.platform(),
                              'frontend', 'frontend'))

    # get all dependency specs, including compiler
    with spack.store.db.read_transaction():
        specs = [dep for dep in spec.traverse(order='post')]

    # record all changes to the environment by packages in the stack
    env_mod = spack.util.environment.EnvironmentModifications()

    # first setup compiler, similar to build_environment.py in spack
    compiler = self.compiler
    if compiler.cc:
        env_mod.set('CC', compiler.cc)
    if compiler.cxx:
        env_mod.set('CXX', compiler.cxx)
    if compiler.f77:
        env_mod.set('F77', compiler.f77)
    if compiler.fc:
        env_mod.set('FC', compiler.fc)
    compiler.setup_custom_environment(self, env_mod)
    env_mod.prepend_path('PATH', os.path.dirname(compiler.cxx))

    # now setup all other packages
    for _spec in specs:
        env_mod.extend(uenv.environment_modifications_for_spec(_spec))
        env_mod.prepend_path(uenv.spack_loaded_hashes_var, _spec.dag_hash())

    # transform to bash commands, and write to file
    cmds = k4_generate_setup_script(env_mod)
    with open(os.path.join(prefix, "setup.sh"), "w") as f:
        f.write(cmds)

    # optionally add a symlink (location configurable via environment variable
    # K4_LATEST_SETUP_PATH. Step will be skipped if it is empty)
    try:
        symlink_path = os.environ.get("K4_LATEST_SETUP_PATH", "")
        if symlink_path:
            # make sure that the path exists, create if not
            if not os.path.exists(os.path.dirname(symlink_path)):
                os.makedirs(os.path.dirname(symlink_path))
            # make sure that an existing file will be overwritten,
            # even if it is a symlink (for which 'exists' is false!)
            if os.path.exists(symlink_path) or os.path.islink(symlink_path):
                os.remove(symlink_path)
            os.symlink(os.path.join(prefix, "setup.sh"), symlink_path)
    except:
        tty.warn("Could not create symlink")
def _eval_conditional(string):
    """Evaluate conditional definitions using restricted variable scope."""
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')
    valid_variables = {
        'target': str(arch.target),
        'os': str(arch.os),
        'platform': str(arch.platform),
        'arch': str(arch),
        'architecture': str(arch),
        're': re,
        'env': os.environ,
        'hostname': socket.gethostname()
    }
    return eval(string, valid_variables)
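# Hypothetical usage sketch for _eval_conditional() above. The strings mirror
# the 'when:' conditionals accepted in Spack environment definitions; the
# results naturally depend on the host evaluating them.
on_linux = _eval_conditional('platform == "linux"')
on_skylake = _eval_conditional('target == "skylake"')
on_login_node = _eval_conditional(
    r're.match("login\d+", hostname) is not None')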
def install(self, spec, prefix):
    """ Create bash setup script in prefix."""
    # first, log spack version to build-out
    tty.msg('* **Spack:**', get_version())
    tty.msg('* **Python:**', platform.python_version())
    tty.msg('* **Platform:**',
            architecture.Arch(architecture.platform(),
                              'frontend', 'frontend'))

    # get all dependency specs, including compiler
    with spack.store.db.read_transaction():
        specs = [dep for dep in spec.traverse(order='post')]
    try:
        gcc_spec = spack.cmd.disambiguate_spec(str(spec.compiler), None,
                                               first=True)
        gcc_specs = [dep for dep in gcc_spec.traverse(order='post')]
        specs = specs + gcc_specs
    except:
        tty.warn("No spec found for " + str(spec.compiler) +
                 ". Assuming it is a system compiler, "
                 "not adding it to the setup.")

    # record all changes to the environment by packages in the stack
    env_mod = spack.util.environment.EnvironmentModifications()
    for _spec in specs:
        env_mod.extend(uenv.environment_modifications_for_spec(_spec))
        env_mod.prepend_path(uenv.spack_loaded_hashes_var, _spec.dag_hash())

    # transform to bash commands, and write to file
    cmds = k4_generate_setup_script(env_mod)
    with open(os.path.join(prefix, "setup.sh"), "w") as f:
        f.write(cmds)

    # optionally add a symlink (location configurable via environment variable
    # K4_LATEST_SETUP_PATH. Step will be skipped if it is empty)
    try:
        symlink_path = os.environ.get("K4_LATEST_SETUP_PATH", "")
        if symlink_path:
            # make sure that the path exists, create if not
            if not os.path.exists(os.path.dirname(symlink_path)):
                os.makedirs(os.path.dirname(symlink_path))
            # make sure that an existing file will be overwritten,
            # even if it is a symlink (for which 'exists' is false!)
            if os.path.exists(symlink_path) or os.path.islink(symlink_path):
                os.remove(symlink_path)
            os.symlink(os.path.join(prefix, "setup.sh"), symlink_path)
    except:
        tty.warn("Could not create symlink")
def get_host_environment():
    """Return a dictionary (lookup) with host information (not including the
    os.environ).
    """
    import spack.architecture as architecture
    import spack.spec
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')
    arch_spec = spack.spec.Spec('arch=%s' % arch)
    return {
        'target': str(arch.target),
        'os': str(arch.os),
        'platform': str(arch.platform),
        'arch': arch_spec,
        'architecture': arch_spec,
        'arch_str': str(arch),
        'hostname': socket.gethostname()
    }
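# Hypothetical usage sketch for get_host_environment() above (assumes it is
# importable from the surrounding module); the keys come straight from the
# dictionary built in its return statement.
host = get_host_environment()
print('platform:', host['platform'])
print('os/target:', host['os'], host['target'])
print('arch string:', host['arch_str'], 'on host', host['hostname'])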
def get_specs(force=False, allarch=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')
    arch_pattern = ('([^-]*-[^-]*-[^-]*)')
    if not allarch:
        arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)

    regex_pattern = '%s(.*)(spec.yaml$)' % (arch_pattern)
    arch_re = re.compile(regex_pattern)

    if not spack.mirror.MirrorCollection():
        tty.debug("No Spack mirrors are currently configured")
        return {}

    urls = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding buildcaches in %s" % mirror_dir)
            if os.path.exists(mirror_dir):
                files = os.listdir(mirror_dir)
                for file in files:
                    m = arch_re.search(file)
                    if m:
                        link = url_util.join(fetch_url_build_cache, file)
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches at %s" %
                    url_util.format(fetch_url_build_cache))
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'))
            for link in links:
                m = arch_re.search(link)
                if m:
                    urls.add(link)

    return try_download_specs(urls=urls, force=force)
def install(self, spec, prefix):
    """ Create bash setup script in prefix."""
    # first, log spack version to build-out
    tty.msg('* **Spack:**', get_version())
    tty.msg('* **Python:**', platform.python_version())
    tty.msg('* **Platform:**', architecture.Arch(
        architecture.platform(), 'frontend', 'frontend'))

    specs = [spec]
    with spack.store.db.read_transaction():
        specs = [dep for _spec in specs
                 for dep in _spec.traverse(order='post')]
    try:
        gcc_specs = [spack.cmd.disambiguate_spec(str(spec.compiler), None,
                                                 first=True)]
        gcc_specs = [dep for _spec in gcc_specs
                     for dep in _spec.traverse(order='post')]
        specs = specs + gcc_specs
    except:
        tty.warn("No spec found for " + str(spec.compiler) +
                 " Assuming it is a system compiler and not adding it"
                 " to the setup.")

    env_mod = spack.util.environment.EnvironmentModifications()
    for _spec in specs:
        env_mod.extend(uenv.environment_modifications_for_spec(_spec))
        env_mod.prepend_path(uenv.spack_loaded_hashes_var, _spec.dag_hash())

    cmds = k4_generate_setup_script(env_mod)
    with open(os.path.join(prefix, "setup.sh"), "w") as f:
        f.write(cmds)

    try:
        symlink_path = os.environ.get("K4_LATEST_SETUP_PATH", "")
        if symlink_path:
            if not os.path.exists(os.path.dirname(symlink_path)):
                os.makedirs(os.path.dirname(symlink_path))
            if os.path.exists(symlink_path) or os.path.islink(symlink_path):
                os.remove(symlink_path)
            os.symlink(os.path.join(prefix, "setup.sh"), symlink_path)
    except:
        tty.warn("Could not create symlink")
def get_arch(self):
    arch = architecture.Arch()
    arch.platform = architecture.platform()
    return str(arch.platform.target('default_target'))
def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
    # unlike the other functions so it doesn't overwrite what the modules load.
    env = EnvironmentModifications()

    # Remove these vars from the environment during build because they
    # can affect how some packages find libraries. We want to make
    # sure that builds never pull in unintended external dependencies.
    env.unset('LD_LIBRARY_PATH')
    env.unset('LD_RUN_PATH')
    env.unset('DYLD_LIBRARY_PATH')
    env.unset('DYLD_FALLBACK_LIBRARY_PATH')

    # These vars affect how the compiler finds libraries and include dirs.
    env.unset('LIBRARY_PATH')
    env.unset('CPATH')
    env.unset('C_INCLUDE_PATH')
    env.unset('CPLUS_INCLUDE_PATH')
    env.unset('OBJC_INCLUDE_PATH')

    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
    # interference with Spack dependencies.
    # CNL requires these variables to be set (or at least some of them,
    # depending on the CNL version).
    hostarch = arch.Arch(arch.platform(), 'default_os', 'default_target')
    on_cray = str(hostarch.platform) == 'cray'
    using_cnl = re.match(r'cnl\d+', str(hostarch.os))
    if on_cray and not using_cnl:
        env.unset('CRAY_LD_LIBRARY_PATH')
        for varname in os.environ.keys():
            if 'PKGCONF' in varname:
                env.unset(varname)

    # Unset the following variables because they can affect installation of
    # Autotools and CMake packages.
    build_system_vars = [
        'CC', 'CFLAGS', 'CPP', 'CPPFLAGS',    # C variables
        'CXX', 'CCC', 'CXXFLAGS', 'CXXCPP',   # C++ variables
        'F77', 'FFLAGS', 'FLIBS',             # Fortran77 variables
        'FC', 'FCFLAGS', 'FCLIBS',            # Fortran variables
        'LDFLAGS', 'LIBS'                     # linker variables
    ]
    for v in build_system_vars:
        env.unset(v)

    # Unset mpi environment vars. These flags should only be set by
    # mpi providers for packages with mpi dependencies
    mpi_vars = ['MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90']
    for v in mpi_vars:
        env.unset(v)

    build_lang = spack.config.get('config:build_language')
    if build_lang:
        # Override language-related variables. This can be used to force
        # English compiler messages etc., which allows parse_log_events to
        # show useful matches.
        env.set('LC_ALL', build_lang)

    # Remove any macports installs from the PATH. The macports ld can
    # cause conflicts with the built-in linker on el capitan. Solves
    # assembler issues, e.g.:
    #     suffix or operands invalid for `movq'"
    path = get_path('PATH')
    for p in path:
        if '/macports/' in p:
            env.remove_path('PATH', p)

    env.apply_modifications()
def report(args):
    print('* **Spack:**', get_version())
    print('* **Python:**', platform.python_version())
    print('* **Platform:**', architecture.Arch(architecture.platform(),
                                               'frontend', 'frontend'))
def report(args):
    print('* **Spack:**', get_version())
    print('* **Python:**', platform.python_version())
    print('* **Platform:**', architecture.Arch(
        architecture.platform(), 'frontend', 'frontend'))
    print('* **Concretizer:**', spack.config.get('config:concretizer'))