def test_005_db_exists(self):
    """Make sure db cache file exists after creating."""
    index_file = join_path(self.install_path, '.spack-db', 'index.yaml')
    lock_file = join_path(self.install_path, '.spack-db', 'lock')

    self.assertTrue(os.path.exists(index_file))
    self.assertTrue(os.path.exists(lock_file))
def location(parser, args):
    if args.module_dir:
        print spack.module_path

    elif args.spack_root:
        print spack.prefix

    else:
        specs = spack.cmd.parse_specs(args.spec, concretize=True)
        if not specs:
            tty.die("You must supply a spec.")
        if len(specs) != 1:
            tty.die("Too many specs.  Need only one.")
        spec = specs[0]

        if args.install_dir:
            print spec.prefix

        elif args.package_dir:
            print join_path(spack.db.root, spec.name)

        else:
            pkg = spack.db.get(spec)

            if args.stage_dir:
                print pkg.stage.path

            else:  # args.build_dir is the default.
                if not os.listdir(pkg.stage.path):
                    tty.die("Build directory does not exist yet. "
                            "Run this to create it:",
                            "spack stage " + " ".join(args.spec))
                print pkg.stage.expanded_archive_path
def file_name(self):
    parts = [self.token_to_path(x, self.requires[x])
             for x in self.hierarchy_tokens if x in self.requires]
    hierarchy_name = join_path(*parts)
    fullname = join_path(self.modules_root, hierarchy_name,
                         self.use_name + '.lua')
    return fullname
def inspect_path(prefix):
    """
    Inspects the prefix of an installation to search for common layouts.
    Issues a request to modify the environment accordingly when an item
    is found.

    Args:
        prefix: prefix of the installation

    Returns:
        instance of EnvironmentModifications containing the requested
        modifications
    """
    env = EnvironmentModifications()
    # Inspect the prefix to check for the existence of common directories
    prefix_inspections = {
        'bin': ('PATH',),
        'man': ('MANPATH',),
        'lib': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
        'lib64': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
        'include': ('CPATH',)
    }
    for attribute, variables in prefix_inspections.items():
        expected = getattr(prefix, attribute)
        if os.path.isdir(expected):
            for variable in variables:
                env.prepend_path(variable, expected)
    # PKG_CONFIG_PATH
    for expected in (join_path(prefix.lib, 'pkgconfig'),
                     join_path(prefix.lib64, 'pkgconfig')):
        if os.path.isdir(expected):
            env.prepend_path('PKG_CONFIG_PATH', expected)
    # CMake related variables
    env.prepend_path('CMAKE_PREFIX_PATH', prefix)
    return env
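# A minimal usage sketch for inspect_path(), assuming the function above and
# its imports are in scope. Spack prefixes are string subclasses whose
# attribute access yields subdirectory paths; the FakePrefix stand-in below
# is hypothetical and only mimics that behavior for the demo.
import os
import tempfile


class FakePrefix(str):
    """Stand-in for Spack's Prefix: attribute access maps to a subdirectory."""
    def __getattr__(self, name):
        return FakePrefix(os.path.join(self, name))


demo_root = tempfile.mkdtemp()
os.makedirs(os.path.join(demo_root, 'bin'))
os.makedirs(os.path.join(demo_root, 'lib', 'pkgconfig'))

demo_env = inspect_path(FakePrefix(demo_root))
# demo_env now holds prepend-path requests for PATH, LIBRARY_PATH,
# LD_LIBRARY_PATH, PKG_CONFIG_PATH and CMAKE_PREFIX_PATH.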
def _add_to_root_stage(self):
    """
    Move the extracted resource to the root stage (according to placement).
    """
    root_stage = self.root_stage
    resource = self.resource
    placement = os.path.basename(self.source_path) \
        if resource.placement is None \
        else resource.placement
    if not isinstance(placement, dict):
        placement = {'': placement}

    target_path = join_path(root_stage.source_path, resource.destination)

    try:
        os.makedirs(target_path)
    except OSError as err:
        if err.errno == errno.EEXIST and os.path.isdir(target_path):
            pass
        else:
            raise

    for key, value in iteritems(placement):
        destination_path = join_path(target_path, value)
        source_path = join_path(self.source_path, key)

        if not os.path.exists(destination_path):
            tty.info('Moving resource stage\n\tsource : '
                     '{stage}\n\tdestination : {destination}'.format(
                         stage=source_path, destination=destination_path))
            shutil.move(os.path.realpath(source_path), destination_path)
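# The try/except around os.makedirs() above is the Python 2 idiom for
# "create unless it already exists". On Python 3 the same guard collapses
# to exist_ok=True; a self-contained sketch (the path is illustrative):
import os
import tempfile

demo_target = os.path.join(tempfile.gettempdir(), 'resource-stage-demo')
os.makedirs(demo_target, exist_ok=True)  # no error if already present
os.makedirs(demo_target, exist_ok=True)  # idempotent on repeat calls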
def test_do_test(mock_packages, install_mockery, mock_test_stage):
    """Perform a stand-alone test with files to copy."""
    spec = spack.spec.Spec('trivial-smoke-test').concretized()
    test_name = 'test_do_test'
    test_filename = 'test_file.in'

    pkg = spec.package
    pkg.create_extra_test_source()

    test_suite = spack.install_test.TestSuite([spec], test_name)
    test_suite.current_test_spec = spec
    test_suite.current_base_spec = spec
    test_suite.ensure_stage()

    # Save off target paths for current spec since test suite processing
    # assumes testing multiple specs.
    cached_filename = fs.join_path(test_suite.current_test_cache_dir,
                                   pkg.test_source_filename)
    data_filename = fs.join_path(test_suite.current_test_data_dir,
                                 test_filename)

    # Run the test, making sure to retain the test stage directory
    # so we can ensure the files were copied.
    test_suite(remove_directory=False)

    assert os.path.exists(cached_filename)
    assert os.path.exists(data_filename)
def test_source_files(self):
    datadir = join_path(spack_root, 'lib', 'spack',
                        'spack', 'test', 'data')
    files = [
        join_path(datadir, 'sourceme_first.sh'),
        join_path(datadir, 'sourceme_second.sh')
    ]
    env = EnvironmentModifications.from_sourcing_files(*files)
    modifications = env.group_by_name()

    self.assertEqual(len(modifications), 4)

    # Set new variables
    self.assertEqual(len(modifications['NEW_VAR']), 1)
    self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv))
    self.assertEqual(modifications['NEW_VAR'][0].value, 'new')

    # Unset variables
    self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1)
    self.assertTrue(
        isinstance(modifications['EMPTY_PATH_LIST'][0], UnsetEnv))

    # Modified variables
    self.assertEqual(len(modifications['UNSET_ME']), 1)
    self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv))
    self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden')

    self.assertEqual(len(modifications['PATH_LIST']), 3)
    self.assertTrue(isinstance(modifications['PATH_LIST'][0], RemovePath))
    self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third')
    self.assertTrue(isinstance(modifications['PATH_LIST'][1], AppendPath))
    self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth')
    self.assertTrue(isinstance(modifications['PATH_LIST'][2], PrependPath))
    self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first')
def link_tau_arch_dirs(self):
    for subdir in os.listdir(self.prefix):
        for d in ('bin', 'lib'):
            src = join_path(self.prefix, subdir, d)
            dest = join_path(self.prefix, d)
            if os.path.isdir(src) and not os.path.exists(dest):
                os.symlink(join_path(subdir, d), dest)
def test(self):
    try:
        import tempfile

        with tempfile.TemporaryDirectory() as tmp_dir_name:
            test_inp = '''
                stop
            '''
            with open(join_path(tmp_dir_name, 'test.inp'), 'w') as fp:
                fp.write(test_inp)

            test_csh = f'''
                source {self.prefix}/cns_solve_env
                cns_solve < {tmp_dir_name}/test.inp >& {tmp_dir_name}/test_output.txt
            '''
            with open(join_path(tmp_dir_name, 'test.csh'), 'w') as fp:
                fp.write(test_csh)

            csh(join_path(tmp_dir_name, 'test.csh'))

            expected = '''
                ============================================================
                |                                                          |
                |            Crystallography & NMR System (CNS)            |
                |                         CNSsolve                         |
                |                                                          |
                ============================================================
                 Version: 1.2 at patch level 1
                 Status: General release with ARIA enhancements
                ============================================================
            '''.split("\n")
            expected = [line.strip() for line in expected if len(line)]

            ok = True
            with open(f"{tmp_dir_name}/test_output.txt", 'r') as file_handle:
                result = file_handle.readlines()
            result = [line.strip() for line in result if len(line)]

            for line in expected:
                if line not in result:
                    tty.error(f'line --{line}-- not in result')
                    ok = False
                    break

            if not ok:
                tty.error(f'during testing the expected strings {expected} '
                          'were not found in the test output')
                tty.error("")
                tty.error("output was")
                tty.error("")
                for line in result:
                    tty.error(line.strip())
    except Exception as e:
        tty.error('there was an error', e)
def _setup_files(directory):
    # Create a python cache and source file.
    cache_dir = fs.join_path(directory, '__pycache__')
    fs.mkdirp(cache_dir)

    fs.touch(fs.join_path(directory, source_file))
    fs.touch(fs.join_path(directory, cache_files[0]))
    for filename in cache_files:
        # Ensure byte code files in python cache directory
        fs.touch(fs.join_path(cache_dir, filename))
def default_log_file(spec):
    """Computes the default filename for the log file and creates
    the corresponding directory if not present
    """
    fmt = 'test-{x.name}-{x.version}-{hash}.xml'
    basename = fmt.format(x=spec, hash=spec.dag_hash())
    dirname = fs.join_path(spack.var_path, 'junit-report')
    fs.mkdirp(dirname)
    return fs.join_path(dirname, basename)
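# The fmt string above relies on attribute access inside str.format() fields.
# A self-contained illustration with a hypothetical stand-in for a spec:
import collections

FakeSpec = collections.namedtuple('FakeSpec', ['name', 'version'])
demo_spec = FakeSpec(name='zlib', version='1.2.11')

demo_fmt = 'test-{x.name}-{x.version}-{hash}.xml'
print(demo_fmt.format(x=demo_spec, hash='abc123'))
# -> test-zlib-1.2.11-abc123.xml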
def install(self, spec, prefix):
    sombrero_dir = join_path(prefix.bin, 'sombrero')
    mkdirp(sombrero_dir)
    # Install the shell driver
    install('sombrero.sh', prefix.bin)
    # Install all executables
    for i in range(1, 7):
        install(join_path('sombrero', 'sombrero{0}'.format(i)),
                sombrero_dir)
def add_single_spec(spec, mirror_root, categories, **kwargs):
    tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@")))
    spec_exists_in_mirror = True
    try:
        with spec.package.stage:
            # fetcher = stage.fetcher
            # fetcher.fetch()
            # ...
            # fetcher.archive(archive_path)
            for ii, stage in enumerate(spec.package.stage):
                fetcher = stage.fetcher
                if ii == 0:
                    # create a subdirectory for the current package@version
                    archive_path = os.path.abspath(join_path(
                        mirror_root, mirror_archive_path(spec, fetcher)))
                    name = spec.cformat("$_$@")
                else:
                    resource = stage.resource
                    archive_path = os.path.abspath(join_path(
                        mirror_root,
                        mirror_archive_path(spec, fetcher, resource.name)))
                    name = "{resource} ({pkg}).".format(
                        resource=resource.name, pkg=spec.cformat("$_$@"))
                subdir = os.path.dirname(archive_path)
                mkdirp(subdir)

                if os.path.exists(archive_path):
                    tty.msg("{name} : already added".format(name=name))
                else:
                    spec_exists_in_mirror = False
                    fetcher.fetch()
                    if not kwargs.get('no_checksum', False):
                        fetcher.check()
                        tty.msg("{name} : checksum passed".format(name=name))

                    # Fetchers have to know how to archive their files.  Use
                    # that to move/copy/create an archive in the mirror.
                    fetcher.archive(archive_path)
                    tty.msg("{name} : added".format(name=name))

        if spec_exists_in_mirror:
            categories['present'].append(spec)
        else:
            categories['mirrored'].append(spec)
    except Exception as e:
        if spack.debug:
            sys.excepthook(*sys.exc_info())
        else:
            tty.warn("Error while fetching %s" % spec.cformat('$_$@'),
                     e.message)
        categories['error'].append(spec)
def all_package_names(self):
    """Returns a sorted list of all package names.

    This looks for ``<pkg_name>/package.py`` files within the root
    directory."""
    all_package_names = []
    for pkg_name in os.listdir(self.root):
        pkg_dir = join_path(self.root, pkg_name)
        pkg_file = join_path(pkg_dir, _package_file_name)
        if os.path.isfile(pkg_file):
            all_package_names.append(pkg_name)
    all_package_names.sort()
    return all_package_names
def location(parser, args):
    if args.module_dir:
        print spack.module_path

    elif args.spack_root:
        print spack.prefix

    elif args.packages:
        print spack.db.root

    else:
        specs = spack.cmd.parse_specs(args.spec)
        if not specs:
            tty.die("You must supply a spec.")
        if len(specs) != 1:
            tty.die("Too many specs.  Supply only one.")
        spec = specs[0]

        if args.install_dir:
            # install_dir command matches against installed specs.
            matching_specs = spack.db.get_installed(spec)
            if not matching_specs:
                tty.die("Spec '%s' matches no installed packages." % spec)

            elif len(matching_specs) > 1:
                args = ["%s matches multiple packages." % spec,
                        "Matching packages:"]
                args += ["  " + str(s) for s in matching_specs]
                args += ["Use a more specific spec."]
                tty.die(*args)

            print matching_specs[0].prefix

        elif args.package_dir:
            # This one just needs the spec name.
            print join_path(spack.db.root, spec.name)

        else:
            # These versions need concretized specs.
            spec.concretize()
            pkg = spack.db.get(spec)

            if args.stage_dir:
                print pkg.stage.path

            else:  # args.build_dir is the default.
                if not pkg.stage.source_path:
                    tty.die("Build directory does not exist yet. "
                            "Run this to create it:",
                            "spack stage " + " ".join(args.spec))
                print pkg.stage.source_path
def setup_dependent_package(self, module, dependent_spec):
    """
    Called before lua modules' install() methods.

    In most cases, extensions will only need a line like::

        luarocks('--tree=' + prefix, 'install', rock_spec_path)
    """
    # Lua extension builds can have lua and luarocks executable functions
    module.lua = Executable(join_path(self.spec.prefix.bin, 'lua'))
    module.luarocks = Executable(
        join_path(self.spec.prefix.bin, 'luarocks'))
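# A hedged sketch of a dependent package using the injected helper. The
# package name and rockspec file are invented for illustration; `luarocks`
# is the Executable injected by setup_dependent_package() above, and
# Package/extends come from Spack's package DSL.
class LuaLpeg(Package):
    """Illustrative Lua extension recipe (an assumption, not a real package)."""

    extends('lua')

    def install(self, spec, prefix):
        # Delegate the build to luarocks, installing into this package's tree.
        luarocks('--tree=' + prefix, 'install', 'lpeg-1.0.2-1.rockspec')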
def install(self, spec, prefix):
    # edit cns_solve_environment to allow a build
    shutil.copy('cns_solve_env', 'cns_solve_env.back')
    filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                f"setenv CNS_SOLVE '{self.stage.source_path}'",
                'cns_solve_env')

    # Copy over an almost-right machine make file. We could have taken it
    # from v1.3, but this is simpler.
    src_file = 'instlib/machine/supported/intel-x86_64bit-linux/' \
               'Makefile.header.2.gfortran'
    dest_file = 'instlib/machine/supported/mac-intel-darwin/' \
                'Makefile.header.5.gfortran'
    shutil.move(src_file, dest_file)

    if not self.spec.satisfies('%fortran@:10.0.0'):
        # Patch the machine make file. This can't be done with a patch
        # directive because the file doesn't exist until we copy it.
        # Just copying the file from the package directory caused a lockup.
        patch = which('patch')
        patch_file = join_path(
            package_root, 'nmrpack/packages/cns',
            'gfortran_10_allow_argument_mismatch.patch')
        patch('-p1', '-i', patch_file)

    if '+aria' in self.spec:
        from_path = pathlib.Path('aria2.3/cns/src')
        to_path = 'source'

        for target_file in from_path.iterdir():
            if target_file.is_file() and target_file.suffix in ('.f', '.inc'):
                print(f'copying {target_file} to {to_path}')
                shutil.copy(target_file, to_path)
            if target_file.is_dir():
                print(f'copying {target_file} to {to_path}')
                shutil.copytree(target_file,
                                join_path(to_path, target_file.name))

        shutil.copytree(from_path, 'aria2.3_patches_applied')
        shutil.rmtree('aria2.3')

    make('install')

    install_tree('.', prefix)

    with working_dir(prefix):
        shutil.move('cns_solve_env.back', 'cns_solve_env')
        replacement_env = f" setenv CNS_SOLVE '{prefix}'"
        filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                    replacement_env, 'cns_solve_env')

    # remove a leftover from our previous edits
    os.remove(pathlib.Path(prefix) / pathlib.Path('cns_solve_env' + '~'))
def check_expand_archive(stage, stage_name, mock_archive):
    stage_path = get_stage_path(stage, stage_name)
    archive_name = 'test-files.tar.gz'
    archive_dir = 'test-files'
    assert archive_name in os.listdir(stage_path)
    assert archive_dir in os.listdir(stage_path)

    assert join_path(stage_path, archive_dir) == stage.source_path

    readme = join_path(stage_path, archive_dir, 'README.txt')
    assert os.path.isfile(readme)
    with open(readme) as file:
        assert 'hello world!\n' == file.read()
def check_mirror():
    with Stage('spack-mirror-test') as stage:
        mirror_root = join_path(stage.path, 'test-mirror')

        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        spack.config.update_config('mirrors', mirrors)

        os.chdir(stage.path)
        spack.mirror.create(mirror_root, repos, no_checksum=True)

        # Stage directory exists
        assert os.path.isdir(mirror_root)

        # check that there are subdirs for each package
        for name in repos:
            subdir = join_path(mirror_root, name)
            assert os.path.isdir(subdir)

            files = os.listdir(subdir)
            assert len(files) == 1

        # Now try to fetch each package.
        for name, mock_repo in repos.items():
            spec = Spec(name).concretized()
            pkg = spec.package

            saved_checksum_setting = spack.do_checksum
            with pkg.stage:
                # Stage the archive from the mirror and cd to it.
                spack.do_checksum = False
                pkg.do_stage(mirror_only=True)

                # Compare the original repo with the expanded archive
                original_path = mock_repo.path
                if 'svn' in name:
                    # have to check out the svn repo to compare.
                    original_path = join_path(mock_repo.path, 'checked_out')
                    svn = which('svn', required=True)
                    svn('checkout', mock_repo.url, original_path)

                dcmp = filecmp.dircmp(original_path, pkg.stage.source_path)

                # make sure there are no new files in the expanded tarball
                assert not dcmp.right_only

                # and that all original files are present.
                assert all(l in exclude for l in dcmp.left_only)

            spack.do_checksum = saved_checksum_setting
def __init__(self, root, db_dir=None):
    """Create a Database for Spack installations under ``root``.

    A Database is a cache of Specs data from ``$prefix/spec.yaml``
    files in Spack installation directories.

    By default, Database files (data and lock files) are stored
    under ``root/.spack-db``, which is created if it does not
    exist.  This is the ``db_dir``.

    The Database will attempt to read an ``index.json`` file in
    ``db_dir``.  If it does not find one, it will fall back to read
    an ``index.yaml`` if one is present.  If that does not exist, it
    will create a database when needed by scanning the entire
    Database root for ``spec.yaml`` files according to Spack's
    ``DirectoryLayout``.

    Caller may optionally provide a custom ``db_dir`` parameter
    where data will be stored.  This is intended to be used for
    testing the Database class.
    """
    self.root = root

    if db_dir is None:
        # If the db_dir is not provided, default to within the db root.
        self._db_dir = join_path(self.root, _db_dirname)
    else:
        # Allow customizing the database directory location for testing.
        self._db_dir = db_dir

    # Set up layout of database files within the db dir
    self._old_yaml_index_path = join_path(self._db_dir, 'index.yaml')
    self._index_path = join_path(self._db_dir, 'index.json')
    self._lock_path = join_path(self._db_dir, 'lock')

    # This is for other classes to use to lock prefix directories.
    self.prefix_lock_path = join_path(self._db_dir, 'prefix_lock')

    # Create needed directories and files
    if not os.path.exists(self._db_dir):
        mkdirp(self._db_dir)

    # initialize rest of state.
    self.lock = Lock(self._lock_path)
    self._data = {}

    # whether there was an error at the start of a read transaction
    self._error = None
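# A hedged usage sketch: construct a Database over a scratch root, using the
# custom db_dir hook the docstring describes for testing. Paths are
# illustrative only, and a working Spack import environment is assumed.
import tempfile

demo_db = Database(tempfile.mkdtemp(), db_dir=tempfile.mkdtemp())
# The index, lock, and prefix-lock files now live under the custom db_dir
# rather than <root>/.spack-db.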
def test_source_files(self):
    datadir = join_path(spack_root, 'lib', 'spack',
                        'spack', 'test', 'data')
    files = [
        join_path(datadir, 'sourceme_first.sh'),
        join_path(datadir, 'sourceme_second.sh'),
        join_path(datadir, 'sourceme_parameters.sh intel64')
    ]
    env = EnvironmentModifications.from_sourcing_files(*files)
    modifications = env.group_by_name()

    # This is sensitive to the user's environment; can include
    # spurious entries for things like PS1
    #
    # TODO: figure out how to make this a bit more robust.
    self.assertTrue(len(modifications) >= 4)

    # Set new variables
    self.assertEqual(len(modifications['NEW_VAR']), 1)
    self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv))
    self.assertEqual(modifications['NEW_VAR'][0].value, 'new')

    self.assertEqual(len(modifications['FOO']), 1)
    self.assertTrue(isinstance(modifications['FOO'][0], SetEnv))
    self.assertEqual(modifications['FOO'][0].value, 'intel64')

    # Unset variables
    self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1)
    self.assertTrue(isinstance(
        modifications['EMPTY_PATH_LIST'][0], UnsetEnv))

    # Modified variables
    self.assertEqual(len(modifications['UNSET_ME']), 1)
    self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv))
    self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden')

    self.assertEqual(len(modifications['PATH_LIST']), 3)
    self.assertTrue(
        isinstance(modifications['PATH_LIST'][0], RemovePath)
    )
    self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third')
    self.assertTrue(
        isinstance(modifications['PATH_LIST'][1], AppendPath)
    )
    self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth')
    self.assertTrue(
        isinstance(modifications['PATH_LIST'][2], PrependPath)
    )
    self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first')
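# A hedged sketch of the API exercised by the two tests above: source a
# shell script and inspect the resulting modifications grouped by variable
# name. The script path is hypothetical and must exist for this to run.
demo_env = EnvironmentModifications.from_sourcing_files('/path/to/setup.sh')
for demo_name, demo_mods in demo_env.group_by_name().items():
    print(demo_name, [type(m).__name__ for m in demo_mods])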
def __init__(self, pkg, path_or_url, level):
    super(FilePatch, self).__init__(pkg, path_or_url, level)

    pkg_dir = spack.repo.dirname_for_package_name(pkg.name)
    self.path = join_path(pkg_dir, path_or_url)
    if not os.path.isfile(self.path):
        raise NoSuchPatchFileError(pkg.name, self.path)
def paths(self):
    if self._paths is None:
        self._paths = {}

        def add_path(path_name, directory):
            path = self._paths.setdefault(path_name, [])
            path.append(directory)

        # Add paths if they exist.
        for var, directory in [
                ('PATH', self.spec.prefix.bin),
                ('MANPATH', self.spec.prefix.man),
                ('MANPATH', self.spec.prefix.share_man),
                ('LD_LIBRARY_PATH', self.spec.prefix.lib),
                ('LD_LIBRARY_PATH', self.spec.prefix.lib64)]:
            if os.path.isdir(directory):
                add_path(var, directory)

        # Add python path unless it's an actual python installation
        # TODO: is there a better way to do this?
        if self.spec.name != 'python':
            site_packages = glob(
                join_path(self.spec.prefix.lib, "python*/site-packages"))
            if site_packages:
                add_path('PYTHONPATH', site_packages[0])

        # short description is just the package + version
        # TODO: maybe packages can optionally provide it.
        self.short_description = self.spec.format("$_ $@")

        # long description is the docstring with reduced whitespace.
        if self.spec.package.__doc__:
            self.long_description = re.sub(r'\s+', ' ',
                                           self.spec.package.__doc__)

    return self._paths
def resource(pkg, **kwargs):
    """
    Define an external resource to be fetched and staged when building the
    package. Based on the keywords present in the dictionary the appropriate
    FetchStrategy will be used for the resource. Resources are fetched and
    staged in their own folder inside spack stage area, and then linked into
    the stage area of the package that needs them.

    List of recognized keywords:

    * 'when' : (optional) represents the condition upon which the resource
      is needed
    * 'destination' : (optional) path where to link the resource. This path
      must be relative to the main package stage area.
    * 'placement' : (optional) gives the possibility to fine tune how the
      resource is linked into the main package stage area.
    """
    when = kwargs.get('when', pkg.name)
    destination = kwargs.get('destination', "")
    placement = kwargs.get('placement', None)

    # Check if the path is relative
    if os.path.isabs(destination):
        message = ("The destination keyword of a resource directive "
                   "can't be an absolute path.\n")
        message += "\tdestination : '{dest}'\n".format(dest=destination)
        raise RuntimeError(message)

    # Check if the path falls within the main package stage area
    test_path = 'stage_folder_root/'
    # Normalized absolute path
    normalized_destination = os.path.normpath(
        join_path(test_path, destination))
    if test_path not in normalized_destination:
        message = ("The destination folder of a resource must fall within "
                   "the main package stage directory.\n")
        message += "\tdestination : '{dest}'\n".format(dest=destination)
        raise RuntimeError(message)

    when_spec = parse_anonymous_spec(when, pkg.name)
    resources = pkg.resources.setdefault(when_spec, [])
    name = kwargs.get('name')
    fetcher = from_kwargs(**kwargs)
    resources.append(Resource(name, fetcher, destination, placement))
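# A hedged sketch of the directive in use inside a package recipe; the
# package name, URL, checksum, and variant below are invented for
# illustration, and Package comes from Spack's package DSL.
class DemoPackage(Package):
    """Illustrative package that stages an extra tarball next to its own."""

    resource(name='extra-data',
             url='https://example.com/extra-data-1.0.tar.gz',
             md5='0123456789abcdef0123456789abcdef',
             destination='externals',   # relative to the package stage area
             placement='extra-data',    # fine-tunes where the link lands
             when='+extras')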
def test_nonexisting_package_filename(self):
    filename = spack.db.filename_for_package_name(
        'some-nonexisting-package')
    self.assertEqual(
        filename,
        join_path(spack.mock_packages_path,
                  'some-nonexisting-package', 'package.py'))
def global_license_file(self):
    """Returns the path where a global license file should be stored.

    All Intel software shares the same license, so we store it in a
    common 'intel' directory."""
    return join_path(self.global_license_dir, 'intel',
                     os.path.basename(self.license_files[0]))
def find_compilers(*path):
    """Return a list of compilers found in the supplied paths.
    This invokes the find() method for each Compiler class,
    and appends the compilers detected to a list.
    """
    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in path:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists
        bin = join_path(p, 'bin')
        if os.path.isdir(bin):
            filtered_path.append(os.path.realpath(bin))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    types = all_compiler_types()
    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)

    # ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    clist = reduce(lambda x, y: x + y, compiler_lists)
    return clist
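# A hedged usage sketch: scan the directories on PATH for compilers. This
# assumes a working Spack import environment; the printed attribute follows
# Spack's Compiler convention (treat it as an assumption).
import os

demo_dirs = os.environ.get('PATH', '').split(os.pathsep)
for compiler in find_compilers(*demo_dirs):
    print(compiler.spec)   # e.g. gcc@4.9.2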
def autoreconf(self, spec, prefix):
    """Not needed usually, configure should be already there"""
    # If configure exists nothing needs to be done
    if os.path.exists(self.configure_abs_path):
        return

    # Else try to regenerate it
    autotools = ['m4', 'autoconf', 'automake', 'libtool']
    missing = [x for x in autotools if x not in spec]
    if missing:
        msg = 'Cannot generate configure: missing dependencies {0}'
        raise RuntimeError(msg.format(missing))

    tty.msg('Configure script not found: trying to generate it')
    tty.warn('*********************************************************')
    tty.warn('* If the default procedure fails, consider implementing *')
    tty.warn('*        a custom AUTORECONF phase in the package       *')
    tty.warn('*********************************************************')
    with working_dir(self.configure_directory):
        m = inspect.getmodule(self)
        # This part should be redundant in principle, but
        # won't hurt
        m.libtoolize()
        m.aclocal()
        # This line is what is needed most of the time
        # --install, --verbose, --force
        autoreconf_args = ['-ivf']
        if 'pkg-config' in spec:
            autoreconf_args += [
                '-I',
                join_path(spec['pkg-config'].prefix, 'share', 'aclocal'),
            ]
        autoreconf_args += self.autoreconf_extra_args
        m.autoreconf(*autoreconf_args)
def get_origin_url():
    git_dir = join_path(spack.prefix, '.git')
    git = which('git', required=True)
    origin_url = git(
        '--git-dir=%s' % git_dir,
        'config', '--get', 'remote.origin.url',
        return_output=True)
    return origin_url.strip()
def __init__(self, spec=None):
    super(LmodModule, self).__init__(spec)

    # Sets the root directory for this architecture
    self.modules_root = join_path(LmodModule.path, self.spec.architecture)

    # Retrieve core compilers
    self.core_compilers = self.configuration.get('core_compilers', [])

    # Keep track of the requirements that this package has in terms of
    # virtual packages that participate in the hierarchical structure
    self.requires = {'compiler': self.spec.compiler}
    # For each virtual dependency in the hierarchy
    for x in self.hierarchy_tokens:
        # if I depend on it
        if x in self.spec and not self.spec.package.provides(x):
            self.requires[x] = self.spec[x]  # record the actual provider

    # Check what are the services I need (this will determine where the
    # module file will be written)
    self.substitutions = {}
    self.substitutions.update(self.requires)
    # TODO : complete substitutions

    # Check what service I provide to others
    self.provides = {}
    # If it is in the list of supported compilers family -> compiler
    if self.spec.name in spack.compilers.supported_compilers():
        self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
    # Special case for llvm
    if self.spec.name == 'llvm':
        self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
        self.provides['compiler'].name = 'clang'

    for x in self.hierarchy_tokens:
        if self.spec.package.provides(x):
            self.provides[x] = self.spec[x]
def _execute(pkg):
    when = kwargs.get('when', pkg.name)
    destination = kwargs.get('destination', "")
    placement = kwargs.get('placement', None)

    # Check if the path is relative
    if os.path.isabs(destination):
        message = ("The destination keyword of a resource directive "
                   "can't be an absolute path.\n")
        message += "\tdestination : '{dest}'\n".format(dest=destination)
        raise RuntimeError(message)

    # Check if the path falls within the main package stage area
    test_path = 'stage_folder_root'
    # Normalized absolute path
    normalized_destination = os.path.normpath(
        join_path(test_path, destination))
    if test_path not in normalized_destination:
        message = ("The destination folder of a resource must fall "
                   "within the main package stage directory.\n")
        message += "\tdestination : '{dest}'\n".format(dest=destination)
        raise RuntimeError(message)

    when_spec = parse_anonymous_spec(when, pkg.name)
    resources = pkg.resources.setdefault(when_spec, [])
    name = kwargs.get('name')
    fetcher = from_kwargs(**kwargs)
    resources.append(Resource(name, fetcher, destination, placement))
def edit(parser, args):
    name = args.name

    if args.edit_command:
        if not name:
            path = spack.cmd.command_path
        else:
            path = join_path(spack.cmd.command_path, name + ".py")
            if not os.path.exists(path):
                tty.die("No command named '%s'." % name)

    else:
        # By default open the directory where packages or commands live.
        if not name:
            path = spack.packages_path
        else:
            path = spack.db.filename_for_package_name(name)

            if os.path.exists(path):
                if not os.path.isfile(path):
                    tty.die("Something's wrong.  '%s' is not a file!" % path)
                if not os.access(path, os.R_OK | os.W_OK):
                    tty.die("Insufficient permissions on '%s'!" % path)
            elif not args.force:
                tty.die("No package '%s'.  Use spack create, or supply "
                        "-f/--force to edit a new file." % name)
            else:
                mkdirp(os.path.dirname(path))
                with closing(open(path, "w")) as pkg_file:
                    pkg_file.write(package_template.substitute(
                        name=name, class_name=mod_to_class(name)))

    # If everything checks out, go ahead and edit.
    spack.editor(path)
def make_path_for_spec(self, spec):
    _check_concrete(spec)

    path = self.path_for_spec(spec)
    spec_file_path = join_path(path, self.spec_file)

    if os.path.isdir(path):
        if not os.path.isfile(spec_file_path):
            raise InconsistentInstallDirectoryError(
                'No spec file found at path %s' % spec_file_path)

        installed_spec = self.read_spec(spec_file_path)
        if installed_spec == spec:
            raise InstallDirectoryAlreadyExistsError(path)

        spec_hash = self.hash_spec(spec)
        installed_hash = self.hash_spec(installed_spec)
        if installed_hash == spec_hash:
            raise SpecHashCollisionError(
                installed_hash, spec_hash, self.prefix_size)
        else:
            raise InconsistentInstallDirectoryError(
                'Spec file in %s does not match SHA-1 hash!'
                % spec_file_path)

    mkdirp(path)
    self.write_spec(spec, spec_file_path)
def bootstrap(parser, args):
    origin_url = get_origin_url()
    prefix = args.prefix

    tty.msg("Fetching spack from origin: %s" % origin_url)

    if os.path.exists(join_path(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there!  "
                "Delete these files before bootstrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    check_call(['git', 'init', '--shared', '-q'])
    check_call(['git', 'remote', 'add', 'origin', origin_url])
    check_call(['git', 'fetch', 'origin',
                'master:refs/remotes/origin/master', '-n', '-q'])
    check_call(['git', 'reset', '--hard', 'origin/master', '-q'])

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
def clone(parser, args):
    origin_url, branch = get_origin_info(args.remote)
    prefix = args.prefix

    tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))

    if os.path.isfile(prefix):
        tty.die("There is already a file at %s" % prefix)

    mkdirp(prefix)

    if os.path.exists(join_path(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there!  "
                "Delete these files before bootstrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    git = which('git', required=True)
    git('init', '--shared', '-q')
    git('remote', 'add', 'origin', origin_url)
    git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
        '-n', '-q')
    git('reset', '--hard', 'origin/%s' % branch, '-q')
    git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
def configure(self, spec, prefix):
    """Configure the package."""
    configure = self.configure_file()

    args = self.configure_args()

    python_include_dir = os.path.basename(
        inspect.getmodule(self).python_include_dir)

    args.extend([
        '--verbose',
        '--confirm-license',
        '--qmake', spec['qt'].prefix.bin.qmake,
        '--sip', spec['py-sip'].prefix.bin.sip,
        '--sip-incdir',
        join_path(spec['py-sip'].prefix.include, python_include_dir),
        '--bindir', prefix.bin,
        '--destdir', inspect.getmodule(self).site_packages_dir,
    ])

    self.python(configure, *args)
def bootstrap(parser, args):
    origin_url, branch = get_origin_info(args.remote)
    prefix = args.prefix

    tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))

    if os.path.isfile(prefix):
        tty.die("There is already a file at %s" % prefix)

    mkdirp(prefix)

    if os.path.exists(join_path(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there!  "
                "Delete these files before bootstrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    git = which('git', required=True)
    git('init', '--shared', '-q')
    git('remote', 'add', 'origin', origin_url)
    git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
        '-n', '-q')
    git('reset', '--hard', 'origin/%s' % branch, '-q')
    git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
class TclModule(EnvModule):
    name = 'tcl'
    path = join_path(spack.share_path, "modules")

    @property
    def file_name(self):
        return join_path(TclModule.path, self.spec.architecture,
                         self.use_name)

    @property
    def use_name(self):
        return self.spec.format('$_$@$%@$+$#')

    def _write(self, m_file):
        # TODO: category?
        m_file.write('#%Module1.0\n')

        # Short description
        if self.short_description:
            m_file.write('module-whatis \"%s\"\n\n' % self.short_description)

        # Long description
        if self.long_description:
            m_file.write('proc ModulesHelp { } {\n')
            doc = re.sub(r'"', r'\"', self.long_description)
            m_file.write("puts stderr \"%s\"\n" % doc)
            m_file.write('}\n\n')

        # Path alterations
        for var, dirs in self.paths.items():
            for directory in dirs:
                m_file.write("prepend-path %s \"%s\"\n" % (var, directory))

        m_file.write("prepend-path CMAKE_PREFIX_PATH \"%s\"\n"
                     % self.spec.prefix)
class Dotkit(EnvModule):
    name = 'dotkit'
    path = join_path(spack.share_path, "dotkit")

    @property
    def file_name(self):
        return join_path(Dotkit.path, self.spec.architecture,
                         self.spec.format('$_$@$%@$+$#.dk'))

    @property
    def use_name(self):
        return self.spec.format('$_$@$%@$+$#')

    def _write(self, dk_file):
        # Category
        if self.category:
            dk_file.write('#c %s\n' % self.category)

        # Short description
        if self.short_description:
            dk_file.write('#d %s\n' % self.short_description)

        # Long description
        if self.long_description:
            for line in textwrap.wrap(self.long_description, 72):
                dk_file.write("#h %s\n" % line)

        # Path alterations
        for var, dirs in self.paths.items():
            for directory in dirs:
                dk_file.write("dk_alter %s %s\n" % (var, directory))

        # Let CMake find this package.
        dk_file.write("dk_alter CMAKE_PREFIX_PATH %s\n" % self.spec.prefix)
def all_specs(self):
    if not os.path.isdir(self.root):
        return []

    pattern = join_path(self.root, '*', '*', '*',
                        self.metadata_dir, self.spec_file_name)
    spec_files = glob.glob(pattern)
    return [self.read_spec(s) for s in spec_files]
def location(parser, args):
    if args.module_dir:
        print spack.module_path

    elif args.spack_root:
        print spack.prefix

    elif args.packages:
        print spack.db.root

    elif args.stages:
        print spack.stage_path

    else:
        specs = spack.cmd.parse_specs(args.spec)
        if not specs:
            tty.die("You must supply a spec.")
        if len(specs) != 1:
            tty.die("Too many specs.  Supply only one.")

        if args.install_dir:
            # install_dir command matches against installed specs.
            spec = spack.cmd.disambiguate_spec(specs[0])
            print spec.prefix

        else:
            spec = specs[0]

            if args.package_dir:
                # This one just needs the spec name.
                print join_path(spack.db.root, spec.name)

            else:
                # These versions need concretized specs.
                spec.concretize()
                pkg = spack.db.get(spec)

                if args.stage_dir:
                    print pkg.stage.path

                else:  # args.build_dir is the default.
                    if not pkg.stage.source_path:
                        tty.die("Build directory does not exist yet. "
                                "Run this to create it:",
                                "spack stage " + " ".join(args.spec))
                    print pkg.stage.source_path
def symlink_license(pkg):
    """Create local symlinks that point to the global license file."""
    target = pkg.global_license_file
    for filename in pkg.license_files:
        link_name = join_path(pkg.prefix, filename)
        if os.path.exists(target):
            os.symlink(target, link_name)
            tty.msg("Added local symlink %s to global license file"
                    % link_name)
def test_package_filename(builtin_mock):
    repo = Repo(spack.mock_packages_path)
    filename = repo.filename_for_package_name('mpich')
    assert filename == join_path(
        spack.mock_packages_path, 'packages', 'mpich', 'package.py'
    )
def test_nonexisting_package_filename():
    repo = Repo(spack.mock_packages_path)
    filename = repo.filename_for_package_name('some-nonexisting-package')
    assert filename == join_path(
        spack.mock_packages_path,
        'packages', 'some-nonexisting-package', 'package.py'
    )
def all_hook_modules():
    modules = []
    for name in list_modules(spack.hooks_path):
        mod_name = __name__ + '.' + name
        path = join_path(spack.hooks_path, name) + ".py"
        mod = imp.load_source(mod_name, path)
        modules.append(mod)
    return modules
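# A hedged sketch of how loaded hook modules are typically dispatched: look
# up an entry point by name on each module and call it when present. The
# 'post_install' name follows Spack's hook convention; treat the helper and
# the `pkg` argument as illustrative assumptions.
def run_hook(hook_name, *args):
    for hook_module in all_hook_modules():
        hook = getattr(hook_module, hook_name, None)
        if hook:
            hook(*args)

# e.g. run_hook('post_install', pkg) after a package install finishes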
def setup_platform_environment(cls, pkg, env):
    """Change the linker to default dynamic to be more similar to
    linux/standard linker behavior
    """
    env.set('CRAYPE_LINK_TYPE', 'dynamic')
    cray_wrapper_names = join_path(build_env_path, 'cray')
    if os.path.isdir(cray_wrapper_names):
        env.prepend_path('PATH', cray_wrapper_names)
        env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
def all_specs(self):
    if not os.path.isdir(self.root):
        return []

    path_elems = ["*"] * len(self.path_scheme.split(os.sep))
    path_elems += [self.metadata_dir, self.spec_file_name]
    pattern = join_path(self.root, *path_elems)
    spec_files = glob.glob(pattern)
    return [self.read_spec(s) for s in spec_files]
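# A self-contained illustration of the pattern construction above, with a
# hypothetical three-level path scheme and metadata names standing in for
# self.path_scheme, self.metadata_dir and self.spec_file_name:
import os

demo_scheme = os.sep.join(['arch', 'compiler', 'name-version'])
demo_elems = ['*'] * len(demo_scheme.split(os.sep))
demo_elems += ['.spack', 'spec.yaml']
print(os.path.join('/opt/spack', *demo_elems))
# -> /opt/spack/*/*/*/.spack/spec.yaml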
def setup_environment(self, spack_env, run_env):
    pattern = join_path(self.prefix.lib, 'Makefile.*')
    files = glob.glob(pattern)

    # This function is called both at install time to set up
    # the build environment and after install to generate the associated
    # module file. In the former case there is no `self.prefix.lib`
    # directory to inspect. The conditional below will set `TAU_MAKEFILE`
    # in the latter case.
    if files:
        run_env.set('TAU_MAKEFILE', files[0])