def test_resolve_paths(self):
    """Test _resolve_paths with empty, absolute, and $tempdir-based paths."""
    # An empty candidate list resolves to an empty list.
    assert spack.stage._resolve_paths([]) == []

    # resolved path without user appends user
    paths = [os.path.join(os.path.sep, 'a', 'b', 'c')]
    user = getpass.getuser()
    can_paths = [os.path.join(paths[0], user)]
    assert spack.stage._resolve_paths(paths) == can_paths

    # resolved path with node including user does not append user
    paths = [os.path.join(os.path.sep, 'spack-{0}'.format(user), 'stage')]
    assert spack.stage._resolve_paths(paths) == paths

    tempdir = '$tempdir'
    can_tempdir = canonicalize_path(tempdir)
    user = getpass.getuser()
    temp_has_user = user in can_tempdir.split(os.sep)
    paths = [os.path.join(tempdir, 'stage'),
             os.path.join(tempdir, '$user'),
             os.path.join(tempdir, '$user', '$user'),
             os.path.join(tempdir, '$user', 'stage', '$user')]

    res_paths = [canonicalize_path(p) for p in paths]
    if temp_has_user:
        # Hosts whose temp dir already ends in the user name collapse
        # the redundant '$user' node instead of duplicating it.
        res_paths[1] = can_tempdir
        res_paths[2] = os.path.join(can_tempdir, user)
        res_paths[3] = os.path.join(can_tempdir, 'stage', user)
    else:
        # Otherwise the user name is appended to make the path unique.
        res_paths[0] = os.path.join(res_paths[0], user)

    assert spack.stage._resolve_paths(paths) == res_paths
def test_substitute_tempdir(mock_low_high_config):
    """'$tempdir' expands to the system temp dir, alone or as a prefix."""
    system_tmp = tempfile.gettempdir()

    # Bare '$tempdir' resolves to exactly the system temp directory.
    assert spack_path.canonicalize_path('$tempdir') == system_tmp

    # '$tempdir' as a prefix keeps the trailing components intact.
    expected = system_tmp + os.sep + os.path.join('foo', 'bar', 'baz')
    resolved = spack_path.canonicalize_path(
        os.path.join('$tempdir', 'foo', 'bar', 'baz'))
    assert resolved == expected
def _resolve_paths(candidates):
    """Resolve candidate paths and make user-related adjustments.

    Adjustments involve removing extra $user from $tempdir if $tempdir
    includes $user and appending $user if it is not present in the path.

    Args:
        candidates (list): candidate path strings, possibly containing
            unexpanded substitution variables (e.g. ``$tempdir``, ``$user``)

    Returns:
        list: canonicalized paths, each made unique to the current user
    """
    temp_path = sup.canonicalize_path('$tempdir')
    user = getpass.getuser()
    tmp_has_usr = user in temp_path.split(os.path.sep)

    paths = []
    for path in candidates:
        # Remove the extra `$user` node from a `$tempdir/$user` entry for
        # hosts that automatically append `$user` to `$tempdir`.
        if path.startswith(os.path.join('$tempdir', '$user')) and tmp_has_usr:
            # Use os.path.sep (not a literal '/') so the removal matches
            # the separator os.path.join used in the startswith check above.
            path = path.replace(os.path.sep + '$user', '', 1)

        # Ensure the path is unique per user.
        can_path = sup.canonicalize_path(path)
        if user not in can_path:
            can_path = os.path.join(can_path, user)

        paths.append(can_path)

    return paths
def check_canonical(self, var, expected):
    """Verify that <var> is replaced by <expected> when it appears at the
    start, end, or middle of a path string."""
    path = '/foo/bar/baz'
    cases = [
        (var + path, expected + path),        # leading variable
        (path + var, path + '/' + expected),  # trailing variable
        (path + var + path, expected + path)  # embedded variable
    ]
    for raw, resolved in cases:
        self.assertEqual(canonicalize_path(raw), resolved)
def test_resolve_paths(self):
    """Test _resolve_paths."""
    # Empty input yields empty output.
    assert spack.stage._resolve_paths([]) == []

    # An absolute path with no variables is passed through untouched.
    plain = [os.path.join(os.path.sep, 'a', 'b', 'c')]
    assert spack.stage._resolve_paths(plain) == plain

    # $tempdir paths are canonicalized; a user component is appended
    # to '$tempdir/$user' only when the temp dir lacks the user name.
    tmp = '$tempdir'
    candidates = [os.path.join(tmp, 'stage'), os.path.join(tmp, '$user')]
    expected = [canonicalize_path(candidates[0]), canonicalize_path(tmp)]
    user = getpass.getuser()
    if user not in expected[1].split(os.path.sep):
        expected[1] = os.path.join(expected[1], user)
    assert spack.stage._resolve_paths(candidates) == expected
def _resolve_paths(candidates):
    """Resolve paths, removing extra $user from $tempdir if needed.

    Args:
        candidates (list): candidate path strings that may contain the
            ``$tempdir`` and ``$user`` substitution variables

    Returns:
        list: the canonicalized paths
    """
    temp_path = sup.canonicalize_path('$tempdir')
    tmp_has_usr = getpass.getuser() in temp_path.split(os.path.sep)

    # Prefix whose '$user' node is redundant when the expanded $tempdir
    # already ends with the user name.
    redundant_prefix = os.path.join('$tempdir', '$user')

    paths = []
    for path in candidates:
        # First remove the extra `$user` node from a `$tempdir/$user` entry
        # for hosts that automatically append `$user` to `$tempdir`.
        if path.startswith(redundant_prefix) and tmp_has_usr:
            # Strip '$tempdir/$user/' using a computed length rather than
            # the magic slice index 15, and re-root under '$tempdir'.
            path = os.path.join('$tempdir',
                                path[len(redundant_prefix) + 1:])

        paths.append(sup.canonicalize_path(path))

    return paths
def spec_externals(spec): """Return a list of external specs (w/external directory path filled in), one for each known external installation.""" # break circular import. from spack.util.module_cmd import path_from_modules # NOQA: ignore=F401 allpkgs = spack.config.get('packages') names = set([spec.name]) names |= set(vspec.name for vspec in spec.package.virtuals_provided) external_specs = [] for name in names: pkg_config = allpkgs.get(name, {}) pkg_externals = pkg_config.get('externals', []) for entry in pkg_externals: spec_str = entry['spec'] external_path = entry.get('prefix', None) if external_path: external_path = canonicalize_path(external_path) external_modules = entry.get('modules', None) external_spec = spack.spec.Spec.from_detection( spack.spec.Spec(spec_str, external_path=external_path, external_modules=external_modules), extra_attributes=entry.get('extra_attributes', {})) if external_spec.satisfies(spec): external_specs.append(external_spec) # Defensively copy returned specs return [s.copy() for s in external_specs]
def spec_externals(spec):
    """Return a list of external specs (w/external directory path filled in),
       one for each known external installation.

    Args:
        spec (Spec): spec to find external installations for

    Returns:
        list: copies of matching externally-configured specs
    """
    # break circular import.
    from spack.util.module_cmd import get_path_from_module  # NOQA: ignore=F401

    allpkgs = spack.config.get('packages')
    # Externals may be registered under the package name or under any
    # virtual name the package provides.
    names = set([spec.name])
    names |= set(vspec.name for vspec in spec.package.virtuals_provided)

    external_specs = []
    for name in names:
        pkg_config = allpkgs.get(name, {})
        pkg_paths = pkg_config.get('paths', {})
        pkg_modules = pkg_config.get('modules', {})
        if (not pkg_paths) and (not pkg_modules):
            continue

        for external_spec, path in pkg_paths.items():
            # Skip entries without a path: canonicalize_path would fail on
            # a None/empty value and the spec would be unusable anyway.
            if not path:
                continue
            external_spec = spack.spec.Spec(
                external_spec, external_path=canonicalize_path(path))
            if external_spec.satisfies(spec):
                external_specs.append(external_spec)

        for external_spec, module in pkg_modules.items():
            # Likewise skip module entries with no module name.
            if not module:
                continue
            external_spec = spack.spec.Spec(external_spec,
                                            external_module=module)
            if external_spec.satisfies(spec):
                external_specs.append(external_spec)

    # defensively copy returned specs
    return [s.copy() for s in external_specs]
def spec_externals(spec): """Return a list of external specs (w/external directory path filled in), one for each known external installation.""" # break circular import. from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401 allpkgs = get_packages_config() name = spec.name external_specs = [] pkg_paths = allpkgs.get(name, {}).get('paths', None) pkg_modules = allpkgs.get(name, {}).get('modules', None) if (not pkg_paths) and (not pkg_modules): return [] for external_spec, path in iteritems(pkg_paths): if not path: # skip entries without paths (avoid creating extra Specs) continue external_spec = spack.spec.Spec(external_spec, external_path=canonicalize_path(path)) if external_spec.satisfies(spec): external_specs.append(external_spec) for external_spec, module in iteritems(pkg_modules): if not module: continue external_spec = spack.spec.Spec(external_spec, external_module=module) if external_spec.satisfies(spec): external_specs.append(external_spec) # defensively copy returned specs return [s.copy() for s in external_specs]
def _first_accessible_path(paths):
    """Find the first path that is accessible, creating it if necessary.

    Args:
        paths (list): candidate stage paths (may contain substitution
            variables such as ``$tempdir``)

    Returns:
        str or None: first canonicalized path the user can access,
            or None if there is no such path
    """
    for path in paths:
        try:
            # Ensure the user has access, creating the directory if necessary.
            path = sup.canonicalize_path(path)
            if os.path.exists(path):
                if can_access(path):
                    return path
            else:
                # The path doesn't exist so create it and adjust permissions
                # and group as needed (e.g., shared ``$tempdir``).
                prefix = os.path.sep
                parts = path.strip(os.path.sep).split(os.path.sep)
                # Walk down from the root to find the deepest existing
                # ancestor; new directories inherit its group.
                for part in parts:
                    prefix = os.path.join(prefix, part)
                    if not os.path.exists(prefix):
                        break
                parent = os.path.dirname(prefix)
                gid = os.stat(parent).st_gid
                mkdirp(path, group=gid, default_perms='parents')
                if can_access(path):
                    return path
        except OSError as e:
            # Inaccessible candidates are skipped, not fatal.
            tty.debug('OSError while checking stage path %s: %s' % (
                path, str(e)))
    return None
def test_substitute_user(mock_low_high_config):
    """'$user' in a path expands to the current user name."""
    current_user = getpass.getuser()
    expected = (os.sep + os.path.join('foo', 'bar') + os.sep
                + current_user + os.sep + 'baz')
    resolved = spack_path.canonicalize_path(
        os.sep + os.path.join('foo', 'bar', '$user', 'baz'))
    assert resolved == expected
def get_tmp_root():
    """Return the user-specific root of the temporary build stage area.

    Returns None when temporary stages are disabled or when the first
    accessible configured path is Spack's own local stage directory.
    """
    global _tmp_root, _use_tmp_stage

    if not _use_tmp_stage:
        return None

    if _tmp_root is None:
        candidates = spack.config.get('config:build_stage')
        if isinstance(candidates, string_types):
            # Accept a single path as well as a list of paths.
            candidates = [candidates]

        path = _first_accessible_path(candidates)
        if not path:
            raise StageError("No accessible stage paths in %s", candidates)

        # Return None to indicate we're using a local staging area.
        if path == canonicalize_path(spack.paths.stage_path):
            _use_tmp_stage = False
            return None

        # ensure that any temp path is unique per user, so users don't
        # fight over shared temporary space.
        user = getpass.getuser()
        # NOTE(review): this is a substring check, not a path-component
        # check, so a user name contained inside another component also
        # matches -- confirm this is intended.
        if user not in path:
            path = os.path.join(path, user, 'spack-stage')
        else:
            path = os.path.join(path, 'spack-stage')

        mkdirp(path)
        _tmp_root = path

    return _tmp_root
def make_environment(dirs=None):
    """Return a configured environment for template rendering.

    Args:
        dirs (list or None): directories to search for templates; when
            None, use the configured template dirs plus extension dirs

    Returns:
        jinja2.Environment: environment with Spack's custom filters set
    """
    if dirs is None:
        # Default directories where to search for templates
        builtins = spack.config.get('config:template_dirs',
                                    ['$spack/share/spack/templates'])
        extensions = spack.extensions.get_template_dirs()
        dirs = [
            canonicalize_path(d)
            for d in itertools.chain(builtins, extensions)
        ]

    # avoid importing this at the top level as it's used infrequently and
    # slows down startup a bit.
    import jinja2

    # Loader for the templates
    loader = jinja2.FileSystemLoader(dirs)

    # Environment of the template engine
    env = jinja2.Environment(loader=loader, trim_blocks=True,
                             lstrip_blocks=True)

    # Custom filters
    _set_filters(env)

    return env
def get_tmp_root():
    """Return the user-specific root of the temporary build stage area.

    Returns None when temporary stages are disabled or when the first
    accessible configured path is Spack's own local stage directory.
    """
    global _tmp_root, _use_tmp_stage

    if not _use_tmp_stage:
        return None

    if _tmp_root is None:
        config = spack.config.get_config('config')
        candidates = config['build_stage']
        if isinstance(candidates, string_types):
            # Accept a single path as well as a list of paths.
            candidates = [candidates]

        path = _first_accessible_path(candidates)
        if not path:
            raise StageError("No accessible stage paths in %s", candidates)

        # Return None to indicate we're using a local staging area.
        if path == canonicalize_path(spack.stage_path):
            _use_tmp_stage = False
            return None

        # ensure that any temp path is unique per user, so users don't
        # fight over shared temporary space.
        user = getpass.getuser()
        # NOTE(review): substring check, not a path-component check --
        # a user name inside another component also matches; confirm
        # this is intended.
        if user not in path:
            path = os.path.join(path, user, 'spack-stage')
        else:
            path = os.path.join(path, 'spack-stage')

        mkdirp(path)
        _tmp_root = path

    return _tmp_root
def test_substitute_config_variables(mock_config):
    """'$spack' and '${spack}' expand to the Spack prefix in any position."""
    prefix = spack.paths.prefix.lstrip('/')

    # trailing '$spack'
    assert os.path.join(
        '/foo/bar/baz', prefix
    ) == canonicalize_path('/foo/bar/baz/$spack')

    # leading '$spack'
    assert os.path.join(
        spack.paths.prefix, 'foo/bar/baz'
    ) == canonicalize_path('$spack/foo/bar/baz/')

    # embedded '$spack'
    assert os.path.join(
        '/foo/bar/baz', prefix, 'foo/bar/baz'
    ) == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')

    # the braced form '${spack}' behaves identically
    assert os.path.join(
        '/foo/bar/baz', prefix
    ) == canonicalize_path('/foo/bar/baz/${spack}')

    assert os.path.join(
        spack.paths.prefix, 'foo/bar/baz'
    ) == canonicalize_path('${spack}/foo/bar/baz/')

    assert os.path.join(
        '/foo/bar/baz', prefix, 'foo/bar/baz'
    ) == canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/')

    # an unterminated '${spack' is not treated as a substitution
    assert os.path.join(
        '/foo/bar/baz', prefix, 'foo/bar/baz'
    ) != canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')
def test_substitute_config_variables(self):
    """'$spack' and '${spack}' expand to the Spack prefix in any position."""
    prefix = spack.prefix.lstrip('/')

    # trailing '$spack'
    self.assertEqual(os.path.join('/foo/bar/baz', prefix),
                     canonicalize_path('/foo/bar/baz/$spack'))

    # leading '$spack'
    self.assertEqual(os.path.join(spack.prefix, 'foo/bar/baz'),
                     canonicalize_path('$spack/foo/bar/baz/'))

    # embedded '$spack'
    self.assertEqual(os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz'),
                     canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/'))

    # the braced form '${spack}' behaves identically
    self.assertEqual(os.path.join('/foo/bar/baz', prefix),
                     canonicalize_path('/foo/bar/baz/${spack}'))

    self.assertEqual(os.path.join(spack.prefix, 'foo/bar/baz'),
                     canonicalize_path('${spack}/foo/bar/baz/'))

    self.assertEqual(
        os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz'),
        canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/'))

    # an unterminated '${spack' is not treated as a substitution
    self.assertNotEqual(
        os.path.join('/foo/bar/baz', prefix, 'foo/bar/baz'),
        canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/'))
def test_substitute_config_variables(mock_low_high_config):
    """'$spack' and '${spack}' expand to the Spack prefix in any position."""
    prefix = spack.paths.prefix.lstrip('/')

    # trailing '$spack'
    assert os.path.join('/foo/bar/baz',
                        prefix) == canonicalize_path('/foo/bar/baz/$spack')

    # leading '$spack'
    assert os.path.join(
        spack.paths.prefix,
        'foo/bar/baz') == canonicalize_path('$spack/foo/bar/baz/')

    # embedded '$spack'
    assert os.path.join(
        '/foo/bar/baz', prefix,
        'foo/bar/baz') == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')

    # the braced form '${spack}' behaves identically
    assert os.path.join('/foo/bar/baz',
                        prefix) == canonicalize_path('/foo/bar/baz/${spack}')

    assert os.path.join(
        spack.paths.prefix,
        'foo/bar/baz') == canonicalize_path('${spack}/foo/bar/baz/')

    assert os.path.join('/foo/bar/baz', prefix,
                        'foo/bar/baz') == canonicalize_path(
        '/foo/bar/baz/${spack}/foo/bar/baz/')

    # an unterminated '${spack' is not treated as a substitution
    assert os.path.join('/foo/bar/baz', prefix,
                        'foo/bar/baz') != canonicalize_path(
        '/foo/bar/baz/${spack/foo/bar/baz/')
def create_repo(root, namespace=None):
    """Create a new repository in root with the specified namespace.

    If the namespace is not provided, use basename of root.
    Return the canonicalized path and namespace of the created repository.

    Args:
        root (str): path where the repository is created
        namespace (str or None): namespace for the new repo; defaults to
            the basename of ``root``

    Raises:
        InvalidNamespaceError: if the namespace is not a valid identifier
        BadRepoError: if the location exists but cannot be used, or
            creation fails
    """
    root = canonicalize_path(root)
    if not namespace:
        namespace = os.path.basename(root)

    if not re.match(r'\w[\.\w-]*', namespace):
        raise InvalidNamespaceError(
            "'%s' is not a valid namespace." % namespace)

    existed = False
    if os.path.exists(root):
        if os.path.isfile(root):
            raise BadRepoError('File %s already exists and is not a directory'
                               % root)
        elif os.path.isdir(root):
            if not os.access(root, os.R_OK | os.W_OK):
                raise BadRepoError(
                    'Cannot create new repo in %s: cannot access directory.'
                    % root)
            if os.listdir(root):
                raise BadRepoError(
                    'Cannot create new repo in %s: directory is not empty.'
                    % root)
        existed = True

    full_path = os.path.realpath(root)
    parent = os.path.dirname(full_path)
    if not os.access(parent, os.R_OK | os.W_OK):
        raise BadRepoError(
            "Cannot create repository in %s: can't access parent!" % root)

    try:
        config_path = os.path.join(root, repo_config_name)
        packages_path = os.path.join(root, packages_dir_name)

        mkdirp(packages_path)
        with open(config_path, 'w') as config:
            config.write("repo:\n")
            config.write("  namespace: '%s'\n" % namespace)

    except (IOError, OSError) as e:
        # Clean up BEFORE raising -- statements after a raise are
        # unreachable, so the cleanup must happen first. Only remove the
        # whole tree if we created it ourselves.
        if existed:
            shutil.rmtree(config_path, ignore_errors=True)
            shutil.rmtree(packages_path, ignore_errors=True)
        else:
            shutil.rmtree(root, ignore_errors=True)

        raise BadRepoError('Failed to create new repository in %s.' % root,
                           "Caused by %s: %s" % (type(e), e))

    return full_path, namespace
def __init__(self, root): """Instantiate a package repository from a filesystem path. Args: root: the root directory of the repository """ # Root directory, containing _repo.yaml and package dirs # Allow roots to by spack-relative by starting with '$spack' self.root = canonicalize_path(root) # check and raise BadRepoError on fail. def check(condition, msg): if not condition: raise BadRepoError(msg) # Validate repository layout. self.config_file = os.path.join(self.root, repo_config_name) check(os.path.isfile(self.config_file), "No %s found in '%s'" % (repo_config_name, root)) self.packages_path = os.path.join(self.root, packages_dir_name) check(os.path.isdir(self.packages_path), "No directory '%s' found in '%s'" % (packages_dir_name, root)) # Read configuration and validate namespace config = self._read_config() check( 'namespace' in config, '%s must define a namespace.' % os.path.join(root, repo_config_name)) self.namespace = config['namespace'] check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace), ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) + "Namespaces must be valid python identifiers separated by '.'") # Set up 'full_namespace' to include the super-namespace self.full_namespace = get_full_namespace(self.namespace) # Keep name components around for checking prefixes. self._names = self.full_namespace.split('.') # These are internal cache variables. self._modules = {} self._classes = {} self._instances = {} # Maps that goes from package name to corresponding file stat self._fast_package_checker = None # Indexes for this repository, computed lazily self._repo_index = None # make sure the namespace for packages in this repo exists. self._create_namespace()
def _misc_cache():
    """The ``misc_cache`` is Spack's cache for small data.

    Currently the ``misc_cache`` stores indexes for virtual dependency
    providers and for which packages provide which tags.
    """
    configured = spack.config.get('config:misc_cache')
    default = os.path.join(spack.paths.user_config_path, 'cache')
    # Fall back to the per-user default location when unconfigured.
    cache_dir = canonicalize_path(configured or default)
    return spack.util.file_cache.FileCache(cache_dir)
def _fetch_cache():
    """Filesystem cache of downloaded archives.

    This prevents Spack from repeatedly fetching the same files when
    building the same package different ways or multiple times.
    """
    configured = spack.config.get('config:source_cache')
    default = os.path.join(spack.paths.var_path, "cache")
    # Fall back to the default var-path location when unconfigured.
    cache_dir = canonicalize_path(configured or default)
    return spack.fetch_strategy.FsCache(cache_dir)
def test_substitute_padding(mock_low_high_config):
    """'$padding'/'${padding}' pad a path to the max install length, or to
    an explicit width with the ':N' suffix."""
    max_system_path = spack_path.get_system_path_max()
    expected_length = (max_system_path -
                       spack_path.SPACK_MAX_INSTALL_PATH_LENGTH)

    # embedded '${padding}': the path is padded up to the computed length
    install_path = spack_path.canonicalize_path('/foo/bar/${padding}/baz')
    assert spack_path.SPACK_PATH_PADDING_CHARS in install_path
    assert len(install_path) == expected_length

    # trailing '$padding' behaves the same
    install_path = spack_path.canonicalize_path('/foo/bar/baz/gah/$padding')
    assert spack_path.SPACK_PATH_PADDING_CHARS in install_path
    assert len(install_path) == expected_length

    # '$padding:10' inserts exactly 10 padding characters
    i_path = spack_path.canonicalize_path('/foo/$padding:10')
    i_expect = os.path.join('/foo', spack_path.SPACK_PATH_PADDING_CHARS[:10])
    assert i_path == i_expect

    # '${padding:20}' inserts exactly 20 padding characters
    i_path = spack_path.canonicalize_path('/foo/${padding:20}')
    i_expect = os.path.join('/foo', spack_path.SPACK_PATH_PADDING_CHARS[:20])
    assert i_path == i_expect
def make_environment(dirs=None):
    """Return a configured Jinja2 environment for template rendering."""
    if dirs is None:
        # Fall back to the template directories from the Spack config.
        template_dirs = spack.config.get('config:template_dirs')
        dirs = [canonicalize_path(d) for d in template_dirs]

    # Build the engine over a filesystem loader rooted at those dirs.
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(dirs),
                             trim_blocks=True)

    # Register Spack's custom template filters.
    _set_filters(env)
    return env
def make_environment(dirs=None):
    """Return a configured Jinja2 environment for template rendering."""
    if dirs is None:
        # Default to the template directories from the Spack config.
        dirs = list(map(canonicalize_path,
                        spack.config.get('config:template_dirs')))

    loader = jinja2.FileSystemLoader(dirs)
    env = jinja2.Environment(loader=loader, trim_blocks=True)
    _set_filters(env)  # register Spack's custom template filters
    return env
def instance_path_for_stage():
    """
    Temporarily use the "traditional" spack instance stage path for staging.

    Note that it can be important for other tests that the previous settings
    be restored when the test case is over.
    """
    current = spack.config.get('config:build_stage')
    base = canonicalize_path(os.path.join('$spack', 'test-stage'))
    mkdirp(base)
    # Each use gets a fresh, unique directory under the instance base.
    path = tempfile.mkdtemp(dir=base)
    spack.config.set('config', {'build_stage': path}, scope='user')
    yield
    # Restore the previous setting and remove the temporary stage tree.
    spack.config.set('config', {'build_stage': current}, scope='user')
    shutil.rmtree(base)
def parse(url, scheme='file'): """Parse a url. For file:// URLs, the netloc and path components are concatenated and passed through spack.util.path.canoncalize_path(). Otherwise, the returned value is the same as urllib's urlparse() with allow_fragments=False. """ # guarantee a value passed in is of proper url format. Guarantee # allows for easier string manipulation accross platforms if isinstance(url, string_types): require_url_format(url) url = escape_file_url(url) url_obj = ( urllib_parse.urlparse(url, scheme=scheme, allow_fragments=False) if isinstance(url, string_types) else url) (scheme, netloc, path, params, query, _) = url_obj scheme = (scheme or 'file').lower() if scheme == 'file': # (The user explicitly provides the file:// scheme.) # examples: # file://C:\\a\\b\\c # file://X:/a/b/c path = canonicalize_path(netloc + path) path = re.sub(r'^/+', '/', path) netloc = '' drive_ltr_lst = re.findall(r'[A-Za-z]:\\', path) is_win_path = bool(drive_ltr_lst) if is_windows and is_win_path: drive_ltr = drive_ltr_lst[0].strip('\\') path = re.sub(r'[\\]*' + drive_ltr, '', path) netloc = '/' + drive_ltr.strip('\\') if sys.platform == "win32": path = convert_to_posix_path(path) return urllib_parse.ParseResult(scheme=scheme, netloc=netloc, path=path, params=params, query=query, fragment=None)
class Dotkit(EnvModule):
    """Module file writer for the dotkit module system."""
    name = 'dotkit'
    # Root of the dotkit module tree (configurable via _roots; defaults
    # to the shared 'dotkit' directory).
    path = canonicalize_path(
        _roots.get(name, join_path(spack.share_path, name)))

    # How each kind of environment modification is rendered in a .dk file.
    environment_modifications_formats = {
        PrependPath: 'dk_alter {name} {value}\n',
        RemovePath: 'dk_unalter {name} {value}\n',
        SetEnv: 'dk_setenv {name} {value}\n'
    }

    autoload_format = 'dk_op {module_file}\n'

    default_naming_format = \
        '${PACKAGE}-${VERSION}-${COMPILERNAME}-${COMPILERVER}'

    @property
    def file_name(self):
        # Module files are grouped by the spec's target architecture.
        return join_path(self.path, self.spec.architecture,
                         '%s.dk' % self.use_name)

    @property
    def header(self):
        """Build the dotkit header: category, short and long description."""
        # Category
        header = ''
        if self.category:
            header += '#c %s\n' % self.category

        # Short description
        if self.short_description:
            header += '#d %s\n' % self.short_description

        # Long description
        if self.long_description:
            for line in textwrap.wrap(self.long_description, 72):
                header += '#h %s\n' % line
        return header

    def prerequisite(self, spec):
        # Dotkit has no prerequisite mechanism; warn and emit nothing.
        tty.warn('prerequisites: not supported by dotkit module files')
        tty.warn('\tYou may want to check %s/modules.yaml'
                 % spack.user_config_path)
        return ''
def add_remote_packages(remote, exclude=None, nostack=False, hardlinks=False):
    """Add all installed packages in `remote` to the packages dictionary.

    Args:
        remote (str): root of the remote Spack installation
        exclude (list or None): specs to exclude (default: no exclusions)
        nostack (bool): if True, packages will not be re-linked if they exist
        hardlinks (bool): if True, packages will be hard-linked.
            Not recommended!

    Returns:
        int: number of packages linked from the remote
    """
    # Avoid the mutable-default-argument pitfall: a shared `[]` default
    # would persist (and could accumulate state) across calls.
    if exclude is None:
        exclude = []

    config = spack.config.get_config("config")
    # NOTE: This has to be kept in sync with spack/store.py!
    layout = spack.directory_layout.YamlDirectoryLayout(
        canonicalize_path(osp.join(remote, 'opt', 'spack')),
        hash_len=config.get('install_hash_length'),
        path_scheme=config.get('install_path_scheme'))

    num_packages = 0
    for spec in filter_exclude(layout.all_specs(), exclude):
        src = layout.path_for_spec(spec)
        tgt = spack.store.layout.path_for_spec(spec)
        if osp.exists(tgt):
            if not (nostack or hardlinks):
                if osp.islink(tgt):
                    os.remove(tgt)
                else:
                    tty.warn("Cannot not stack {0} because {1} exists.".format(
                        src, tgt))
                    continue
            else:
                tty.info("Not stacking {0} because already present.".format(
                    src))
        fs.mkdirp(osp.dirname(tgt))
        tty.debug("Linking {0} -> {1}".format(src, tgt))
        if not hardlinks:
            os.symlink(src, tgt)
        else:
            os.link(src, tgt)
        num_packages += 1

    tty.info("Added {0} packages from {1}".format(num_packages, remote))
    return num_packages
def _first_accessible_path(paths):
    """Find a tmp dir that exists that we can access."""
    for candidate in paths:
        try:
            # Expand substitution variables and create the directory
            # if it does not exist yet.
            candidate = canonicalize_path(candidate)
            mkdirp(candidate)
        except OSError:
            tty.debug('OSError while checking temporary path: %s' % candidate)
            continue
        # Only usable if the current user can actually access it.
        if can_access(candidate):
            return candidate
    return None
def test_template_retrieval(self): """Tests the template retrieval mechanism hooked into config files""" # Check the directories are correct template_dirs = spack.config.get('config:template_dirs') template_dirs = [canonicalize_path(x) for x in template_dirs] assert len(template_dirs) == 3 env = tengine.make_environment(template_dirs) # Retrieve a.txt, which resides in the second # template directory specified in the mock configuration template = env.get_template('a.txt') text = template.render({'word': 'world'}) assert 'Hello world!' == text # Retrieve b.txt, which resides in the third # template directory specified in the mock configuration template = env.get_template('b.txt') text = template.render({'word': 'world'}) assert 'Howdy world!' == text
def test_get_stage_root_in_spack(self, clear_stage_root):
    """Ensure an instance path is an accessible build stage path."""
    base = canonicalize_path(os.path.join('$spack', '.spack-test-stage'))
    mkdirp(base)
    test_path = tempfile.mkdtemp(dir=base)

    try:
        with spack.config.override('config:build_stage', test_path):
            path = spack.stage.get_stage_root()

        # The resolved stage root lives under the spack prefix.
        assert 'spack' in path.split(os.path.sep)

        # Make sure cached stage path value was changed appropriately
        assert spack.stage._stage_root == test_path

        # Make sure the directory exists
        assert os.path.isdir(spack.stage._stage_root)

    finally:
        # Clean up regardless of outcome
        shutil.rmtree(base)
def spec_externals(spec): """Return a list of external specs (w/external directory path filled in), one for each known external installation.""" # break circular import. from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401 allpkgs = get_packages_config() name = spec.name external_specs = [] pkg_paths = allpkgs.get(name, {}).get('paths', None) pkg_modules = allpkgs.get(name, {}).get('modules', None) if (not pkg_paths) and (not pkg_modules): return [] for external_spec, path in iteritems(pkg_paths): if not path: # skip entries without paths (avoid creating extra Specs) continue external_spec = spack.spec.Spec(external_spec, external_path=canonicalize_path(path)) if external_spec.satisfies(spec): external_specs.append(external_spec) for external_spec, module in iteritems(pkg_modules): if not module: continue external_spec = spack.spec.Spec( external_spec, external_module=module) if external_spec.satisfies(spec): external_specs.append(external_spec) # defensively copy returned specs return [s.copy() for s in external_specs]
abi = ABI() # This controls how things are concretized in spack. # Replace it with a subclass if you want different # policies. concretizer = DefaultConcretizer() #----------------------------------------------------------------------------- # config.yaml options #----------------------------------------------------------------------------- _config = spack.config.get_config('config') # Path where downloaded source code is cached cache_path = canonicalize_path( _config.get('source_cache', join_path(var_path, "cache"))) fetch_cache = spack.fetch_strategy.FsCache(cache_path) # cache for miscellaneous stuff. misc_cache_path = canonicalize_path( _config.get('misc_cache', join_path(user_config_path, 'cache'))) misc_cache = FileCache(misc_cache_path) # If this is enabled, tools that use SSL should not verify # certifiates. e.g., curl should use the -k option. insecure = not _config.get('verify_ssl', True) # Whether spack should allow installation of unsafe versions of software.
def test_substitute_tempdir(mock_low_high_config):
    """'$tempdir' resolves to the system temporary directory."""
    system_tmp = tempfile.gettempdir()
    assert canonicalize_path('$tempdir') == system_tmp
    assert canonicalize_path('$tempdir/foo/bar/baz') == \
        system_tmp + '/foo/bar/baz'
def test_substitute_user(mock_low_high_config):
    """'$user' resolves to the current user name."""
    current_user = getpass.getuser()
    resolved = canonicalize_path('/foo/bar/$user/baz')
    assert resolved == '/foo/bar/' + current_user + '/baz'
def test_substitute_tempdir(mock_config):
    """'$tempdir' resolves to the system temporary directory."""
    system_tmp = tempfile.gettempdir()
    assert canonicalize_path('$tempdir') == system_tmp
    assert canonicalize_path('$tempdir/foo/bar/baz') == \
        system_tmp + '/foo/bar/baz'
def test_substitute_user(mock_config):
    """'$user' resolves to the current user name."""
    current_user = getpass.getuser()
    resolved = canonicalize_path('/foo/bar/$user/baz')
    assert resolved == '/foo/bar/' + current_user + '/baz'
from spack.util.path import canonicalize_path
from spack.database import Database
from spack.directory_layout import YamlDirectoryLayout

__author__ = "Benedikt Hegner (CERN)"
__all__ = ['db', 'layout', 'root']

#
# Read in the config
#
config = spack.config.get_config("config")

#
# Set up the install path
#
# Defaults to $spack/opt/spack unless 'install_tree' is configured.
root = canonicalize_path(
    config.get('install_tree', os.path.join(spack.opt_path, 'spack')))

#
# Set up the installed packages database
#
db = Database(root)

#
# This controls how spack lays out install prefixes and
# stage directories.
#
layout = YamlDirectoryLayout(root,
                             hash_len=config.get('install_hash_length'),
                             path_scheme=config.get('install_path_scheme'))
def __init__(self, root, namespace=repo_namespace):
    """Instantiate a package repository from a filesystem path.

    Arguments:
        root       The root directory of the repository.

        namespace  A super-namespace that will contain the repo-defined
                   namespace (this is generally just `spack.pkg`). The
                   super-namespace is Spack's way of separating repositories
                   from other python namespaces.
    """
    # Root directory, containing _repo.yaml and package dirs
    # Allow roots to by spack-relative by starting with '$spack'
    self.root = canonicalize_path(root)

    # super-namespace for all packages in the Repo
    self.super_namespace = namespace

    # check and raise BadRepoError on fail.
    def check(condition, msg):
        if not condition:
            raise BadRepoError(msg)

    # Validate repository layout.
    self.config_file = join_path(self.root, repo_config_name)
    check(os.path.isfile(self.config_file),
          "No %s found in '%s'" % (repo_config_name, root))

    self.packages_path = join_path(self.root, packages_dir_name)
    # Fix: the message must name the missing *packages* directory, not
    # the repo config file.
    check(os.path.isdir(self.packages_path),
          "No directory '%s' found in '%s'" % (packages_dir_name, root))

    # Read configuration and validate namespace
    config = self._read_config()
    check('namespace' in config,
          '%s must define a namespace.' % join_path(root, repo_config_name))

    self.namespace = config['namespace']
    check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
          ("Invalid namespace '%s' in repo '%s'. "
           % (self.namespace, self.root)) +
          "Namespaces must be valid python identifiers separated by '.'")

    # Set up 'full_namespace' to include the super-namespace
    if self.super_namespace:
        self.full_namespace = "%s.%s" % (
            self.super_namespace, self.namespace)
    else:
        self.full_namespace = self.namespace

    # Keep name components around for checking prefixes.
    self._names = self.full_namespace.split('.')

    # These are internal cache variables.
    self._modules = {}
    self._classes = {}
    self._instances = {}

    # list of packages that are newer than the index.
    self._needs_update = []

    # Index of virtual dependencies
    self._provider_index = None

    # Cached list of package names.
    self._all_package_names = None

    # make sure the namespace for packages in this repo exists.
    self._create_namespace()

    # Unique filename for cache of virtual dependency providers
    self._cache_file = 'providers/%s-index.yaml' % self.namespace