def write(self):
    """Write out a module file for this object."""
    module_dir = os.path.dirname(self.file_name)
    if not os.path.exists(module_dir):
        mkdirp(module_dir)

    # Environment modifications guessed by inspecting the
    # installation prefix
    env = inspect_path(self.spec.prefix)

    # Let the extendee modify their extensions before asking for
    # package-specific modifications
    spack_env = EnvironmentModifications()
    for item in self.pkg.extendees:
        try:
            package = self.spec[item].package
            package.setup_dependent_package(self.pkg.module, self.spec)
            package.setup_dependent_environment(spack_env, env, self.spec)
        except Exception:
            # The extends was conditional, so it doesn't count here,
            # e.g.: extends('python', when='+python')
            pass

    # Package-specific environment modifications
    self.spec.package.setup_environment(spack_env, env)

    # TODO: implement site-specific modifications and filters
    if not env:
        return

    with open(self.file_name, 'w') as f:
        self.write_header(f)
        for line in self.process_environment_command(env):
            f.write(line)
def create(self):
    """Creates the stage directory.

    If get_tmp_root() is None, the stage directory is created
    directly under spack.paths.stage_path, otherwise this will
    attempt to create a stage in a temporary directory and link it
    into spack.paths.stage_path.
    """
    # Create the top-level stage directory
    mkdirp(spack.paths.stage_path)
    remove_if_dead_link(self.path)

    # If a tmp_root exists then create a directory there and then link it
    # in the stage area, otherwise create the stage directory in self.path
    if self._need_to_create_path():
        tmp_root = get_tmp_root()
        if tmp_root is not None:
            # tempfile.mkdtemp already sets mode 0700
            tmp_dir = tempfile.mkdtemp('', _stage_prefix, tmp_root)
            tty.debug('link %s -> %s' % (self.path, tmp_dir))
            os.symlink(tmp_dir, self.path)
        else:
            # emulate file permissions for tempfile.mkdtemp
            mkdirp(self.path, mode=stat.S_IRWXU)

    # Make sure we can actually do something with the stage we made.
    ensure_access(self.path)
    self.created = True
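# The tmp-root-plus-symlink pattern above is easy to lift out of Spack.
# Below is a minimal stdlib-only sketch of the same idea; the function and
# argument names are illustrative, not Spack APIs.
import os
import stat
import tempfile


def create_linked_stage(stage_path, tmp_root=None):
    if tmp_root is not None:
        # tempfile.mkdtemp already sets mode 0700
        tmp_dir = tempfile.mkdtemp(prefix='demo-stage-', dir=tmp_root)
        os.symlink(tmp_dir, stage_path)
    else:
        # emulate tempfile.mkdtemp's permissions for a plain directory
        os.makedirs(stage_path, mode=stat.S_IRWXU)
    return os.path.realpath(stage_path)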
def test_url_patch(mock_stage, filename, sha256, archive_sha256):
    # Make a patch object
    url = 'file://' + filename
    m = sys.modules['spack.patch']
    patch = m.Patch.create(
        None, url, sha256=sha256, archive_sha256=archive_sha256)

    # make a stage
    with Stage(url) as stage:  # TODO: url isn't used; maybe refactor Stage
        # TODO: there is probably a better way to mock this.
        stage.mirror_path = mock_stage  # don't disrupt the spack install

        # fake a source path
        with working_dir(stage.path):
            mkdirp('spack-expanded-archive')

        with working_dir(stage.source_path):
            # write a file to be patched
            with open('foo.txt', 'w') as f:
                f.write("""\
first line
second line
""")
            # write the expected result of patching.
            with open('foo-expected.txt', 'w') as f:
                f.write("""\
zeroth line
first line
third line
""")
        # apply the patch and compare files
        patch.apply(stage)

        with working_dir(stage.source_path):
            assert filecmp.cmp('foo.txt', 'foo-expected.txt')
def dump_provenance(self, spec, path):
    """Dump provenance information for a spec to a particular path.

    This dumps the package file and any associated patch files.
    Raises UnknownPackageError if not found.
    """
    # Some preliminary checks.
    if spec.virtual:
        raise UnknownPackageError(spec.name)

    if spec.namespace and spec.namespace != self.namespace:
        raise UnknownPackageError(
            "Repository %s does not contain package %s."
            % (self.namespace, spec.fullname))

    # Install any patch files needed by packages.
    mkdirp(path)
    for spec, patches in spec.package.patches.items():
        for patch in patches:
            if patch.path:
                if os.path.exists(patch.path):
                    install(patch.path, path)
                else:
                    tty.warn("Patch file did not exist: %s" % patch.path)

    # Install the package.py file itself.
    install(self.filename_for_package_name(spec), path)
def __init__(self, root, db_dir=None):
    """Create a Database for Spack installations under ``root``.

    A Database is a cache of Specs data from ``$prefix/spec.yaml``
    files in Spack installation directories.

    By default, Database files (data and lock files) are stored
    under ``root/.spack-db``, which is created if it does not
    exist.  This is the ``db_dir``.

    The Database will attempt to read an ``index.json`` file in
    ``db_dir``.  If it does not find one, it will fall back to read
    an ``index.yaml`` if one is present.  If that does not exist, it
    will create a database when needed by scanning the entire
    Database root for ``spec.yaml`` files according to Spack's
    ``DirectoryLayout``.

    Caller may optionally provide a custom ``db_dir`` parameter
    where data will be stored.  This is intended to be used for
    testing the Database class.
    """
    self.root = root

    if db_dir is None:
        # If the db_dir is not provided, default to within the db root.
        self._db_dir = os.path.join(self.root, _db_dirname)
    else:
        # Allow customizing the database directory location for testing.
        self._db_dir = db_dir

    # Set up layout of database files within the db dir
    self._old_yaml_index_path = os.path.join(self._db_dir, 'index.yaml')
    self._index_path = os.path.join(self._db_dir, 'index.json')
    self._lock_path = os.path.join(self._db_dir, 'lock')

    # This is for other classes to use to lock prefix directories.
    self.prefix_lock_path = os.path.join(self._db_dir, 'prefix_lock')

    # Create needed directories and files
    if not os.path.exists(self._db_dir):
        mkdirp(self._db_dir)

    # initialize rest of state.
    self.db_lock_timeout = (
        spack.config.get('config:db_lock_timeout') or _db_lock_timeout)
    self.package_lock_timeout = (
        spack.config.get('config:package_lock_timeout') or None)
    tty.debug('DATABASE LOCK TIMEOUT: {0}s'.format(
        str(self.db_lock_timeout)))
    timeout_format_str = ('{0}s'.format(str(self.package_lock_timeout))
                          if self.package_lock_timeout else 'No timeout')
    tty.debug('PACKAGE LOCK TIMEOUT: {0}'.format(
        str(timeout_format_str)))
    self.lock = Lock(self._lock_path,
                     default_timeout=self.db_lock_timeout)
    self._data = {}

    # whether there was an error at the start of a read transaction
    self._error = None
def make_path_for_spec(self, spec):
    _check_concrete(spec)

    path = self.path_for_spec(spec)
    spec_file_path = self.spec_file_path(spec)

    if os.path.isdir(path):
        if not os.path.isfile(spec_file_path):
            raise InconsistentInstallDirectoryError(
                'No spec file found at path %s' % spec_file_path)

        installed_spec = self.read_spec(spec_file_path)
        if installed_spec == spec:
            raise InstallDirectoryAlreadyExistsError(path)

        spec_hash = self.hash_spec(spec)
        installed_hash = self.hash_spec(installed_spec)
        if installed_hash == spec_hash:
            raise SpecHashCollisionError(installed_hash, spec_hash)
        else:
            raise InconsistentInstallDirectoryError(
                'Spec file in %s does not match SHA-1 hash!'
                % spec_file_path)

    mkdirp(path)
    self.write_spec(spec, spec_file_path)
def edit_package(name, repo_path, namespace, force=False):
    if repo_path:
        repo = Repo(repo_path)
    elif namespace:
        repo = spack.repo.get_repo(namespace)
    else:
        repo = spack.repo
    path = repo.filename_for_package_name(name)

    spec = Spec(name)
    if os.path.exists(path):
        if not os.path.isfile(path):
            tty.die("Something's wrong. '%s' is not a file!" % path)
        if not os.access(path, os.R_OK | os.W_OK):
            tty.die("Insufficient permissions on '%s'!" % path)
    elif not force:
        tty.die("No package '%s'. Use spack create, or supply -f/--force "
                "to edit a new file." % spec.name)
    else:
        mkdirp(os.path.dirname(path))
        with open(path, "w") as pkg_file:
            pkg_file.write(
                package_template.substitute(
                    name=spec.name, class_name=mod_to_class(spec.name)))

    spack.editor(path)
def bootstrap(parser, args):
    origin_url, branch = get_origin_info(args.remote)
    prefix = args.prefix

    tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))

    if os.path.isfile(prefix):
        tty.die("There is already a file at %s" % prefix)

    mkdirp(prefix)

    if os.path.exists(join_path(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there! "
                "Delete these files before bootstrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    git = which('git', required=True)
    git('init', '--shared', '-q')
    git('remote', 'add', 'origin', origin_url)
    git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
        '-n', '-q')
    git('reset', '--hard', 'origin/%s' % branch, '-q')
    git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
def edit(parser, args):
    name = args.name

    # By default open the directory where packages live.
    if not name:
        path = spack.packages_path
    else:
        path = spack.db.filename_for_package_name(name)

        if os.path.exists(path):
            if not os.path.isfile(path):
                tty.die("Something's wrong. '%s' is not a file!" % path)
            if not os.access(path, os.R_OK | os.W_OK):
                tty.die("Insufficient permissions on '%s'!" % path)
        elif not args.force:
            tty.die("No package '%s'. Use spack create, or supply -f/--force "
                    "to edit a new file." % name)
        else:
            mkdirp(os.path.dirname(path))
            with closing(open(path, "w")) as pkg_file:
                pkg_file.write(
                    package_template.substitute(
                        name=name, class_name=mod_to_class(name)))

    # If everything checks out, go ahead and edit.
    spack.editor(path)
def test_env_with_included_config_scope():
    config_scope_path = os.path.join(ev.root('test'), 'config')
    test_config = """\
env:
  include:
  - %s
  specs:
  - mpileaks
""" % config_scope_path
    spack.package_prefs.PackagePrefs.clear_caches()

    _env_create('test', StringIO(test_config))
    e = ev.read('test')

    fs.mkdirp(config_scope_path)
    with open(os.path.join(config_scope_path, 'packages.yaml'), 'w') as f:
        f.write("""\
packages:
  mpileaks:
    version: [2.2]
""")

    ev.prepare_config_scope(e)
    e.concretize()

    assert any(x.satisfies('[email protected]')
               for x in e._get_environment_specs())
def initmock(self):
    # Use the mock packages database for these tests. This allows
    # us to set up contrived packages that don't interfere with
    # real ones.
    self.db = RepoPath(spack.mock_packages_path)
    spack.repo.swap(self.db)

    spack.config.clear_config_caches()
    self.real_scopes = spack.config.config_scopes

    # Mock up temporary configuration directories
    self.temp_config = tempfile.mkdtemp()
    self.mock_site_config = os.path.join(self.temp_config, 'site')
    self.mock_user_config = os.path.join(self.temp_config, 'user')
    mkdirp(self.mock_site_config)
    mkdirp(self.mock_user_config)
    for confs in [('compilers.yaml', mock_compiler_config),
                  ('packages.yaml', mock_packages_config)]:
        conf_yaml = os.path.join(self.mock_site_config, confs[0])
        with open(conf_yaml, 'w') as f:
            f.write(confs[1])

    # TODO: Mocking this up is kind of brittle b/c ConfigScope
    # TODO: constructor modifies config_scopes. Make it cleaner.
    spack.config.config_scopes = OrderedDict()
    spack.config.ConfigScope('site', self.mock_site_config)
    spack.config.ConfigScope('user', self.mock_user_config)

    # Store changes to the package's dependencies so we can
    # restore later.
    self.saved_deps = {}
def create(parser, args):
    # Gather information about the package to be created
    name = get_name(args)
    url = get_url(args)
    versions, guesser = get_versions(args, name)
    build_system = get_build_system(args, guesser)

    # Create the package template object
    PackageClass = templates[build_system]
    package = PackageClass(name, url, versions)
    tty.msg("Created template for {0} package".format(package.name))

    # Create a directory for the new package
    repo = get_repository(args, name)
    pkg_path = repo.filename_for_package_name(package.name)
    if os.path.exists(pkg_path) and not args.force:
        tty.die('{0} already exists.'.format(pkg_path),
                '  Try running `spack create --force` to overwrite it.')
    else:
        mkdirp(os.path.dirname(pkg_path))

    # Write the new package file
    package.write(pkg_path)
    tty.msg("Created package file: {0}".format(pkg_path))

    # Open up the new package file in your $EDITOR
    spack.editor(pkg_path)
def init_entry(self, key):
    """Ensure we can access a cache file. Create a lock for it if needed.

    Return whether the cache file exists yet or not.
    """
    cache_path = self.cache_path(key)

    exists = os.path.exists(cache_path)
    if exists:
        if not os.path.isfile(cache_path):
            raise CacheError("Cache file is not a file: %s" % cache_path)

        if not os.access(cache_path, os.R_OK | os.W_OK):
            raise CacheError("Cannot access cache file: %s" % cache_path)
    else:
        # if the file is hierarchical, make parent directories
        parent = os.path.dirname(cache_path)
        if parent.rstrip(os.path.sep) != self.root:
            mkdirp(parent)

        if not os.access(parent, os.R_OK | os.W_OK):
            raise CacheError("Cannot access cache directory: %s" % parent)

        # ensure lock is created for this key
        self._get_lock(key)
    return exists
def get_tmp_root():
    global _tmp_root, _use_tmp_stage

    if not _use_tmp_stage:
        return None

    if _tmp_root is None:
        candidates = spack.config.get('config:build_stage')
        if isinstance(candidates, string_types):
            candidates = [candidates]

        path = _first_accessible_path(candidates)
        if not path:
            raise StageError("No accessible stage paths in %s", candidates)

        # Return None to indicate we're using a local staging area.
        if path == canonicalize_path(spack.paths.stage_path):
            _use_tmp_stage = False
            return None

        # ensure that any temp path is unique per user, so users don't
        # fight over shared temporary space.
        user = getpass.getuser()
        if user not in path:
            path = os.path.join(path, user, 'spack-stage')
        else:
            path = os.path.join(path, 'spack-stage')

        mkdirp(path)
        _tmp_root = path

    return _tmp_root
def install_all(self, args=None):
    """Install all concretized specs in an environment."""
    # Make sure log directory exists
    log_path = self.log_path
    fs.mkdirp(log_path)

    for concretized_hash in self.concretized_order:
        spec = self.specs_by_hash[concretized_hash]

        # Parse cli arguments and construct a dictionary
        # that will be passed to Package.do_install API
        kwargs = dict()
        if args:
            spack.cmd.install.update_kwargs_from_args(args, kwargs)

        with fs.working_dir(self.path):
            spec.package.do_install(**kwargs)

        # Link the resulting log file into logs dir
        build_log_link = os.path.join(
            log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
        if os.path.exists(build_log_link):
            os.remove(build_log_link)
        os.symlink(spec.package.build_log_path, build_log_link)
def create(path, specs, **kwargs):
    """Create a directory to be used as a spack mirror, and fill it with
    package archives.

    Arguments:
        path: Path to create a mirror directory hierarchy in.
        specs: Any package versions matching these specs will be added \
            to the mirror.

    Keyword args:
        no_checksum: If True, do not checksum when fetching (default False)
        num_versions: Max number of versions to fetch per spec, \
            if spec is ambiguous (default is 0 for all of them)

    Return Value:
        Returns a tuple of lists: (present, mirrored, error)

        * present: Package specs that were already present.
        * mirrored: Package specs that were successfully mirrored.
        * error: Package specs that failed to mirror due to some error.

    This routine iterates through all known package versions, and
    it creates specs for those versions.  If the version satisfies any spec
    in the specs list, it is downloaded and added to the mirror.
    """
    # Make sure nothing is in the way.
    if os.path.isfile(path):
        raise MirrorError("%s already exists and is a file." % path)

    # automatically spec-ify anything in the specs array.
    specs = [s if isinstance(s, Spec) else Spec(s) for s in specs]

    # Get concrete specs for each matching version of these specs.
    version_specs = get_matching_versions(
        specs, num_versions=kwargs.get('num_versions', 0))
    for s in version_specs:
        s.concretize()

    # Get the absolute path of the root before we start jumping around.
    mirror_root = os.path.abspath(path)
    if not os.path.isdir(mirror_root):
        try:
            mkdirp(mirror_root)
        except OSError as e:
            raise MirrorError(
                "Cannot create directory '%s':" % mirror_root, str(e))

    # Things to keep track of while parsing specs.
    categories = {
        'present': [],
        'mirrored': [],
        'error': []
    }

    # Iterate through packages and download all safe tarballs for each
    for spec in version_specs:
        add_single_spec(spec, mirror_root, categories, **kwargs)

    return categories['present'], categories['mirrored'], categories['error']
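# Hedged usage sketch of create() -- the mirror path and the specs are
# illustrative values, not anything mandated by Spack. Note that plain
# strings are accepted because create() spec-ifies them itself.
present, mirrored, error = create(
    '/tmp/demo-mirror', ['zlib', Spec('mpileaks')], num_versions=2)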
def default_log_file(spec):
    """Computes the default filename for the log file and creates
    the corresponding directory if not present
    """
    fmt = 'test-{x.name}-{x.version}-{hash}.xml'
    basename = fmt.format(x=spec, hash=spec.dag_hash())
    dirname = fs.os.path.join(spack.paths.var_path, 'junit-report')
    fs.mkdirp(dirname)
    return fs.os.path.join(dirname, basename)
def create_install_directory(self, spec):
    _check_concrete(spec)

    prefix = self.check_installed(spec)
    if prefix:
        raise InstallDirectoryAlreadyExistsError(prefix)

    mkdirp(self.metadata_path(spec))
    self.write_spec(spec, self.spec_file_path(spec))
def create_repo(root, namespace=None):
    """Create a new repository in root with the specified namespace.

    If the namespace is not provided, use basename of root.
    Return the canonicalized path and namespace of the created repository.
    """
    root = canonicalize_path(root)
    if not namespace:
        namespace = os.path.basename(root)

    if not re.match(r'\w[\.\w-]*', namespace):
        raise InvalidNamespaceError(
            "'%s' is not a valid namespace." % namespace)

    existed = False
    if os.path.exists(root):
        if os.path.isfile(root):
            raise BadRepoError(
                'File %s already exists and is not a directory' % root)
        elif os.path.isdir(root):
            if not os.access(root, os.R_OK | os.W_OK):
                raise BadRepoError(
                    'Cannot create new repo in %s: cannot access directory.'
                    % root)
            if os.listdir(root):
                raise BadRepoError(
                    'Cannot create new repo in %s: directory is not empty.'
                    % root)
        existed = True

    full_path = os.path.realpath(root)
    parent = os.path.dirname(full_path)
    if not os.access(parent, os.R_OK | os.W_OK):
        raise BadRepoError(
            "Cannot create repository in %s: can't access parent!" % root)

    try:
        config_path = os.path.join(root, repo_config_name)
        packages_path = os.path.join(root, packages_dir_name)

        mkdirp(packages_path)
        with open(config_path, 'w') as config:
            config.write("repo:\n")
            config.write("  namespace: '%s'\n" % namespace)
    except (IOError, OSError) as e:
        # try to clean up.
        if existed:
            shutil.rmtree(config_path, ignore_errors=True)
            shutil.rmtree(packages_path, ignore_errors=True)
        else:
            shutil.rmtree(root, ignore_errors=True)

        raise BadRepoError('Failed to create new repository in %s.' % root,
                           "Caused by %s: %s" % (type(e), e))

    return full_path, namespace
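# Hedged usage sketch -- the path and namespace below are hypothetical.
full_path, namespace = create_repo('/tmp/my-spack-repo', 'mycompany')
# full_path now contains a repo config file plus an empty packages/
# directory; `spack repo add /tmp/my-spack-repo` would register it.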
def create(parser, args):
    url = args.url
    if not url:
        setup_parser.subparser.print_help()
        return

    # Figure out a name and repo for the package.
    name, version = guess_name_and_version(url, args)
    spec = Spec(name)
    name = spec.name  # factors out namespace, if any
    repo = find_repository(spec, args)

    tty.msg("This looks like a URL for %s version %s" % (name, version))
    tty.msg("Creating template for package %s" % name)

    # Fetch tarballs (prompting user if necessary)
    versions, urls = fetch_tarballs(url, name, version)

    # Try to guess what configure system is used.
    guesser = ConfigureGuesser()
    ver_hash_tuples = spack.cmd.checksum.get_checksums(
        versions, urls,
        first_stage_function=guesser,
        keep_stage=args.keep_stage)

    if not ver_hash_tuples:
        tty.die("Could not fetch any tarballs for %s" % name)

    # Prepend 'py-' to python package names, by convention.
    if guesser.build_system == 'python':
        name = 'py-%s' % name

    # Prepend 'r-' to R package names, by convention.
    if guesser.build_system == 'r':
        name = 'r-%s' % name

    # Create a directory for the new package.
    pkg_path = repo.filename_for_package_name(name)
    if os.path.exists(pkg_path) and not args.force:
        tty.die("%s already exists." % pkg_path)
    else:
        mkdirp(os.path.dirname(pkg_path))

    # Write out a template for the file
    with open(pkg_path, "w") as pkg_file:
        pkg_file.write(
            package_template.substitute(
                name=name,
                configure=guesser.configure,
                class_name=mod_to_class(name),
                url=url,
                versions=make_version_calls(ver_hash_tuples)))

    # If everything checks out, go ahead and edit.
    spack.editor(pkg_path)
    tty.msg("Created package %s" % pkg_path)
def __init__(self):
    self.tempdir = tempfile.mkdtemp()

    self.directory = os.path.join(self.tempdir, 'dir')
    mkdirp(self.directory)

    # Script with short shebang
    self.short_shebang = os.path.join(self.tempdir, 'short')
    with open(self.short_shebang, 'w') as f:
        f.write(short_line)
        f.write(last_line)

    # Script with long shebang
    self.long_shebang = os.path.join(self.tempdir, 'long')
    with open(self.long_shebang, 'w') as f:
        f.write(long_line)
        f.write(last_line)

    # Lua script with long shebang
    self.lua_shebang = os.path.join(self.tempdir, 'lua')
    with open(self.lua_shebang, 'w') as f:
        f.write(lua_line)
        f.write(last_line)

    # Lua script with a shebang-like line embedded in its body text
    self.lua_textbang = os.path.join(self.tempdir, 'lua_in_text')
    with open(self.lua_textbang, 'w') as f:
        f.write(short_line)
        f.write(lua_in_text)
        f.write(last_line)

    # Node script with long shebang
    self.node_shebang = os.path.join(self.tempdir, 'node')
    with open(self.node_shebang, 'w') as f:
        f.write(node_line)
        f.write(last_line)

    # Node script with a shebang-like line embedded in its body text
    self.node_textbang = os.path.join(self.tempdir, 'node_in_text')
    with open(self.node_textbang, 'w') as f:
        f.write(short_line)
        f.write(node_in_text)
        f.write(last_line)

    # Script already using sbang.
    self.has_sbang = os.path.join(self.tempdir, 'shebang')
    with open(self.has_sbang, 'w') as f:
        f.write(sbang_line)
        f.write(long_line)
        f.write(last_line)

    # Fake binary file.
    self.binary = os.path.join(self.tempdir, 'binary')
    tar = which('tar', required=True)
    tar('czf', self.binary, self.has_sbang)
def write_section(self, section):
    filename = self.get_section_filename(section)
    data = self.get_section(section)
    try:
        mkdirp(self.path)
        with open(filename, 'w') as f:
            _validate_section(data, section_schemas[section])
            syaml.dump(data, stream=f, default_flow_style=False)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigFileError(
            "Error writing to config file: '%s'" % str(e))
def store(self, fetcher, relative_dest):
    # skip fetchers that aren't cachable
    if not fetcher.cachable:
        return

    # Don't store things that are already cached.
    if isinstance(fetcher, CacheURLFetchStrategy):
        return

    dst = os.path.join(self.root, relative_dest)
    mkdirp(os.path.dirname(dst))
    fetcher.archive(dst)
def module_refresh():
    """Regenerate all module files for installed packages known to
    spack (some packages may no longer exist)."""
    specs = [s for s in spack.installed_db.query(installed=True, known=True)]

    for name, cls in module_types.items():
        tty.msg("Regenerating %s module files." % name)
        if os.path.isdir(cls.path):
            shutil.rmtree(cls.path, ignore_errors=False)
        mkdirp(cls.path)
        for spec in specs:
            cls(spec).write()
def write(self):
    """Write out a module file for this object."""
    module_dir = os.path.dirname(self.file_name)
    if not os.path.exists(module_dir):
        mkdirp(module_dir)

    # If there are no paths, no need for a dotkit.
    if not self.paths:
        return

    with closing(open(self.file_name, 'w')) as f:
        self._write(f)
def add_single_spec(spec, mirror_root, categories, **kwargs):
    tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@")))
    spec_exists_in_mirror = True
    try:
        with spec.package.stage:
            # fetcher = stage.fetcher
            # fetcher.fetch()
            # ...
            # fetcher.archive(archive_path)
            for ii, stage in enumerate(spec.package.stage):
                fetcher = stage.fetcher
                if ii == 0:
                    # create a subdirectory for the current package@version
                    archive_path = os.path.abspath(os.path.join(
                        mirror_root, mirror_archive_path(spec, fetcher)))
                    name = spec.cformat("$_$@")
                else:
                    resource = stage.resource
                    archive_path = os.path.abspath(os.path.join(
                        mirror_root,
                        mirror_archive_path(spec, fetcher, resource.name)))
                    name = "{resource} ({pkg}).".format(
                        resource=resource.name, pkg=spec.cformat("$_$@"))
                subdir = os.path.dirname(archive_path)
                mkdirp(subdir)

                if os.path.exists(archive_path):
                    tty.msg("{name} : already added".format(name=name))
                else:
                    spec_exists_in_mirror = False
                    fetcher.fetch()
                    if not kwargs.get('no_checksum', False):
                        fetcher.check()
                        tty.msg("{name} : checksum passed".format(name=name))

                    # Fetchers have to know how to archive their files.  Use
                    # that to move/copy/create an archive in the mirror.
                    fetcher.archive(archive_path)
                    tty.msg("{name} : added".format(name=name))

        if spec_exists_in_mirror:
            categories['present'].append(spec)
        else:
            categories['mirrored'].append(spec)
    except Exception as e:
        if spack.config.get('config:debug'):
            sys.excepthook(*sys.exc_info())
        else:
            tty.warn(
                "Error while fetching %s" % spec.cformat('$_$@'), str(e))
        categories['error'].append(spec)
def write_section(self, section):
    _validate(self.sections, self.schema)
    try:
        parent = os.path.dirname(self.path)
        mkdirp(parent)

        tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
        with open(tmp, 'w') as f:
            syaml.dump(self.sections, stream=f, default_flow_style=False)
        os.rename(tmp, self.path)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigFileError(
            "Error writing to config file: '%s'" % str(e))
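# The same write-to-temp-then-rename recipe, isolated as a stdlib-only
# helper (the function name is mine, not Spack's). On POSIX, a rename
# within one filesystem is atomic, so readers never see a partial file.
import os
import tempfile


def atomic_write(path, text):
    parent = os.path.dirname(path) or '.'
    os.makedirs(parent, exist_ok=True)
    # hidden temp file in the same directory as the destination, so the
    # final rename cannot cross a filesystem boundary
    fd, tmp = tempfile.mkstemp(
        prefix='.%s.' % os.path.basename(path), suffix='.tmp', dir=parent)
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(text)
        os.rename(tmp, path)
    except BaseException:
        os.unlink(tmp)
        raise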
def dotkit_refresh(parser, args):
    query_specs = spack.cmd.parse_specs(args.spec)

    specs = spack.db.installed_package_specs()
    if query_specs:
        specs = [s for s in specs
                 if any(s.satisfies(q) for q in query_specs)]
    else:
        shutil.rmtree(spack.dotkit_path, ignore_errors=False)
        mkdirp(spack.dotkit_path)

    for spec in specs:
        spack.hooks.dotkit.post_install(spec.package)
def merge_directories(self, dest_root, ignore):
    for src, dest in traverse_tree(self._root, dest_root, ignore=ignore):
        if os.path.isdir(src):
            if not os.path.exists(dest):
                mkdirp(dest)
                continue

            if not os.path.isdir(dest):
                raise ValueError("File blocks directory: %s" % dest)

            # mark empty directories so they aren't removed on unmerge.
            if not os.listdir(dest):
                marker = os.path.join(dest, empty_file_name)
                touch(marker)
def push_to_url(local_file_path, remote_path, keep_original=True,
                extra_args=None):
    remote_url = url_util.parse(remote_path)
    verify_ssl = spack.config.get('config:verify_ssl')

    if __UNABLE_TO_VERIFY_SSL and verify_ssl and uses_ssl(remote_url):
        warn_no_ssl_cert_checking()

    remote_file_path = url_util.local_file_path(remote_url)
    if remote_file_path is not None:
        mkdirp(os.path.dirname(remote_file_path))
        if keep_original:
            shutil.copy(local_file_path, remote_file_path)
        else:
            try:
                os.rename(local_file_path, remote_file_path)
            except OSError as e:
                if e.errno == errno.EXDEV:
                    # NOTE(opadron): The above move failed because it crosses
                    # filesystem boundaries.  Copy the file (plus original
                    # metadata), and then delete the original.  This operation
                    # needs to be done in separate steps.
                    shutil.copy2(local_file_path, remote_file_path)
                    os.remove(local_file_path)
                else:
                    raise

    elif remote_url.scheme == 's3':
        if extra_args is None:
            extra_args = {}

        remote_path = remote_url.path
        while remote_path.startswith('/'):
            remote_path = remote_path[1:]

        s3 = s3_util.create_s3_session(remote_url)
        s3.upload_file(local_file_path, remote_url.netloc,
                       remote_path, ExtraArgs=extra_args)

        if not keep_original:
            os.remove(local_file_path)

    else:
        raise NotImplementedError(
            'Unrecognized URL scheme: {SCHEME}'.format(
                SCHEME=remote_url.scheme))
def __init__(self, root, db_dir=None):
    """Create a Database for Spack installations under ``root``.

    A Database is a cache of Specs data from ``$prefix/spec.yaml``
    files in Spack installation directories.

    By default, Database files (data and lock files) are stored
    under ``root/.spack-db``, which is created if it does not
    exist.  This is the ``db_dir``.

    The Database will attempt to read an ``index.json`` file in
    ``db_dir``.  If it does not find one, it will fall back to read
    an ``index.yaml`` if one is present.  If that does not exist, it
    will create a database when needed by scanning the entire
    Database root for ``spec.yaml`` files according to Spack's
    ``DirectoryLayout``.

    Caller may optionally provide a custom ``db_dir`` parameter
    where data will be stored.  This is intended to be used for
    testing the Database class.
    """
    self.root = root

    if db_dir is None:
        # If the db_dir is not provided, default to within the db root.
        self._db_dir = os.path.join(self.root, _db_dirname)
    else:
        # Allow customizing the database directory location for testing.
        self._db_dir = db_dir

    # Set up layout of database files within the db dir
    self._old_yaml_index_path = os.path.join(self._db_dir, 'index.yaml')
    self._index_path = os.path.join(self._db_dir, 'index.json')
    self._lock_path = os.path.join(self._db_dir, 'lock')

    # This is for other classes to use to lock prefix directories.
    self.prefix_lock_path = os.path.join(self._db_dir, 'prefix_lock')

    # Create needed directories and files
    if not os.path.exists(self._db_dir):
        mkdirp(self._db_dir)

    # initialize rest of state.
    self.lock = Lock(self._lock_path)
    self._data = {}

    # whether there was an error at the start of a read transaction
    self._error = None
def instance_path_for_stage():
    """
    Temporarily use the "traditional" spack instance stage path for staging.

    Note that it can be important for other tests that the previous settings
    be restored when the test case is over.
    """
    current = spack.config.get('config:build_stage')
    base = canonicalize_path(os.path.join('$spack', 'test-stage'))
    mkdirp(base)

    path = tempfile.mkdtemp(dir=base)
    spack.config.set('config', {'build_stage': path}, scope='user')
    yield
    spack.config.set('config', {'build_stage': current}, scope='user')
    shutil.rmtree(base)
def _write_section(self, section):
    filename = self.get_section_filename(section)
    data = self.get_section(section)

    # We copy data here to avoid adding defaults at write time
    validate_data = copy.deepcopy(data)
    validate(validate_data, section_schemas[section])

    try:
        mkdirp(self.path)
        with open(filename, 'w') as f:
            syaml.dump_config(data, stream=f, default_flow_style=False)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigFileError(
            "Error writing to config file: '%s'" % str(e))
def test_fetch(tmpdir, _fetch_method):
    """Ensure a fetch after expanding is effectively a no-op."""
    testpath = str(tmpdir)
    cache = os.path.join(testpath, 'cache.tar.gz')
    touch(cache)
    if is_windows:
        url_stub = '{0}'
    else:
        url_stub = '/{0}'
    url = 'file://' + url_stub.format(cache)
    with spack.config.override('config:url_fetch_method', _fetch_method):
        fetcher = CacheURLFetchStrategy(url=url)
        with Stage(fetcher, path=testpath) as stage:
            source_path = stage.source_path
            mkdirp(source_path)
            fetcher.fetch()
def link(src, dest):
    abs_src = os.path.abspath(src)
    conf_py = os.path.join(abs_src, 'conf.py')
    if not os.path.exists(conf_py):
        tty.die("Not a Sphinx docs directory: %s" % src)

    fs.mkdirp(dest)
    with fs.working_dir(dest):
        rel_src = os.path.relpath(abs_src)
        lt = LinkTree(rel_src)
        lt.merge('.', relative=True, ignore=lambda f: '_spack_root' in f)

        for f in copy_files:
            os.remove(f)
            shutil.copy(os.path.join(abs_src, f), f)
def mock_pkg_git_repo(tmpdir_factory):
    """Copy the builtin.mock repo and make a mutable git repo inside it."""
    tmproot = tmpdir_factory.mktemp('mock_pkg_git_repo')
    repo_path = tmproot.join('builtin.mock')

    shutil.copytree(spack.paths.mock_packages_path, str(repo_path))
    mock_repo = spack.repo.RepoPath(str(repo_path))
    mock_repo_packages = mock_repo.repos[0].packages_path

    git = which('git', required=True)
    with working_dir(mock_repo_packages):
        git('init')

        # initial commit with mock packages
        # the -f is necessary in case people ignore build-* in their ignores
        git('add', '-f', '.')
        git('config', 'user.email', '*****@*****.**')
        git('config', 'user.name', 'Spack Testing')
        git('-c', 'commit.gpgsign=false', 'commit',
            '-m', 'initial mock repo commit')

        # add commit with pkg-a, pkg-b, pkg-c packages
        mkdirp('pkg-a', 'pkg-b', 'pkg-c')
        with open('pkg-a/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgA'))
        with open('pkg-b/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgB'))
        with open('pkg-c/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgC'))
        git('add', 'pkg-a', 'pkg-b', 'pkg-c')
        git('-c', 'commit.gpgsign=false', 'commit',
            '-m', 'add pkg-a, pkg-b, pkg-c')

        # remove pkg-c, add pkg-d
        with open('pkg-b/package.py', 'a') as f:
            f.write('\n# change pkg-b')
        git('add', 'pkg-b')
        mkdirp('pkg-d')
        with open('pkg-d/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgD'))
        git('add', 'pkg-d')
        git('rm', '-rf', 'pkg-c')
        git('-c', 'commit.gpgsign=false', 'commit',
            '-m', 'change pkg-b, remove pkg-c, add pkg-d')

    with spack.repo.use_repositories(mock_repo):
        yield mock_repo_packages
def __call__(self, *args, **kwargs):
    self.write_reproducibility_data()

    remove_directory = kwargs.get('remove_directory', True)
    dirty = kwargs.get('dirty', False)
    fail_first = kwargs.get('fail_first', False)

    for spec in self.specs:
        try:
            msg = "A package object cannot run in two test suites at once"
            assert not spec.package.test_suite, msg

            # Set up the test suite to know which test is running
            spec.package.test_suite = self
            self.current_base_spec = spec
            self.current_test_spec = spec

            # setup per-test directory in the stage dir
            test_dir = self.test_dir_for_spec(spec)
            if os.path.exists(test_dir):
                shutil.rmtree(test_dir)
            fs.mkdirp(test_dir)

            # run the package tests
            spec.package.do_test(dirty=dirty)

            # Clean up on success and log passed test
            if remove_directory:
                shutil.rmtree(test_dir)
            self.write_test_result(spec, 'PASSED')
        except BaseException as exc:
            if isinstance(exc, SyntaxError):
                # Create the test log file and report the error.
                self.ensure_stage()
                msg = 'Testing package {0}\n{1}'.format(
                    self.test_pkg_id(spec), str(exc))
                _add_msg_to_file(self.log_file_for_spec(spec), msg)

            self.write_test_result(spec, 'FAILED')
            if fail_first:
                break
        finally:
            spec.package.test_suite = None
            self.current_test_spec = None
            self.current_base_spec = None
def expand(self):
    if not self.expand_archive:
        tty.msg("Skipping expand step for %s" % self.archive_file)
        return

    tty.msg("Staging archive: %s" % self.archive_file)

    if not self.archive_file:
        raise NoArchiveFileError(
            "Couldn't find archive file",
            "Failed on expand() for URL %s" % self.url)

    if not self.extension:
        self.extension = extension(self.archive_file)

    decompress = decompressor_for(self.archive_file, self.extension)

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(self.stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    with working_dir(tarball_container):
        decompress(self.archive_file)

    # Check for an exploding tarball, i.e. one that doesn't expand
    # to a single directory.  If the tarball *didn't* explode,
    # move contents up & remove the container directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata
    # in hidden files, which can end up *alongside* a single
    # top-level directory.  We ignore hidden files to accommodate
    # these "semi-exploding" tarballs.
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        expanded_dir = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(expanded_dir):
            for f in files:
                shutil.move(os.path.join(tarball_container, f),
                            os.path.join(self.stage.path, f))
            os.rmdir(tarball_container)

    if not files:
        os.rmdir(tarball_container)
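# The hidden-file handling is the subtle part of expand(). Isolated as a
# runnable stdlib-only helper (the function name is mine, not Spack's):
import os
import shutil


def hoist_single_top_level_dir(container, dest):
    # If `container` holds exactly one non-hidden entry and that entry is
    # a directory, move everything (hidden files included) up into `dest`
    # and drop the now-empty container -- i.e. the tarball did *not*
    # explode, so the extra nesting level is removed.
    files = os.listdir(container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        expanded_dir = os.path.join(container, non_hidden[0])
        if os.path.isdir(expanded_dir):
            for f in files:
                shutil.move(os.path.join(container, f),
                            os.path.join(dest, f))
            os.rmdir(container)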
def create_stage_archive(expected_file_list=[_include_readme]):
    tmpdir, test_stage_path = tmp_build_stage_dir
    mkdirp(test_stage_path)

    # Create the archive directory and associated file
    archive_dir = tmpdir.join(_archive_base)
    archive = tmpdir.join(_archive_fn)
    archive_url = 'file://' + str(archive)
    archive_dir.ensure(dir=True)

    # Create the optional files as requested and make sure expanded
    # archive peers are included.
    tar_args = ['czf', str(_archive_fn), _archive_base]
    for _include in expected_file_list:
        if _include == _include_hidden:
            # The hidden file case stands in for the way Mac OS X tar files
            # represent HFS metadata.  Locate in the same directory as the
            # archive file.
            tar_args.append(_hidden_fn)
            fn, contents = (tmpdir.join(_hidden_fn), _hidden_contents)
        elif _include == _include_readme:
            # The usual README.txt file is contained in the archive dir.
            fn, contents = (archive_dir.join(_readme_fn), _readme_contents)
        elif _include == _include_extra:
            # The extra file stands in for exploding tar files so needs
            # to be in the same directory as the archive file.
            tar_args.append(_extra_fn)
            fn, contents = (tmpdir.join(_extra_fn), _extra_contents)
        else:
            break

        fn.write(contents)

    # Create the archive file
    with tmpdir.as_cwd():
        tar = spack.util.executable.which('tar', required=True)
        tar(*tar_args)

    Archive = collections.namedtuple(
        'Archive', ['url', 'tmpdir', 'stage_path', 'archive_dir'])
    return Archive(url=archive_url, tmpdir=tmpdir,
                   stage_path=test_stage_path, archive_dir=archive_dir)
def store(self, fetcher, relative_dest, cosmetic_path=None):
    # Note this will archive package sources even if they would not
    # normally be cached (e.g. the current tip of an hg/git branch)
    dst = os.path.join(self.root, relative_dest)
    mkdirp(os.path.dirname(dst))
    fetcher.archive(dst)

    # Add a symlink path that a human can read to understand what resource
    # the archive path refers to
    if not cosmetic_path:
        return
    cosmetic_path = os.path.join(self.root, cosmetic_path)
    relative_dst = os.path.relpath(
        dst, start=os.path.dirname(cosmetic_path))
    if not os.path.exists(cosmetic_path):
        mkdirp(os.path.dirname(cosmetic_path))
        os.symlink(relative_dst, cosmetic_path)
def test(parser, args):
    if args.list:
        print("Available tests:")
        colify(spack.test.list_tests(), indent=2)
    else:
        if not args.createXmlOutput:
            outputDir = None
        else:
            if not args.xmlOutputDir:
                outputDir = join_path(os.getcwd(), "test-output")
            else:
                outputDir = os.path.abspath(args.xmlOutputDir)

            if not os.path.exists(outputDir):
                mkdirp(outputDir)

        spack.test.run(args.names, outputDir, args.verbose)
def symlink_license(pkg):
    """Create local symlinks that point to the global license file."""
    target = pkg.global_license_file
    for filename in pkg.license_files:
        link_name = join_path(pkg.prefix, filename)
        license_dir = os.path.dirname(link_name)
        if not os.path.exists(license_dir):
            mkdirp(license_dir)

        # If example file already exists, overwrite it with a symlink
        if os.path.exists(link_name):
            os.remove(link_name)

        if os.path.exists(target):
            os.symlink(target, link_name)
            tty.msg("Added local symlink %s to global license file"
                    % link_name)
def create_install_directory(self, spec):
    _check_concrete(spec)

    # Create install directory with properly configured permissions
    # Cannot import at top of file
    from spack.package_prefs import get_package_dir_permissions
    from spack.package_prefs import get_package_group

    # Each package folder can have its own specific permissions, while
    # intermediate folders (arch/compiler) are set with access permissions
    # equivalent to the root permissions of the layout.
    group = get_package_group(spec)
    perms = get_package_dir_permissions(spec)

    fs.mkdirp(spec.prefix, mode=perms, group=group, default_perms='parents')
    fs.mkdirp(self.metadata_path(spec), mode=perms, group=group)  # in prefix

    self.write_spec(spec, self.spec_file_path(spec))
def symlink(self, mirror_ref):
    """Symlink a human readable path in our mirror to the actual
    storage location."""
    cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
    storage_path = os.path.join(self.root, mirror_ref.storage_path)
    relative_dst = os.path.relpath(
        storage_path, start=os.path.dirname(cosmetic_path))

    if not os.path.exists(cosmetic_path):
        if os.path.lexists(cosmetic_path):
            # In this case the link itself exists but it is broken: remove
            # it and recreate it (in order to fix any symlinks broken prior
            # to https://github.com/spack/spack/pull/13908)
            os.unlink(cosmetic_path)
        mkdirp(os.path.dirname(cosmetic_path))
        os.symlink(relative_dst, cosmetic_path)
def test_verify(tmpdir):
    source_dir = tmpdir.join('lib', 'spack', 'spack')
    mkdirp(str(source_dir))

    no_header = source_dir.join('no_header.py')
    touch(str(no_header))

    lgpl_header = source_dir.join('lgpl_header.py')
    with lgpl_header.open('w') as f:
        f.write("""\
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: LGPL-2.1-only
""")

    old_lgpl_header = source_dir.join('old_lgpl_header.py')
    with old_lgpl_header.open('w') as f:
        f.write("""\
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
""")

    correct_header = source_dir.join('correct_header.py')
    with correct_header.open('w') as f:
        f.write("""\
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
""")

    out = license('verify', '--root', str(tmpdir), fail_on_error=False)

    assert str(no_header) in out
    assert str(lgpl_header) in out
    assert str(old_lgpl_header) in out
    assert str(correct_header) not in out
    assert '3 improperly licensed files' in out
    assert re.search(r'files not containing expected license:\s*1', out)
    assert re.search(r'files with wrong SPDX-License-Identifier:\s*1', out)
    assert re.search(r'files with old license header:\s*1', out)

    assert license.returncode == 1
def refresh(mtype, specs, args):
    """
    Regenerate all module files for installed packages known to spack
    (some packages may no longer exist).
    """
    # Prompt a message to the user about what is going to change
    if not specs:
        tty.msg('No package matches your query')
        return

    if not args.yes_to_all:
        tty.msg('You are about to regenerate the {name} module files '
                'for the following specs:\n'.format(name=mtype))
        for s in specs:
            print(s.format(color=True))
        print('')
        ask_for_confirmation('Do you want to proceed ? ')

    cls = module_types[mtype]

    # Detect name clashes
    writers = [cls(spec) for spec in specs]
    file2writer = collections.defaultdict(list)
    for item in writers:
        file2writer[item.file_name].append(item)

    if len(file2writer) != len(writers):
        message = 'Name clashes detected in module files:\n'
        for filename, writer_list in file2writer.items():
            if len(writer_list) > 1:
                message += '\nfile : {0}\n'.format(filename)
                for x in writer_list:
                    message += 'spec : {0}\n'.format(
                        x.spec.format(color=True))
        tty.error(message)
        tty.error('Operation aborted')
        raise SystemExit(1)

    # Proceed regenerating module files
    tty.msg('Regenerating {name} module files'.format(name=mtype))
    if os.path.isdir(cls.path) and args.delete_tree:
        shutil.rmtree(cls.path, ignore_errors=False)
    mkdirp(cls.path)
    for x in writers:
        x.write(overwrite=True)
def test_merge_with_empty_directories(stage, link_tree):
    with working_dir(stage.path):
        mkdirp('dest/f/g')
        mkdirp('dest/a/b/h')

        link_tree.merge('dest')
        link_tree.unmerge('dest')

        assert not os.path.exists('dest/1')
        assert not os.path.exists('dest/a/b/2')
        assert not os.path.exists('dest/a/b/3')
        assert not os.path.exists('dest/c/4')
        assert not os.path.exists('dest/c/d/5')
        assert not os.path.exists('dest/c/d/6')
        assert not os.path.exists('dest/c/d/e/7')

        assert os.path.isdir('dest/a/b/h')
        assert os.path.isdir('dest/f/g')
def test_pkg_add(mock_pkg_git_repo):
    with working_dir(mock_pkg_git_repo):
        mkdirp('pkg-e')
        with open('pkg-e/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgE'))

    pkg('add', 'pkg-e')

    git = which('git', required=True)
    with working_dir(mock_pkg_git_repo):
        try:
            assert ('A  pkg-e/package.py' in
                    git('status', '--short', output=str))
        finally:
            shutil.rmtree('pkg-e')

    with pytest.raises(spack.main.SpackCommandError):
        pkg('add', 'does-not-exist')
def __init__(self, root, timeout=120):
    """Create a file cache object.

    This will create the cache directory if it does not exist yet.

    Args:
        root: specifies the root directory where the cache stores files
        timeout: when there is contention among multiple Spack processes
            for cache files, this specifies how long Spack should wait
            before assuming that there is a deadlock.
    """
    self.root = root.rstrip(os.path.sep)
    if not os.path.exists(self.root):
        mkdirp(self.root)

    self._locks = {}
    self.lock_timeout = timeout
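# mkdirp, used throughout these snippets, is Spack's thin wrapper over
# recursive directory creation; for the plain no-mode/no-group case it
# behaves like this stdlib one-liner:
import os


def mkdirp_sketch(path):
    # create intermediate directories, tolerate ones that already exist
    os.makedirs(path, exist_ok=True)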
def _download_buildcache_entry(mirror_root, descriptions):
    for description in descriptions:
        url = os.path.join(mirror_root, description['url'])
        path = description['path']
        fail_if_missing = description['required']

        mkdirp(path)

        stage = Stage(url, name="build_cache", path=path, keep=True)

        try:
            stage.fetch()
        except fs.FetchError:
            if fail_if_missing:
                tty.error('Failed to download required url {0}'.format(url))
                return False

    return True
def install_sbang():
    """Ensure that ``sbang`` is installed in the root of Spack's install_tree.

    This is the shortest known publicly accessible path, and installing
    ``sbang`` here ensures that users can access the script and that
    ``sbang`` itself is in a short path.
    """
    # copy in a new version of sbang if it differs from what's in spack
    sbang_path = sbang_install_path()
    if os.path.exists(sbang_path) and filecmp.cmp(
            spack.paths.sbang_script, sbang_path):
        return

    # make $install_tree/bin and copy in a new version of sbang if needed
    sbang_bin_dir = os.path.dirname(sbang_path)
    fs.mkdirp(sbang_bin_dir)
    fs.install(spack.paths.sbang_script, sbang_path)
    fs.set_install_permissions(sbang_bin_dir)
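# For context: sbang exists because kernels truncate long `#!` lines
# (127 bytes is the traditional Linux limit). A hedged sketch of the
# detection step -- the limit constant and helper name are illustrative,
# not Spack's actual implementation:
SHEBANG_LIMIT = 127


def needs_sbang(script_path):
    # A script needs the sbang workaround when its first line is a
    # shebang longer than the kernel will honor.
    with open(script_path, 'rb') as f:
        first = f.readline()
    return first.startswith(b'#!') and len(first) > SHEBANG_LIMIT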
def test_bad_command_line_scopes(tmpdir, mock_low_high_config):
    cfg = spack.config.Configuration()

    with tmpdir.as_cwd():
        with pytest.raises(spack.config.ConfigError):
            spack.config._add_command_line_scopes(cfg, ['bad_path'])

        touch('unreadable_file')
        with pytest.raises(spack.config.ConfigError):
            spack.config._add_command_line_scopes(cfg, ['unreadable_file'])

        mkdirp('unreadable_dir')
        with pytest.raises(spack.config.ConfigError):
            try:
                os.chmod('unreadable_dir', 0)
                spack.config._add_command_line_scopes(cfg, ['unreadable_dir'])
            finally:
                os.chmod('unreadable_dir', 0o700)  # so tmpdir can be removed
def refresh(mtype, specs, args):
    """Regenerate module files for item in specs"""
    # Prompt a message to the user about what is going to change
    if not specs:
        tty.msg('No package matches your query')
        return

    if not args.yes_to_all:
        tty.msg('You are about to regenerate {name} module files for:\n'
                .format(name=mtype))
        spack.cmd.display_specs(specs, long=True)
        print('')
        answer = tty.get_yes_or_no('Do you want to proceed?')
        if not answer:
            tty.die('Will not regenerate any module files')

    cls = module_types[mtype]

    # Detect name clashes
    writers = [cls(spec) for spec in specs
               if spack.repo.exists(spec.name)]  # skip unknown packages.
    file2writer = collections.defaultdict(list)
    for item in writers:
        file2writer[item.file_name].append(item)

    if len(file2writer) != len(writers):
        message = 'Name clashes detected in module files:\n'
        for filename, writer_list in file2writer.items():
            if len(writer_list) > 1:
                message += '\nfile: {0}\n'.format(filename)
                for x in writer_list:
                    message += 'spec: {0}\n'.format(
                        x.spec.format(color=True))
        tty.error(message)
        tty.error('Operation aborted')
        raise SystemExit(1)

    # Proceed regenerating module files
    tty.msg('Regenerating {name} module files'.format(name=mtype))
    if os.path.isdir(cls.path) and args.delete_tree:
        shutil.rmtree(cls.path, ignore_errors=False)
    filesystem.mkdirp(cls.path)
    for x in writers:
        x.write(overwrite=True)
def test_single_file_verify_cmd(tmpdir):
    # Test the verify command interface to verifying a single file.
    filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
    filepath = os.path.join(filedir, 'file')
    metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)

    fs.mkdirp(filedir)
    fs.mkdirp(metadir)

    with open(filepath, 'w') as f:
        f.write("I'm a file")

    data = spack.verify.create_manifest_entry(filepath)

    manifest_file = os.path.join(metadir,
                                 spack.store.layout.manifest_file_name)

    with open(manifest_file, 'w') as f:
        sjson.dump({filepath: data}, f)

    results = verify('-f', filepath, fail_on_error=False)
    print(results)
    assert not results

    os.utime(filepath, (0, 0))
    with open(filepath, 'w') as f:
        f.write("I changed.")

    results = verify('-f', filepath, fail_on_error=False)

    expected = ['hash']
    mtime = os.stat(filepath).st_mtime
    if mtime != data['time']:
        expected.append('mtime')

    assert results
    assert filepath in results
    assert all(x in results for x in expected)

    results = verify('-fj', filepath, fail_on_error=False)
    res = sjson.load(results)
    assert len(res) == 1
    errors = res.pop(filepath)
    assert sorted(errors) == sorted(expected)
def _write_section(self, section):
    data_to_write = self._raw_data

    # If there is no existing data, this section SingleFileScope has never
    # been written to disk. We need to construct the portion of the data
    # from the root of self._raw_data to the level at which the config
    # sections are defined. That requires creating keys for every entry in
    # self.yaml_path
    if not data_to_write:
        data_to_write = {}
        # reverse because we construct it from the inside out
        for key in reversed(self.yaml_path):
            data_to_write = {key: data_to_write}

    # data_update_pointer is a pointer to the part of data_to_write
    # that we are currently updating.
    # We start by traversing into the data to the point at which the
    # config sections are defined. This means popping the keys from
    # self.yaml_path
    data_update_pointer = data_to_write
    for key in self.yaml_path:
        data_update_pointer = data_update_pointer[key]

    # For each section, update the data at the level of our pointer
    # with the data from the section
    for key, data in self.sections.items():
        data_update_pointer[key] = data[key]

    validate(data_to_write, self.schema)
    try:
        parent = os.path.dirname(self.path)
        mkdirp(parent)

        tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
        with open(tmp, 'w') as f:
            syaml.dump_config(data_to_write, stream=f,
                              default_flow_style=False)
        rename(tmp, self.path)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigFileError("Error writing to config file: '%s'" % str(e))
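# Standalone illustration of the inside-out construction used above
# (the yaml_path value here is hypothetical):
yaml_path = ['spack', 'env']
data = {}
for key in reversed(yaml_path):
    data = {key: data}
assert data == {'spack': {'env': {}}}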
def test_single_file_verification(tmpdir):
    # Test the API to verify a single file, including finding the package
    # to which it belongs
    filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
    filepath = os.path.join(filedir, 'file')
    metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)

    fs.mkdirp(filedir)
    fs.mkdirp(metadir)

    with open(filepath, 'w') as f:
        f.write("I'm a file")

    data = spack.verify.create_manifest_entry(filepath)

    manifest_file = os.path.join(metadir,
                                 spack.store.layout.manifest_file_name)

    with open(manifest_file, 'w') as f:
        sjson.dump({filepath: data}, f)

    results = spack.verify.check_file_manifest(filepath)
    assert not results.has_errors()

    os.utime(filepath, (0, 0))
    with open(filepath, 'w') as f:
        f.write("I changed.")

    results = spack.verify.check_file_manifest(filepath)

    expected = ['hash']
    mtime = os.stat(filepath).st_mtime
    if mtime != data['time']:
        expected.append('mtime')

    assert results.has_errors()
    assert filepath in results.errors
    assert sorted(results.errors[filepath]) == sorted(expected)

    shutil.rmtree(metadir)
    results = spack.verify.check_file_manifest(filepath)
    assert results.has_errors()
    assert results.errors[filepath] == ['not owned by any package']
def test_dir_manifest_entry(tmpdir):
    # Test that directories are properly checked against the manifest.
    # Test that the appropriate errors are generated when the check fails.
    dirent = str(tmpdir.join('dir'))
    fs.mkdirp(dirent)

    data = spack.verify.create_manifest_entry(dirent)
    assert data['type'] == 'dir'
    assert all(x in data for x in ('mode', 'owner', 'group'))

    results = spack.verify.check_entry(dirent, data)
    assert not results.has_errors()

    data['type'] = 'garbage'

    results = spack.verify.check_entry(dirent, data)
    assert results.has_errors()
    assert dirent in results.errors
    assert results.errors[dirent] == ['type']
def add_remote_packages(remote, exclude=[], nostack=False, hardlinks=False):
    """Add all installed packages in `remote` to the packages dictionary.

    If nostack == True, packages will not be re-linked if they exist.
    If hardlinks == True, packages will be hard-linked. Not recommended!
    """
    config = spack.config.get_config("config")

    # NOTE: This has to be kept in sync with spack/store.py!
    layout = spack.directory_layout.YamlDirectoryLayout(
        canonicalize_path(osp.join(remote, 'opt', 'spack')),
        hash_len=config.get('install_hash_length'),
        path_scheme=config.get('install_path_scheme'))

    num_packages = 0
    for spec in filter_exclude(layout.all_specs(), exclude):
        src = layout.path_for_spec(spec)
        tgt = spack.store.layout.path_for_spec(spec)
        if osp.exists(tgt):
            if not (nostack or hardlinks):
                if osp.islink(tgt):
                    os.remove(tgt)
                else:
                    tty.warn("Cannot stack {0} because {1} exists.".format(
                        src, tgt))
                    continue
            else:
                tty.info("Not stacking {0} because already present.".format(
                    src))
                continue

        fs.mkdirp(osp.dirname(tgt))
        tty.debug("Linking {0} -> {1}".format(src, tgt))
        if not hardlinks:
            os.symlink(src, tgt)
        else:
            os.link(src, tgt)
        num_packages += 1

    tty.info("Added {0} packages from {1}".format(num_packages, remote))
    return num_packages
def test_pkg_add(mock_pkg_git_repo):
    with working_dir(mock_pkg_git_repo):
        mkdirp('pkg-e')
        with open('pkg-e/package.py', 'w') as f:
            f.write(pkg_template.format(name='PkgE'))

    pkg('add', 'pkg-e')

    git = which('git', required=True)
    with working_dir(mock_pkg_git_repo):
        try:
            assert ('A  pkg-e/package.py' in
                    git('status', '--short', output=str))
        finally:
            shutil.rmtree('pkg-e')
            # Removing a package mid-run disrupts Spack's caching
            spack.repo.path.repos[0]._fast_package_checker.invalidate()

    with pytest.raises(spack.main.SpackCommandError):
        pkg('add', 'does-not-exist')