def expand(self):
    """Expand the downloaded archive inside a container directory.

    The archive is decompressed under ``spack-expanded-archive`` so that
    "exploding" tarballs (ones without a single top-level directory) stay
    contained; a well-behaved tarball's single directory is hoisted up
    into the stage afterwards.

    Raises:
        NoArchiveFileError: if no archive has been fetched yet.
    """
    tty.msg("Staging archive: %s" % self.archive_file)

    self.stage.chdir()
    if not self.archive_file:
        raise NoArchiveFileError("URLFetchStrategy couldn't find archive file",
                                 "Failed on expand() for URL %s" % self.url)

    decompress = decompressor_for(self.archive_file)

    # Decompress inside a dedicated container directory so an exploding
    # tarball cannot scribble all over the stage.
    tarball_container = os.path.join(self.stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    os.chdir(tarball_container)
    decompress(self.archive_file)

    # Exactly one expanded directory means the tarball did NOT explode;
    # hoist it into the stage and drop the now-empty container.
    contents = os.listdir(tarball_container)
    if len(contents) == 1:
        expanded_dir = os.path.join(tarball_container, contents[0])
        if os.path.isdir(expanded_dir):
            shutil.move(expanded_dir, self.stage.path)
            os.rmdir(tarball_container)

    # Restore the working directory to the stage.
    self.stage.chdir()
def expand(self):
    """Decompress ``self.archive_file`` into this fetcher's stage.

    Raises NoArchiveFileError if fetch() has not produced an archive yet.
    """
    tty.msg("Staging archive: %s" % self.archive_file)

    # Work from the stage directory.
    self.stage.chdir()
    if not self.archive_file:
        raise NoArchiveFileError(
            "URLFetchStrategy couldn't find archive file",
            "Failed on expand() for URL %s" % self.url)

    # Choose a decompressor based on the archive file.
    decompress = decompressor_for(self.archive_file)

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(self.stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    os.chdir(tarball_container)
    decompress(self.archive_file)

    # If the tarball *didn't* explode, move
    # the expanded directory up & remove the protector directory.
    files = os.listdir(tarball_container)
    if len(files) == 1:
        expanded_dir = os.path.join(tarball_container, files[0])
        if os.path.isdir(expanded_dir):
            shutil.move(expanded_dir, self.stage.path)
            os.rmdir(tarball_container)

    # Set the wd back to the stage when done.
    self.stage.chdir()
def check_mirror(self):
    """Create a mirror from self.repos and verify its layout and contents.

    Builds the mirror in a temporary Stage, decompresses each mirrored
    archive, and compares the result against the original repository.
    """
    stage = Stage('spack-mirror-test')
    mirror_root = join_path(stage.path, 'test-mirror')
    try:
        os.chdir(stage.path)
        spack.mirror.create(
            mirror_root, self.repos, no_checksum=True)

        # Stage directory exists
        self.assertTrue(os.path.isdir(mirror_root))

        # subdirs for each package
        for name in self.repos:
            subdir = join_path(mirror_root, name)
            self.assertTrue(os.path.isdir(subdir))

            # Each package subdir holds exactly one archive file.
            files = os.listdir(subdir)
            self.assertEqual(len(files), 1)

            # Decompress archive in the mirror
            archive = files[0]
            archive_path = join_path(subdir, archive)
            decomp = decompressor_for(archive_path)

            with working_dir(subdir):
                decomp(archive_path)

                # Find the untarred archive directory.
                files = os.listdir(subdir)
                self.assertEqual(len(files), 2)
                self.assertTrue(archive in files)
                files.remove(archive)

                expanded_archive = join_path(subdir, files[0])
                self.assertTrue(os.path.isdir(expanded_archive))

                # Compare the original repo with the expanded archive
                repo = self.repos[name]
                if not 'svn' in name:
                    original_path = repo.path
                else:
                    # svn repos need a fresh checkout to compare against.
                    co = 'checked_out'
                    svn('checkout', repo.url, co)
                    original_path = join_path(subdir, co)
                dcmp = dircmp(original_path, expanded_archive)

                # make sure there are no new files in the expanded tarball
                self.assertFalse(dcmp.right_only)
                # NOTE(review): `exclude` is not defined in this method —
                # presumably a module-level allow-list of files expected to
                # exist only in the original repo; confirm in context.
                self.assertTrue(all(l in exclude for l in dcmp.left_only))
    finally:
        stage.destroy()
def expand_archive(self):
    """Change to the stage directory and expand the downloaded archive.

    Dies (via tty.die) if no archive has been downloaded yet; fails if
    the stage is not set up.
    """
    self.chdir()
    if not self.archive_file:
        tty.die("Attempt to expand archive before fetching.")
    # Look up the right decompressor for the archive and run it.
    unpack = decompressor_for(self.archive_file)
    unpack(self.archive_file)
def check_mirror(self):
    """Create a mirror from self.repos and verify its layout and contents.

    Builds the mirror in a temporary Stage, decompresses each mirrored
    archive, and compares the result against the original repository.
    """
    stage = Stage('spack-mirror-test')
    mirror_root = join_path(stage.path, 'test-mirror')
    try:
        os.chdir(stage.path)
        spack.mirror.create(
            mirror_root, self.repos, no_checksum=True)

        # Stage directory exists
        self.assertTrue(os.path.isdir(mirror_root))

        # subdirs for each package
        for name in self.repos:
            subdir = join_path(mirror_root, name)
            self.assertTrue(os.path.isdir(subdir))

            # Each package subdir holds exactly one archive file.
            files = os.listdir(subdir)
            self.assertEqual(len(files), 1)

            # Decompress archive in the mirror
            archive = files[0]
            archive_path = join_path(subdir, archive)
            decomp = decompressor_for(archive_path)

            with working_dir(subdir):
                decomp(archive_path)

                # Find the untarred archive directory.
                files = os.listdir(subdir)
                self.assertEqual(len(files), 2)
                self.assertTrue(archive in files)
                files.remove(archive)

                expanded_archive = join_path(subdir, files[0])
                self.assertTrue(os.path.isdir(expanded_archive))

                # Compare the original repo with the expanded archive
                repo = self.repos[name]
                if not 'svn' in name:
                    original_path = repo.path
                else:
                    # svn repos need a fresh checkout to compare against.
                    co = 'checked_out'
                    svn('checkout', repo.url, co)
                    original_path = join_path(subdir, co)
                dcmp = dircmp(original_path, expanded_archive)

                # make sure there are no new files in the expanded tarball
                self.assertFalse(dcmp.right_only)
                self.assertTrue(all(l in exclude for l in dcmp.left_only))
    finally:
        # BUG FIX: cleanup had been commented out (`pass #stage.destroy()`),
        # leaking a temporary stage directory on every run. Restore the
        # destroy, matching the sibling check_mirror implementation.
        stage.destroy()
def test_native_unpacking(tmpdir_factory, archive_file):
    """Unpacking an archive into an empty dir yields one file containing TEST."""
    ext = scomp.extension(archive_file)
    unpack = scomp.decompressor_for(archive_file, ext)

    workdir = str(tmpdir_factory.mktemp("comp_test"))
    with working_dir(workdir):
        # Start from an empty directory, then unpack into it.
        assert not os.listdir(os.getcwd())
        unpack(archive_file)

        produced = os.listdir(os.getcwd())
        assert len(produced) == 1
        with open(produced[0], 'r') as fh:
            assert 'TEST' in fh.read()
def expand(self):
    """Decompress the fetched archive into the stage.

    Skips entirely when ``self.expand_archive`` is false. Decompression
    happens inside a container directory so an "exploding" tarball (one
    without a single top-level directory) cannot litter the stage.

    Raises:
        NoArchiveFileError: if no archive has been fetched yet.
    """
    if not self.expand_archive:
        tty.msg("Skipping expand step for %s" % self.archive_file)
        return

    tty.msg("Staging archive: %s" % self.archive_file)

    if not self.archive_file:
        raise NoArchiveFileError(
            "Couldn't find archive file",
            "Failed on expand() for URL %s" % self.url)

    # Derive the extension from the archive name if not given explicitly.
    if not self.extension:
        self.extension = extension(self.archive_file)
    decompress = decompressor_for(self.archive_file, self.extension)

    # Contain exploding tarballs in their own directory.
    container = os.path.join(self.stage.path, "spack-expanded-archive")
    mkdirp(container)
    with working_dir(container):
        decompress(self.archive_file)

    # NOTE: The tar program on Mac OS X can encode HFS metadata in hidden
    # files *alongside* a single top-level directory ("semi-exploding"
    # tarballs), so hidden entries are ignored when deciding whether the
    # tarball exploded.
    entries = os.listdir(container)
    visible = [e for e in entries if not e.startswith('.')]
    if len(visible) == 1 and os.path.isdir(os.path.join(container, visible[0])):
        # The tarball did not explode: hoist everything (including any
        # hidden metadata files) up into the stage, then drop the container.
        for entry in entries:
            shutil.move(os.path.join(container, entry),
                        os.path.join(self.stage.path, entry))
        os.rmdir(container)

    # An empty archive leaves an empty container behind; remove it.
    if not entries:
        os.rmdir(container)
def expand(self):
    """Decompress the fetched archive into the stage (chdir-based variant).

    Skips entirely when ``self.expand_archive`` is false. Raises
    NoArchiveFileError if no archive has been fetched yet.
    """
    if not self.expand_archive:
        tty.msg("Skipping expand step for %s" % self.archive_file)
        return

    tty.msg("Staging archive: %s" % self.archive_file)

    self.stage.chdir()
    if not self.archive_file:
        raise NoArchiveFileError(
            "Couldn't find archive file",
            "Failed on expand() for URL %s" % self.url)

    # Derive the extension from the archive name if not given explicitly.
    if not self.extension:
        self.extension = extension(self.archive_file)
    decompress = decompressor_for(self.archive_file, self.extension)

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(self.stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    os.chdir(tarball_container)
    decompress(self.archive_file)

    # Check for an exploding tarball, i.e. one that doesn't expand
    # to a single directory. If the tarball *didn't* explode,
    # move contents up & remove the container directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata
    # in hidden files, which can end up *alongside* a single
    # top-level directory. We ignore hidden files to accomodate
    # these "semi-exploding" tarballs.
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        expanded_dir = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(expanded_dir):
            for f in files:
                shutil.move(os.path.join(tarball_container, f),
                            os.path.join(self.stage.path, f))
            os.rmdir(tarball_container)

    # An empty archive leaves an empty container behind; remove it.
    if not files:
        os.rmdir(tarball_container)

    # Set the wd back to the stage when done.
    self.stage.chdir()
def expand(self):
    """Expand (or simply stage) the fetched archive into the stage source dir.

    When ``self.expand_archive`` is false, the archive file itself is
    placed in the source directory unexpanded. Otherwise the archive is
    decompressed in a container directory and its contents moved into
    ``self.stage.source_path``. Raises NoArchiveFileError if no archive
    has been fetched yet.
    """
    if not self.expand_archive:
        tty.msg("Staging unexpanded archive %s in %s" % (
            self.archive_file, self.stage.source_path))
        if not self.stage.expanded:
            mkdirp(self.stage.source_path)
        dest = os.path.join(self.stage.source_path,
                            os.path.basename(self.archive_file))
        # if the archive is a symlink itself, copy the target because
        # otherwise the symlink target might get modified by
        # staging-operations
        if os.path.islink(self.archive_file):
            shutil.copy(self.archive_file, dest)
        else:
            shutil.move(self.archive_file, dest)
        return

    tty.msg("Staging archive: %s" % self.archive_file)

    if not self.archive_file:
        raise NoArchiveFileError(
            "Couldn't find archive file",
            "Failed on expand() for URL %s" % self.url)

    # Derive the extension from the archive name if not given explicitly.
    if not self.extension:
        self.extension = extension(self.archive_file)

    # Idempotence: don't re-expand an already-staged source.
    if self.stage.expanded:
        tty.debug('Source already staged to %s' % self.stage.source_path)
        return

    decompress = decompressor_for(self.archive_file, self.extension)

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(self.stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    with working_dir(tarball_container):
        decompress(self.archive_file)

    # Check for an exploding tarball, i.e. one that doesn't expand to
    # a single directory. If the tarball *didn't* explode, move its
    # contents to the staging source directory & remove the container
    # directory. If the tarball did explode, just rename the tarball
    # directory to the staging source directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata in
    # hidden files, which can end up *alongside* a single top-level
    # directory. We initially ignore presence of hidden files to
    # accomodate these "semi-exploding" tarballs but ensure the files
    # are copied to the source directory.
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        src = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(src):
            # Single top-level directory: record it and make it the source.
            self.stage.srcdir = non_hidden[0]
            shutil.move(src, self.stage.source_path)
            if len(files) > 1:
                # Move any remaining (hidden) files up into the stage.
                files.remove(non_hidden[0])
                for f in files:
                    src = os.path.join(tarball_container, f)
                    dest = os.path.join(self.stage.path, f)
                    shutil.move(src, dest)
            os.rmdir(tarball_container)
        else:
            # This is a non-directory entry (e.g., a patch file) so simply
            # rename the tarball container to be the source path.
            shutil.move(tarball_container, self.stage.source_path)
    else:
        # Exploding tarball: the container itself becomes the source dir.
        shutil.move(tarball_container, self.stage.source_path)
def test_unallowed_extension():
    """decompressor_for raises for an extension it has no decompressor for."""
    with pytest.raises(CommandNotFoundError):
        scomp.decompressor_for('Foo.py', 'py')