def test_restage(self):
    """A file planted in the expanded archive must not survive a restage."""
    stg = Stage(archive_url, name=stage_name)
    stg.fetch()
    stg.expand_archive()
    stg.chdir_to_archive()
    self.check_expand_archive(stg, stage_name)
    self.check_chdir_to_archive(stg, stage_name)

    # Plant a file in the old expanded archive dir.
    with open('foobar', 'w') as f:
        f.write("this file is to be destroyed.")
    self.assertTrue('foobar' in os.listdir(stg.expanded_archive_path))

    # Restage should wipe the expanded archive, taking the file with it.
    stg.restage()
    self.check_chdir(stg, stage_name)
    self.check_fetch(stg, stage_name)

    stg.chdir_to_archive()
    self.check_chdir_to_archive(stg, stage_name)
    self.assertFalse('foobar' in os.listdir(stg.expanded_archive_path))

    stg.destroy()
    self.check_destroy(stg, stage_name)
def test_setup_and_destroy_name_with_tmp(self):
    """Named stage, tmp directories enabled: setup and destroy cleanly."""
    with use_tmp(True):
        stg = Stage(archive_url, name=stage_name)
        self.check_setup(stg, stage_name)

        stg.destroy()
        self.check_destroy(stg, stage_name)
def test_setup_and_destroy_no_name_without_tmp(self):
    """Anonymous stage, tmp directories disabled: setup and destroy cleanly."""
    with use_tmp(False):
        stg = Stage(archive_url)
        self.check_setup(stg, None)

        stg.destroy()
        self.check_destroy(stg, None)
def checkSetupAndDestroy(self, stage_name=None):
    """Build a stage (optionally named), set it up, then destroy it,
    verifying the stage state after each step."""
    stg = Stage(archive_url, name=stage_name)
    stg.setup()
    self.check_setup(stg, stage_name)

    stg.destroy()
    self.check_destroy(stg, stage_name)
class InstallTest(MockPackagesTest):
    """Tests install and uninstall on a trivial package."""

    def setUp(self):
        """Build a dummy gzipped tarball containing a fake `configure`
        script, so a package can be "installed" from a local file URL."""
        super(InstallTest, self).setUp()
        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        # Make the fake configure script executable.
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False

    def tearDown(self):
        """Remove the staged archive and restore checksum verification."""
        super(InstallTest, self).tearDown()
        if self.stage is not None:
            self.stage.destroy()

        # Turn checksumming back on
        spack.do_checksum = True

    def test_install_and_uninstall(self):
        # Get a basic concrete spec for the trivial install package.
        spec = Spec(install_test_package)
        spec.concretize()
        self.assertTrue(spec.concrete)

        # Get the package
        pkg = spack.db.get(spec)

        # Fake the URL for the package so it downloads from a file.
        archive_path = join_path(self.stage.path, archive_name)
        pkg.url = 'file://' + archive_path

        try:
            pkg.do_install()
            pkg.do_uninstall()
        except Exception:
            # Don't leave a half-built prefix behind on failure.
            pkg.remove_prefix()
            raise
def test_chdir(self):
    """chdir() alone should both set up the stage and move into it."""
    stg = Stage(archive_url, name=stage_name)
    stg.chdir()
    self.check_setup(stg, stage_name)
    self.check_chdir(stg, stage_name)

    stg.destroy()
    self.check_destroy(stg, stage_name)
def test_expand_archive(self):
    """Fetching then expanding should leave a valid expanded archive."""
    stg = Stage(archive_url, name=stage_name)
    stg.fetch()
    self.check_setup(stg, stage_name)
    self.check_fetch(stg, stage_name)

    stg.expand_archive()
    self.check_expand_archive(stg, stage_name)

    stg.destroy()
    self.check_destroy(stg, stage_name)
class MockRepo(object):
    """A stage holding archive/repo files used to exercise Spack's
    fetch strategies in tests."""

    def __init__(self, stage_name, repo_name):
        """Create a stage named *stage_name* and an empty *repo_name*
        directory inside it."""
        # Stage backing this mock repo.
        stg = Stage(stage_name)
        self.stage = stg

        # Absolute path of the repo directory within the stage.
        self.path = join_path(stg.path, repo_name)
        mkdirp(self.path)

    def destroy(self):
        """Tear down the stage (and everything in it) for this mock repo."""
        if self.stage:
            self.stage.destroy()
def stage():
    """Yield a stage populated with the directory structure the tests use,
    destroying it once the test is done."""
    s = Stage('link-tree-test')
    s.create()

    with working_dir(s.path):
        # Lay down the canonical source tree.
        for path in ('source/1',
                     'source/a/b/2',
                     'source/a/b/3',
                     'source/c/4',
                     'source/c/d/5',
                     'source/c/d/6',
                     'source/c/d/e/7'):
            touchp(path)

    yield s
    s.destroy()
class LinkTreeTest(unittest.TestCase):
    """Tests Spack's LinkTree class."""

    def setUp(self):
        """Create a stage containing a small source tree to link from."""
        self.stage = Stage('link-tree-test')

        with working_dir(self.stage.path):
            for path in ('source/1',
                         'source/a/b/2',
                         'source/a/b/3',
                         'source/c/4',
                         'source/c/d/5',
                         'source/c/d/6',
                         'source/c/d/e/7'):
                touchp(path)

        source_path = os.path.join(self.stage.path, 'source')
        self.link_tree = LinkTree(source_path)

    def tearDown(self):
        if self.stage:
            self.stage.destroy()

    def check_file_link(self, filename):
        # A merged file shows up as a symlink that resolves to a file.
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue(os.path.islink(filename))

    def check_dir(self, filename):
        self.assertTrue(os.path.isdir(filename))

    def test_merge_to_new_directory(self):
        """Merging into a fresh dir links every file; unmerge removes it all."""
        with working_dir(self.stage.path):
            self.link_tree.merge('dest')

            for path in ('dest/1', 'dest/a/b/2', 'dest/a/b/3', 'dest/c/4',
                         'dest/c/d/5', 'dest/c/d/6', 'dest/c/d/e/7'):
                self.check_file_link(path)

            self.link_tree.unmerge('dest')
            self.assertFalse(os.path.exists('dest'))

    def test_merge_to_existing_directory(self):
        """Pre-existing files survive both merge and unmerge; links do not."""
        with working_dir(self.stage.path):
            touchp('dest/x')
            touchp('dest/a/b/y')

            self.link_tree.merge('dest')

            merged = ('dest/1', 'dest/a/b/2', 'dest/a/b/3', 'dest/c/4',
                      'dest/c/d/5', 'dest/c/d/6', 'dest/c/d/e/7')
            for path in merged:
                self.check_file_link(path)

            # The files that were there before the merge are untouched.
            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))

            self.link_tree.unmerge('dest')

            # Pre-existing files remain; merged links are gone.
            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))
            for path in merged:
                self.assertFalse(os.path.isfile(path))
def test_merge_with_empty_directories(self):
    """Empty destination dirs are preserved through merge and unmerge."""
    with working_dir(self.stage.path):
        mkdirp('dest/f/g')
        mkdirp('dest/a/b/h')

        self.link_tree.merge('dest')
        self.link_tree.unmerge('dest')

        # All merged links are cleaned up...
        for path in ('dest/1', 'dest/a/b/2', 'dest/a/b/3', 'dest/c/4',
                     'dest/c/d/5', 'dest/c/d/6', 'dest/c/d/e/7'):
            self.assertFalse(os.path.exists(path))

        # ...but the directories that existed beforehand remain.
        self.assertTrue(os.path.isdir('dest/a/b/h'))
        self.assertTrue(os.path.isdir('dest/f/g'))

def test_ignore(self):
    """Entries matched by the ignore predicate are neither linked nor removed."""
    with working_dir(self.stage.path):
        touchp('source/.spec')
        touchp('dest/.spec')

        ignore_spec = lambda x: x == '.spec'
        self.link_tree.merge('dest', ignore=ignore_spec)
        self.link_tree.unmerge('dest', ignore=ignore_spec)

        for path in ('dest/1', 'dest/a', 'dest/c'):
            self.assertFalse(os.path.exists(path))

        # Both .spec files are left alone on each side.
        self.assertTrue(os.path.isfile('source/.spec'))
        self.assertTrue(os.path.isfile('dest/.spec'))
def buildcache_sync(args):
    """
    Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror.  Exactly one of --src-directory,
    # --src-mirror-name, or --src-mirror-url is expected to be set on args;
    # each branch normalizes its value into `source_location`.
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            # A plain directory argument must not carry a URL scheme.
            raise ValueError(
                '"--src-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        source_location = 'file://' + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        # Lookup falls back to an "<unnamed>" mirror when the name is unknown.
        result = spack.mirror.MirrorCollection().lookup(source_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=source_location))

    # Resolve the normalized location to a concrete mirror and its fetch URL.
    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror (mirrors the source logic above).
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--dest-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        dest_location = 'file://' + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=dest_location))

    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment; raises if no environment is active.
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    # Relative paths (under the build-cache dir) of every file to copy.
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug(' {0}{1}: {2}'.format(
            '* ' if s in env.roots() else ' ', s.name, s.dag_hash()))
        # For each spec: the binary tarball, its spec metadata, and cdash id.
        buildcache_rel_paths.extend([
            os.path.join(build_cache_dir,
                         bindist.tarball_path_name(s, '.spack')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.cdashid'))
        ])

    # Files are fetched into a scratch dir, then pushed to the destination.
    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            # keep=True so the fetched file survives until we push it.
            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(
                    local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                # Missing files (e.g. optional .cdashid) are logged, not fatal.
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)
digest = pkg.versions[version] stage.check(digest) tty.msg("Checksum passed for %s@%s" % (pkg.name, version)) # change back and move the new archive into place. os.chdir(working_dir) shutil.move(stage.archive_file, mirror_file) tty.msg("Added %s to mirror" % mirror_file) num_mirrored += 1 except Exception, e: tty.warn("Error while fetching %s." % url, e.message) num_error += 1 finally: stage.destroy() # If nothing happened, try to say why. if not num_mirrored: if num_error: tty.error("No packages added to mirror.", "All packages failed to fetch.") else: tty.error( "No packages added to mirror. No versions matched specs:") colify(args.specs, indent=4) def mirror(parser, args): action = { 'create': mirror_create,
digest = pkg.versions[version] stage.check(digest) tty.msg("Checksum passed for %s@%s" % (pkg.name, version)) # change back and move the new archive into place. os.chdir(working_dir) shutil.move(stage.archive_file, mirror_file) tty.msg("Added %s to mirror" % mirror_file) num_mirrored += 1 except Exception, e: tty.warn("Error while fetching %s." % url, e.message) num_error += 1 finally: stage.destroy() # If nothing happened, try to say why. if not num_mirrored: if num_error: tty.error("No packages added to mirror.", "All packages failed to fetch.") else: tty.error("No packages added to mirror. No versions matched specs:") colify(args.specs, indent=4) def mirror(parser, args): action = { 'create' : mirror_create, 'add' : mirror_add, 'remove' : mirror_remove,
class InstallTest(MockPackagesTest):
    """Tests install and uninstall on a trivial package."""

    def setUp(self):
        """Build a dummy tarball with a fake `configure` script and point
        Spack's install layout at a throwaway temporary directory."""
        super(InstallTest, self).setUp()
        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write("#!/bin/sh\n"
                            "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                            "cat > Makefile <<EOF\n"
                            "all:\n"
                            "\techo Building...\n\n"
                            "install:\n"
                            "\tmkdir -p $prefix\n"
                            "\ttouch $prefix/dummy_file\n"
                            "EOF\n")
        # Make the fake configure script executable.
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False

        # Use a fake install directory to avoid conflicts bt/w
        # installed pkgs and mock packages.
        self.tmpdir = tempfile.mkdtemp()
        self.orig_layout = spack.install_layout
        spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)

    def tearDown(self):
        """Destroy the stage, restore checksumming and the install layout."""
        super(InstallTest, self).tearDown()
        if self.stage is not None:
            self.stage.destroy()

        # Turn checksumming back on
        spack.do_checksum = True

        # restore spack's layout.
        spack.install_layout = self.orig_layout
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_install_and_uninstall(self):
        # Get a basic concrete spec for the trivial install package.
        spec = Spec(install_test_package)
        spec.concretize()
        self.assertTrue(spec.concrete)

        # Get the package
        pkg = spack.db.get(spec)

        # Fake the URL for the package so it downloads from a file.
        archive_path = join_path(self.stage.path, archive_name)
        pkg.url = 'file://' + archive_path

        try:
            pkg.do_install()
            pkg.do_uninstall()
        except Exception:
            # Don't leave a half-built prefix behind on failure.
            pkg.remove_prefix()
            raise