def test_restage(self):
    """restage() must wipe the expanded archive dir, including any
    files created there after expansion."""
    stage = Stage(archive_url, name=stage_name)
    stage.fetch()
    stage.expand_archive()
    stage.chdir_to_archive()
    self.check_expand_archive(stage, stage_name)
    self.check_chdir_to_archive(stage, stage_name)

    # Plant a sentinel file in the expanded archive dir.  restage()
    # should destroy it along with the rest of the expansion.
    # FIX: the handle was previously named 'file', shadowing the builtin;
    # open() is itself a context manager, so closing() is unnecessary.
    with open('foobar', 'w') as handle:
        handle.write("this file is to be destroyed.")

    self.assertTrue('foobar' in os.listdir(stage.expanded_archive_path))

    # Make sure the file is not there after restage.
    stage.restage()
    self.check_chdir(stage, stage_name)
    self.check_fetch(stage, stage_name)

    stage.chdir_to_archive()
    self.check_chdir_to_archive(stage, stage_name)
    self.assertFalse('foobar' in os.listdir(stage.expanded_archive_path))

    stage.destroy()
    self.check_destroy(stage, stage_name)
def test_fetch(self):
    """Fetching an archive leaves the stage set up with the file
    downloaded; destroy() then removes everything."""
    fetch_stage = Stage(archive_url, name=stage_name)
    fetch_stage.fetch()

    # Verify the stage layout, the working directory, and the
    # downloaded archive, in that order.
    for verify in (self.check_setup, self.check_chdir, self.check_fetch):
        verify(fetch_stage, stage_name)

    fetch_stage.destroy()
    self.check_destroy(fetch_stage, stage_name)
def test_expand_archive(self):
    """Expanding a fetched archive produces the expected tree, and
    destroy() cleans it all away."""
    expand_stage = Stage(archive_url, name=stage_name)
    expand_stage.fetch()

    # The stage must be fully set up and the file downloaded before
    # we attempt the expansion.
    self.check_setup(expand_stage, stage_name)
    self.check_fetch(expand_stage, stage_name)

    expand_stage.expand_archive()
    self.check_expand_archive(expand_stage, stage_name)

    expand_stage.destroy()
    self.check_destroy(expand_stage, stage_name)
class InstallTest(unittest.TestCase):
    """Tests the configure guesser in spack create"""

    def setUp(self):
        self.tar = which('tar')
        self.tmpdir = tempfile.mkdtemp()
        self.orig_dir = os.getcwd()
        os.chdir(self.tmpdir)
        self.stage = None

    def tearDown(self):
        # FIX: restore the original working directory *before* removing
        # the temp dir -- rmtree on the current working directory fails
        # on some platforms.
        os.chdir(self.orig_dir)
        shutil.rmtree(self.tmpdir, ignore_errors=True)
        if self.stage:
            self.stage.destroy()

    def check_archive(self, filename, system):
        """Build a tarball containing only ``filename``, stage it, and
        assert that ConfigureGuesser reports ``system`` as the build
        system."""
        mkdirp('archive')
        touch(join_path('archive', filename))
        self.tar('czf', 'archive.tar.gz', 'archive')

        url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
        # FIX: 'print url' is Python-2-only; the function-call form is
        # valid on both Python 2 and Python 3.
        print(url)
        self.stage = Stage(url)
        self.stage.fetch()

        guesser = ConfigureGuesser()
        guesser(self.stage)
        self.assertEqual(system, guesser.build_system)

    def test_python(self):
        self.check_archive('setup.py', 'python')

    def test_autotools(self):
        self.check_archive('configure', 'autotools')

    def test_cmake(self):
        self.check_archive('CMakeLists.txt', 'cmake')

    def test_unknown(self):
        self.check_archive('foobar', 'unknown')
def download_tarball(spec):
    """Download the binary tarball for ``spec`` into the stage area.

    Tries each configured mirror in turn and returns the path of the
    staged file from the first successful fetch, or None if no mirror
    could provide the tarball.
    """
    mirrors = spack.config.get('mirrors')
    if not mirrors:
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")

    tarball = tarball_path_name(spec, '.spack')

    for mirror_name in mirrors:
        tarball_url = mirrors[mirror_name] + "/build_cache/" + tarball

        # stage the tarball into standard place
        stage = Stage(tarball_url, name="build_cache", keep=True)
        try:
            stage.fetch()
        except fs.FetchError:
            # This mirror doesn't have it; try the next one.
            continue
        return stage.save_filename

    return None
def get_checksums(versions, urls, **kwargs): # Allow commands like create() to do some analysis on the first # archive after it is downloaded. first_stage_function = kwargs.get('first_stage_function', None) tty.msg("Downloading...") hashes = [] for i, (url, version) in enumerate(zip(urls, versions)): stage = Stage(url) try: stage.fetch() if i == 0 and first_stage_function: first_stage_function(stage) hashes.append( spack.util.crypto.checksum(hashlib.md5, stage.archive_file)) except FailedDownloadError, e: tty.msg("Failed to fetch %s" % url) continue finally:
def install(self, spec, prefix):
    """Build and install sra-tools.

    Two dependencies (ngs and ncbi-vdb) are fetched as separate
    tarballs, each built in its own out-of-tree 'builddir', before the
    main sra-tools build is configured against them.
    """
    # Out-of-tree build directory for the main sra-tools build.
    build_dir = join_path(self.stage.source_path, "builddir")
    os.mkdir(build_dir)

    # --- Dependency 1: ngs SDK -------------------------------------
    stage1 = Stage("https://github.com/ncbi/ngs/archive/1.1.3.tar.gz")
    # Download tarball. fetch() doesn't work with git url?
    stage1.fetch()
    stage1.expand_archive()
    with working_dir(stage1.source_path):
        ngs_build = join_path(stage1.source_path,"builddir")
        os.mkdir(ngs_build)
        # First configure/make pass builds the SDK plus language bindings.
        configure("--build-prefix="+ ngs_build, "--prefix=" + prefix)
        make("ngs-sdk")
        make("ngs-python")
        make("ngs-java")
        # Second pass re-configures against the just-installed SDK to
        # build ngs-bam, then installs everything.
        configure("--build-prefix=" + ngs_build, "--with-ngs-sdk=" + prefix,
                  "--prefix=" + prefix)
        make("ngs-bam")
        make("install")

    # --- Dependency 2: ncbi-vdb ------------------------------------
    stage2 = Stage("https://github.com/ncbi/ncbi-vdb/archive/2.5.2.tar.gz")
    stage2.fetch()
    stage2.expand_archive()
    with working_dir(stage2.source_path):
        vdb_build = join_path(stage2.source_path,"builddir")
        os.mkdir(vdb_build)
        configure("--build-prefix=" + vdb_build,
                  "--with-ngs-sdk-prefix=" + prefix,
                  "--with-ngs-java-prefix=" + prefix,
                  "--prefix=" + prefix)
        make()
        make("install")

    # sra-tools build block
    # NOTE: points at the ncbi-vdb build dir and sources from stage2
    # above, so stage2 must still exist at this point.
    with working_dir(self.stage.source_path):
        configure("--build-prefix=" + build_dir,
                  "--with-ngs-sdk-prefix=" + prefix,
                  "--with-ncbi-vdb-build=" + vdb_build,
                  "--with-ncbi-vdb-sources=" + stage2.source_path,
                  "--prefix=" + prefix)
        make()
        make("install")
def _download_buildcache_entry(mirror_root, descriptions):
    """Fetch each described buildcache file from ``mirror_root``.

    Each description supplies a relative 'url', a destination 'path',
    and a 'required' flag.  Returns False as soon as a required file
    fails to download; missing optional files are only logged.  Returns
    True when all required files were fetched.
    """
    for entry in descriptions:
        entry_url = os.path.join(mirror_root, entry['url'])
        target_dir = entry['path']
        required = entry['required']
        mkdirp(target_dir)

        entry_stage = Stage(
            entry_url, name="build_cache", path=target_dir, keep=True)
        try:
            entry_stage.fetch()
        except fs.FetchError as err:
            tty.debug(err)
            if required:
                tty.error('Failed to download required url {0}'.format(
                    entry_url))
                return False

    return True
def download_tarball(spec):
    """Download the binary tarball for ``spec`` into the stage area.

    Walks the configured mirrors and returns the path of the staged
    file from the first successful fetch, or None when no mirror has
    the tarball.
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")

    tarball = tarball_path_name(spec, '.spack')

    for mirror in spack.mirror.MirrorCollection().values():
        remote_url = url_util.join(
            mirror.fetch_url, _build_cache_relative_path, tarball)

        # stage the tarball into standard place
        stage = Stage(remote_url, name="build_cache", keep=True)
        try:
            stage.fetch()
        except fs.FetchError:
            # Not available from this mirror; try the next one.
            continue
        return stage.save_filename

    return None
def buildcache_sync(args):
    """
    Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror.  Exactly one of --src-directory,
    # --src-mirror-name, or --src-mirror-url is expected to be set.
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--src-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        source_location = 'file://' + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        # "<unnamed>" is the sentinel returned when no configured mirror
        # matched the given name.
        result = spack.mirror.MirrorCollection().lookup(source_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=source_location))

    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror -- same three-way resolution as
    # for the source above.
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--dest-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        dest_location = 'file://' + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=dest_location))

    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    # For every spec in the environment, sync the tarball plus its
    # metadata files (.spec.yaml and .cdashid).
    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug(' {0}{1}: {2}'.format(
            '* ' if s in env.roots() else ' ', s.name, s.dag_hash()))

        buildcache_rel_paths.extend([
            os.path.join(
                build_cache_dir, bindist.tarball_path_name(s, '.spack')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.cdashid'))
        ])

    # Each file is fetched from the source mirror into a temp dir, then
    # pushed to the destination; the temp dir is removed at the end.
    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(
                    local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                # Missing files are best-effort: log and keep syncing.
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)
mirror_path = "%s/%s-%s.%s" % ( pkg.name, pkg.name, version, extension(pkg.url)) os.chdir(working_dir) mirror_file = join_path(args.directory, mirror_path) if os.path.exists(mirror_file): tty.msg("Already fetched %s." % mirror_file) num_mirrored += 1 continue # Get the URL for the version and set up a stage to download it. url = pkg.url_for_version(version) stage = Stage(url) try: # fetch changes directory into the stage stage.fetch() if not args.no_checksum and version in pkg.versions: digest = pkg.versions[version] stage.check(digest) tty.msg("Checksum passed for %s@%s" % (pkg.name, version)) # change back and move the new archive into place. os.chdir(working_dir) shutil.move(stage.archive_file, mirror_file) tty.msg("Added %s to mirror" % mirror_file) num_mirrored += 1 except Exception, e: tty.warn("Error while fetching %s." % url, e.message) num_error += 1
mirror_path = "%s/%s-%s.%s" % (pkg.name, pkg.name, version, extension(pkg.url)) os.chdir(working_dir) mirror_file = join_path(args.directory, mirror_path) if os.path.exists(mirror_file): tty.msg("Already fetched %s." % mirror_file) num_mirrored += 1 continue # Get the URL for the version and set up a stage to download it. url = pkg.url_for_version(version) stage = Stage(url) try: # fetch changes directory into the stage stage.fetch() if not args.no_checksum and version in pkg.versions: digest = pkg.versions[version] stage.check(digest) tty.msg("Checksum passed for %s@%s" % (pkg.name, version)) # change back and move the new archive into place. os.chdir(working_dir) shutil.move(stage.archive_file, mirror_file) tty.msg("Added %s to mirror" % mirror_file) num_mirrored += 1 except Exception, e: tty.warn("Error while fetching %s." % url, e.message) num_error += 1