def clean(parser, args):
    """Remove build artifacts for each requested package spec.

    With --dist cleans the downloaded distribution archive, with --work
    cleans the expanded working directory, otherwise does a plain clean.
    """
    if not args.packages:
        tty.die("spack clean requires at least one package argument")

    for spec in spack.cmd.parse_specs(args.packages, concretize=True):
        tty.msg("Cleaning for spec:", spec)
        pkg = packages.get(spec.name)
        if args.dist:
            pkg.do_clean_dist()
        elif args.work:
            pkg.do_clean_work()
        else:
            pkg.do_clean()
def run(names, verbose=False):
    """Run tests with the supplied names.  Names should be a list.
       If it's empty, run ALL of Spack's tests.

    Exits with a non-zero status (via sys.exit) if any test name is
    invalid or if any test errors or fails.
    """
    verbosity = 1 if not verbose else 2

    # No names means run everything; otherwise validate each name first.
    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(test_names, indent=4)
                sys.exit(1)

    runner = unittest.TextTestRunner(verbosity=verbosity)

    # Accumulate result counts across all suites.
    testsRun = errors = failures = skipped = 0
    for test in names:
        module = 'spack.test.' + test
        # BUG FIX: removed leftover debug statement `print module`.
        suite = unittest.defaultTestLoader.loadTestsFromName(module)

        tty.msg("Running test: %s" % test)
        result = runner.run(suite)
        testsRun += result.testsRun
        errors += len(result.errors)
        failures += len(result.failures)
        skipped += len(result.skipped)

    # Use the computed flag instead of re-testing the same condition
    # below (the original computed `succeeded` but never used it).
    succeeded = not errors and not failures

    tty.msg("Tests Complete.",
            "%5d tests run" % testsRun,
            "%5d skipped" % skipped,
            "%5d failures" % failures,
            "%5d errors" % errors)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def get_checksums(versions, urls, **kwargs): # Allow commands like create() to do some analysis on the first # archive after it is downloaded. first_stage_function = kwargs.get('first_stage_function', None) tty.msg("Downloading...") hashes = [] for i, (url, version) in enumerate(zip(urls, versions)): stage = Stage(url) try: stage.fetch() if i == 0 and first_stage_function: first_stage_function(stage) hashes.append( spack.util.crypto.checksum(hashlib.md5, stage.archive_file)) except FailedDownloadError, e: tty.msg("Failed to fetch %s" % url) continue finally:
def fetch(self):
    """Downloads the archive for this stage.

    Tries self.url first, then each configured mirror that carries
    self.mirror_path.  Returns the path to the downloaded archive
    file (a truthy value); raises FailedDownloadError if no URL
    yielded a file.

    NOTE(review): the previous docstring claimed a True/False return,
    but the code returns self.archive_file in both branches.
    """
    self.chdir()

    if self.archive_file:
        # A previous fetch already left the archive in the stage.
        tty.msg("Already downloaded %s." % self.archive_file)

    else:
        # Candidate URLs: the primary one, then each mirror in order.
        urls = [self.url]
        if self.mirror_path:
            urls += ["%s/%s" % (m, self.mirror_path) for m in spack.mirrors]

        for url in urls:
            tty.msg("Trying to fetch from %s" % url)
            # fetch_from_url sets self.archive_file on success.
            self.fetch_from_url(url)
            if self.archive_file:
                break

        if not self.archive_file:
            # 'url' here is the last candidate attempted.
            raise FailedDownloadError(url)

    return self.archive_file
def fetch(self):
    """Downloads the archive at self.url into the stage via curl.

    Captures the HTTP response headers (curl -D) so we can warn when
    the server returned an HTML page (e.g. a proxy or login page)
    instead of the expected archive.  Returns the path to the
    downloaded archive file; raises FailedDownloadError if no file
    was downloaded.
    """
    self.chdir()

    if self.archive_file:
        tty.msg("Already downloaded %s." % self.archive_file)

    else:
        tty.msg("Fetching %s" % self.url)

        try:
            # Run curl but grab the mime type from the http headers
            headers = spack.curl('-#',        # status bar
                                 '-O',        # save file to disk
                                 '-D', '-',   # print out HTML headers
                                 '-L', self.url,
                                 return_output=True)
        except:
            # Clean up any partial archive, then re-raise so the
            # caller still sees the original failure.
            if self.archive_file:
                os.remove(self.archive_file)
            raise

        # Check if we somehow got an HTML file rather than the archive we
        # asked for.  We only look at the last content type, to handle
        # redirects properly.
        content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
        if content_types and 'text/html' in content_types[-1]:
            tty.warn("The contents of " + self.archive_file + " look like HTML.",
                     "The checksum will likely be bad. If it is, you can use",
                     "'spack clean --all' to remove the bad archive, then fix",
                     "your internet gateway issue and install again.")

    if not self.archive_file:
        # BUG FIX: the original raised FailedDownloadError(url), but no
        # local 'url' exists in this function (NameError); the URL we
        # attempted is self.url.
        raise FailedDownloadError(self.url)

    return self.archive_file
def checksum(parser, args):
    """Generate a version/checksum dictionary for a package.

    Fetches the requested versions (or all available ones) of the
    package's archive, checksums each, and prints a dict literal
    suitable for pasting into the package file.
    """
    # get the package we're going to generate checksums for
    pkg = packages.get(args.package)

    # If the user asked for specific versions, use those.
    versions = [ver(v) for v in args.versions]

    # BUG FIX (idiom): isinstance() instead of type() == for the type
    # check; version lists/ranges are still rejected.
    if not all(isinstance(v, Version) for v in versions):
        tty.die("Cannot generate checksums for version lists or " +
                "version ranges. Use unambiguous versions.")

    if not versions:
        versions = pkg.fetch_available_versions()
        if not versions:
            tty.die("Could not fetch any available versions for %s." % pkg.name)

    # Newest versions first.
    versions = list(reversed(versions))
    urls = [pkg.url_for_version(v) for v in versions]

    tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
            *spack.cmd.elide_list(
                ["%-10s%s" % (v, u) for v, u in zip(versions, urls)]))
    print
    archives_to_fetch = tty.get_number(
        "How many would you like to checksum?", default=5, abort='q')

    if not archives_to_fetch:
        tty.msg("Aborted.")
        return

    version_hashes = get_checksums(
        versions[:archives_to_fetch], urls[:archives_to_fetch])

    if not version_hashes:
        tty.die("Could not fetch any available versions for %s." % pkg.name)

    # Format as a dict literal, one version per line.
    dict_string = [" '%s' : '%s'," % (v, h) for v, h in version_hashes]
    dict_string = ['{'] + dict_string + ["}"]

    tty.msg("Checksummed new versions of %s:" % pkg.name, *dict_string)
def bootstrap(parser, args):
    """Create a fresh spack installation in args.prefix.

    Initializes a git repository there, fetches origin/master from the
    repository this spack was run from, and checks it out.  Dies if the
    prefix already contains a git repo or any other files.
    """
    origin_url = get_origin_url()
    prefix = args.prefix

    tty.msg("Fetching spack from origin: %s" % origin_url)

    if os.path.exists(new_path(prefix, ".git")):
        tty.die("There already seems to be a git repository in %s" % prefix)

    # Refuse to clobber a non-empty directory.
    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        # BUG FIX: corrected "boostrapping" typo in the user-facing message.
        tty.die("There are already files there! Delete these files before bootstrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    check_call(["git", "init", "--shared", "-q"])
    check_call(["git", "remote", "add", "origin", origin_url])
    # -n: don't fetch tags; -q: quiet.
    check_call(["git", "fetch", "origin", "master:refs/remotes/origin/master", "-n", "-q"])
    check_call(["git", "reset", "--hard", "origin/master", "-q"])

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
def create(parser, args): url = args.url # Try to deduce name and version of the new package from the URL name, version = spack.url.parse_name_and_version(url) if not name: tty.msg("Couldn't guess a name for this package.") while not name: new_name = raw_input("Name: ") if packages.valid_name(name): name = new_name else: print "Package name can only contain A-Z, a-z, 0-9, '_' and '-'" if not version: tty.die("Couldn't guess a version string from %s." % url) tty.msg("Creating template for package %s" % name) pkg_path = packages.filename_for_package_name(name) if os.path.exists(pkg_path) and not args.force: tty.die("%s already exists." % pkg_path) class_name = packages.class_name_for_package_name(name) versions = list(reversed(spack.package.find_versions_of_archive(url))) archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided versions = [version] urls = [url] else: urls = [spack.url.substitute_version(url, v) for v in versions] if len(urls) > 1: tty.msg("Found %s versions of %s." % (len(urls), name), *spack.cmd.elide_list( ["%-10s%s" % (v,u) for v, u in zip(versions, urls)])) print archives_to_fetch = tty.get_number( "Include how many checksums in the package file?", default=5, abort='q') if not archives_to_fetch: tty.msg("Aborted.") return guesser = ConfigureGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( versions[:archives_to_fetch], urls[:archives_to_fetch], first_stage_function=guesser) if not ver_hash_tuples: tty.die("Could not fetch any tarballs for %s." % name) # Write out a template for the file with closing(open(pkg_path, "w")) as pkg_file: pkg_file.write( package_template.substitute( name=name, configure=guesser.configure, class_name=class_name, url=url, versions=make_version_dict(ver_hash_tuples))) # If everything checks out, go ahead and edit. spack.editor(pkg_path) tty.msg("Created package %s." % pkg_path)
def compilers(parser, args):
    """Print the list of compilers spack supports, in columns."""
    tty.msg("Supported compilers")
    supported = spack.compilers.supported_compilers()
    colify(supported, indent=4)