def _print_ref_counts():
    """Print out all ref counts for the graph used here, for debugging"""
    rows = []

    def add_rec(spec):
        # Query the DB for any spec (installed or not) matching this string.
        matches = spack.installed_db.query(spec, installed=any)
        if not matches:
            rows.append("[ %-7s ] %-20s-" % ('', spec))
        else:
            key = matches[0].dag_hash()
            record = spack.installed_db.get_record(matches[0])
            rows.append("[ %-7s ] %-20s%d" % (key[:7], spec, record.ref_count))

    with spack.installed_db.read_transaction():
        # Every spec in the test DAGs used by these tests.
        for query in ('mpileaks ^mpich', 'callpath ^mpich', 'mpich',
                      'mpileaks ^mpich2', 'callpath ^mpich2', 'mpich2',
                      'mpileaks ^zmpi', 'callpath ^zmpi', 'zmpi',
                      'fake', 'dyninst', 'libdwarf', 'libelf'):
            add_rec(query)

    colify(rows, cols=3)
def versions(parser, args):
    """Show safe (checksummed) and remote versions of a package."""
    pkg = spack.repo.get(args.package)

    tty.msg('Safe versions (already checksummed):')
    safe_versions = pkg.versions
    if safe_versions:
        colify(sorted(safe_versions, reverse=True), indent=2)
    else:
        print(' Found no versions for {0}'.format(pkg.name))
        tty.debug('Manually add versions to the package.')

    tty.msg('Remote versions (not yet checksummed):')
    fetched_versions = pkg.fetch_remote_versions()
    remote_versions = set(fetched_versions).difference(safe_versions)
    if remote_versions:
        colify(sorted(remote_versions, reverse=True), indent=2)
    elif not fetched_versions:
        # Nothing came back from the remote listing at all.
        print(' Found no versions for {0}'.format(pkg.name))
        tty.debug('Check the list_url and list_depth attributes of the '
                  'package to help Spack find versions.')
    else:
        # Everything fetched is already checksummed.
        print(' Found no unchecksummed versions for {0}'.format(pkg.name))
def do_list(args, unknown_args):
    """Print a more readable tree of the tests that pytest offers."""
    # Run pytest's collection while capturing everything it writes to stdout.
    saved_stdout = sys.stdout
    try:
        sys.stdout = output = StringIO()
        pytest.main(['--collect-only'])
    finally:
        sys.stdout = saved_stdout

    # Reformat pytest's "<NodeType 'name'>" dump into a readable tree.
    short_names = []
    for line in output.getvalue().split('\n'):
        match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
        if not match:
            continue
        indent, nodetype, name = match.groups()

        if args.list:
            # only print top-level for short list
            if not indent:
                short_names.append(os.path.basename(name).replace('.py', ''))
        else:
            print(indent + name)

    if args.list:
        colify(short_names)
def dependents(parser, args):
    """Show packages that depend on the given spec."""
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependents takes only one spec.")

    if args.installed:
        # Actual parents recorded in the installation database.
        spec = spack.cmd.disambiguate_spec(specs[0])
        tty.msg("Dependents of %s" % spec.cformat('$_$@$%@$/'))
        parents = spack.store.db.installed_relatives(
            spec, 'parents', args.transitive)
        if parents:
            spack.cmd.display_specs(parents, long=True)
        else:
            print("No dependents")
        return

    # Possible dependents computed from package definitions.
    spec = specs[0]
    index = inverted_dependencies()
    names = get_dependents(spec.name, index, args.transitive)
    names.remove(spec.name)
    if names:
        colify(sorted(names))
    else:
        print("No dependents")
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers and
       add them to Spack's configuration.
    """
    search_paths = args.add_paths or get_path('PATH')

    # Don't initialize compilers config via compilers.get_compiler_config.
    # Just let compiler_find do the entire process and return an empty
    # config from all_compilers.  Default for any other process is
    # init_config=True
    found = list(spack.compilers.find_compilers(*search_paths))

    # Keep only compilers not already present in the given scope.
    new_compilers = []
    for compiler in found:
        arch_spec = ArchSpec(None, compiler.operating_system, compiler.target)
        duplicates = spack.compilers.compilers_for_spec(
            compiler.spec, arch_spec, args.scope)
        if not duplicates:
            new_compilers.append(compiler)

    if not new_compilers:
        tty.msg("Found no new compilers")
        return

    spack.compilers.add_compilers_to_config(
        new_compilers, scope=args.scope, init_config=False)
    n = len(new_compilers)
    s = 's' if n > 1 else ''
    filename = spack.config.get_config_filename(args.scope, 'compilers')
    tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
    colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
def test(parser, args):
    """List available cram tests, or run the named ones.

    Args:
        parser: argument parser (unused here, part of the command interface)
        args: parsed arguments with ``list``, ``names``, and ``verbose``
    """
    if args.list:
        # print() call form works on both Python 2 and 3 (the original used
        # the Python 2-only print statement).
        print("Available tests:")
        colify(cram.test.list_tests(), indent=2)
    else:
        cram.test.run(args.names, args.verbose)
def providers(parser, args):
    """List virtual packages, or the providers of given virtual specs."""
    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())

    # Pre-render the list of valid virtuals so it can be reused in errors.
    out = six.StringIO()
    isatty = sys.stdout.isatty()
    if isatty:
        out.write('Virtual packages:\n')
    colify.colify(valid_virtuals, output=out, tty=isatty, indent=4)
    valid_virtuals_str = out.getvalue()

    # If called without arguments, list all the virtual packages
    if not args.virtual_package:
        print(valid_virtuals_str)
        return

    # Otherwise, parse the specs from command line
    specs = spack.cmd.parse_specs(args.virtual_package)

    # Reject any spec that is not a known virtual package.
    non_virtual = [str(s) for s in specs
                   if not s.virtual or s.name not in valid_virtuals]
    if non_virtual:
        msg = 'non-virtual specs cannot be part of the query '
        msg += '[{0}]\n'.format(', '.join(non_virtual))
        msg += valid_virtuals_str
        raise ValueError(msg)

    # Display providers
    for spec in specs:
        if sys.stdout.isatty():
            print("{0}:".format(spec))
        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
        print('')
def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives."""
    # try to parse specs from the command line first.
    with spack.concretize.concretizer.disable_compiler_existence_check():
        specs = spack.cmd.parse_specs(args.specs, concretize=True)

    # If there is a file, parse each line as a spec and add it to the list.
    if args.file:
        if specs:
            tty.die("Cannot pass specs on the command line with --file.")
        specs = _read_specs_from_file(args.file)

    # If nothing is passed, use all packages.
    if not specs:
        specs = [Spec(n) for n in spack.repo.all_package_names()]
        specs.sort(key=lambda s: s.format("$_$@").lower())

    # If the user asked for dependencies, traverse spec DAG get them.
    if args.dependencies:
        expanded = set()
        for spec in specs:
            spec.concretize()
            expanded.update(spec.traverse())
        specs = list(expanded)

    # Skip external specs, as they are already installed
    external_specs = [s for s in specs if s.external]
    specs = [s for s in specs if not s.external]
    for spec in external_specs:
        tty.msg('Skipping {0} as it is an external spec.'.format(
            spec.cshort_spec))

    # Default name for directory is spack-mirror-<DATESTAMP>
    directory = args.directory
    if not directory:
        directory = 'spack-mirror-' + datetime.now().strftime("%Y-%m-%d")

    # Make sure nothing is in the way.
    existed = os.path.isdir(directory)

    # Actually do the work to create the mirror
    present, mirrored, error = spack.mirror.create(
        directory, specs, num_versions=args.one_version_per_spec)
    p, m, e = len(present), len(mirrored), len(error)

    verb = "updated" if existed else "created"
    tty.msg(
        "Successfully %s mirror in %s" % (verb, directory),
        "Archive stats:",
        " %-4d already present" % p,
        " %-4d added" % m,
        " %-4d failed to fetch." % e)
    if error:
        tty.error("Failed downloads:")
        colify(s.cformat("$_$@") for s in error)
def compiler_list(args):
    """List all configured compilers, grouped by compiler name.

    Args:
        args: parsed arguments; ``args.scope`` selects the config scope.
    """
    tty.msg("Available compilers")
    index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
    for i, (name, compilers) in enumerate(index.items()):
        if i >= 1:
            # Blank line between sections.  print() call form works on both
            # Python 2 and 3 (the original used the py2-only print statement).
            print()
        cname = "%s{%s}" % (spack.spec.compiler_color, name)
        tty.hline(colorize(cname), char='-')
        colify(reversed(sorted(compilers)))
def extensions(parser, args):
    """List extension packages for an extendable spec: names, installed
    specs, and activated specs.

    Args:
        parser: argument parser (unused, part of the command interface)
        args: parsed arguments with ``spec``, ``mode``, and ``long``
    """
    if not args.spec:
        tty.die("extensions requires a package spec.")

    #
    # Checks
    #
    spec = spack.cmd.parse_specs(args.spec)
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")

    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    spec = spack.cmd.disambiguate_spec(spec[0])

    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)

    if not args.mode:
        args.mode = 'short'

    #
    # List package names of extensions
    extensions = spack.repo.extensions_for(spec)
    if not extensions:
        tty.msg("%s has no extensions." % spec.cshort_spec)
        return
    tty.msg(spec.cshort_spec)
    tty.msg("%d extensions:" % len(extensions))
    colify(ext.name for ext in extensions)

    #
    # List specs of installed extensions.
    #
    installed = [
        s.spec for s in spack.store.db.installed_extensions_for(spec)]
    # print() call form works on both Python 2 and 3 (the original used the
    # py2-only bare print statement for blank lines).
    print()
    if not installed:
        tty.msg("None installed.")
        return
    tty.msg("%d installed:" % len(installed))
    spack.cmd.find.display_specs(installed, mode=args.mode)

    #
    # List specs of activated extensions.
    #
    activated = spack.store.layout.extension_map(spec)
    print()
    if not activated:
        tty.msg("None activated.")
        return
    tty.msg("%d currently activated:" % len(activated))
    spack.cmd.find.display_specs(
        activated.values(), mode=args.mode, long=args.long)
def pkg_diff(args):
    """Compare packages available in two different git revisions.

    Prints the packages unique to each revision, with a blank line
    separating the two sections.
    """
    u1, u2 = diff_packages(args.rev1, args.rev2)

    # print() call form works on both Python 2 and 3 (the original used the
    # py2-only print statement).
    if u1:
        print("%s:" % args.rev1)
        colify(sorted(u1), indent=4)
    if u1:
        print()

    if u2:
        print("%s:" % args.rev2)
        colify(sorted(u2), indent=4)
def compiler_list(args):
    """List configured compilers grouped by (name, os, target)."""
    tty.msg("Available compilers")
    index = index_by(spack.compilers.all_compilers(scope=args.scope),
                     lambda c: (c.spec.name, c.operating_system, c.target))

    sections = sorted(index.items(), key=lambda item: item[0])
    for i, (key, compilers) in enumerate(sections):
        if i >= 1:
            # blank line between sections
            print()
        name, os, target = key
        os_str = os if not target else os + "-%s" % target
        cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
        tty.hline(colorize(cname), char='-')
        colify(reversed(sorted(c.spec for c in compilers)))
def compiler_remove(args):
    """Remove compilers matching a spec from Spack's configuration."""
    cspec = CompilerSpec(args.compiler_spec)
    matches = spack.compilers.compilers_for_spec(cspec, scope=args.scope)

    if not matches:
        tty.die("No compilers match spec %s" % cspec)

    # Refuse to remove several compilers at once unless -a was given.
    if len(matches) > 1 and not args.all:
        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
        colify(reversed(sorted([c.spec for c in matches])), indent=4)
        tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
        sys.exit(1)

    for compiler in matches:
        spack.compilers.remove_compiler_from_config(
            compiler.spec, scope=args.scope)
        tty.msg("Removed compiler %s" % compiler.spec)
def versions(parser, args):
    """Show safe, remote, or new versions of a package."""
    pkg = spack.repo.get(args.package)
    safe_versions = pkg.versions

    if args.safe_only:
        tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
        args.safe = args.safe_only

    if not (args.remote or args.new):
        if sys.stdout.isatty():
            tty.msg('Safe versions (already checksummed):')
        if safe_versions:
            colify(sorted(safe_versions, reverse=True), indent=2)
        else:
            if sys.stdout.isatty():
                tty.warn('Found no versions for {0}'.format(pkg.name))
                tty.debug('Manually add versions to the package.')

    if args.safe:
        return

    fetched_versions = pkg.fetch_remote_versions(args.concurrency)

    if args.new:
        if sys.stdout.isatty():
            tty.msg('New remote versions (not yet checksummed):')
        # only versions strictly newer than the highest known safe version
        highest_safe_version = VersionList(safe_versions).highest_numeric()
        remote_versions = set(
            ver(v) for v in set(fetched_versions)
            if v > highest_safe_version)
    else:
        if sys.stdout.isatty():
            tty.msg('Remote versions (not yet checksummed):')
        remote_versions = set(fetched_versions).difference(safe_versions)

    if remote_versions:
        colify(sorted(remote_versions, reverse=True), indent=2)
    elif sys.stdout.isatty():
        if not fetched_versions:
            tty.warn('Found no versions for {0}'.format(pkg.name))
            tty.debug('Check the list_url and list_depth attributes of '
                      'the package to help Spack find versions.')
        else:
            tty.warn('Found no unchecksummed versions for {0}'.format(
                pkg.name))
def tags(parser, args):
    """Report available/installed tags, or packages associated with tags."""
    # Disallow combining all option with (positional) tags to avoid confusion
    if args.all and args.tag:
        tty.die("Use the '--all' option OR provide tag(s) on the command line")

    # Provide a nice, simple message if database is empty
    if args.installed and not spack.environment.installed_specs():
        tty.msg("No installed packages")
        return

    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return

    show_packages = args.tag or args.all

    # Only report relevant, available tags if no packages are to be shown
    if not show_packages:
        if not args.installed:
            report_tags("available", available_tags)
        else:
            tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
            report_tags("installed", tag_pkgs.keys() if tag_pkgs else [])
        return

    # Report packages associated with tags
    out = six.StringIO()
    isatty = sys.stdout.isatty()

    tags = args.tag if args.tag else available_tags
    tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
    missing = 'No installed packages' if args.installed else 'None'
    for tag in sorted(tag_pkgs):
        # TODO: Remove the sorting once we're sure noone has an old
        # TODO: tag cache since it can accumulate duplicates.
        packages = sorted(set(tag_pkgs[tag]))
        if isatty:
            out.write("{0}:\n".format(tag))

        if packages:
            colify.colify(packages, output=out, tty=isatty, indent=4)
        else:
            out.write(" {0}\n".format(missing))
        out.write("\n")

    print(out.getvalue())
def test(parser, args):
    """List Spack's unit tests, or run them (optionally with XML output).

    Args:
        parser: argument parser (unused, part of the command interface)
        args: parsed arguments with ``list``, ``names``, ``verbose``,
            ``createXmlOutput``, and ``xmlOutputDir``
    """
    if args.list:
        # print() call form works on both Python 2 and 3 (the original used
        # the py2-only print statement).
        print("Available tests:")
        colify(spack.test.list_tests(), indent=2)
    else:
        if not args.createXmlOutput:
            outputDir = None
        else:
            if not args.xmlOutputDir:
                outputDir = join_path(os.getcwd(), "test-output")
            else:
                outputDir = os.path.abspath(args.xmlOutputDir)
            if not os.path.exists(outputDir):
                mkdirp(outputDir)
        spack.test.run(args.names, outputDir, args.verbose)
def run(names, outputDir, verbose=False):
    """Run tests with the supplied names.  Names should be a list.  If
    it's empty, run ALL of Spack's tests.

    Args:
        names (list): test module names (without the ``spack.test.`` prefix)
        outputDir (str or None): directory for xunit XML output, or None
        verbose (bool): if True, do not capture test output
    """
    # Print output to stdout if verbose is 1.
    if verbose:
        os.environ['NOSE_NOCAPTURE'] = '1'

    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(sorted(test_names), indent=4)
                sys.exit(1)

    tally = Tally()
    modules = ['spack.test.' + test for test in names]
    runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]
    if outputDir:
        # BUG FIX: the original formatted this filename with the loop
        # variable ``test``, which was undefined when ``names`` was empty
        # (running all tests -> NameError) and otherwise named the file
        # after whichever test the validation loop saw last.  All modules
        # run in one nose invocation, so use a single fixed filename.
        xmlOutputFname = "unittests.xml"
        xmlOutputPath = join_path(outputDir, xmlOutputFname)
        runOpts += ["--with-xunit", "--xunit-file={0}".format(xmlOutputPath)]
    argv = [""] + runOpts + modules
    nose.run(argv=argv, addplugins=[tally])

    succeeded = not tally.failCount and not tally.errorCount
    tty.msg("Tests Complete.",
            "%5d tests run" % tally.numberOfTestsRun,
            "%5d failures" % tally.failCount,
            "%5d errors" % tally.errorCount)

    if tally.fail_list:
        items = [x for x in tally.fail_list]
        tty.msg('List of failing tests:', *items)

    if tally.error_list:
        items = [x for x in tally.error_list]
        tty.msg('List of tests with errors:', *items)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def env_list(args):
    """List managed environments, highlighting the active one in green."""
    names = ev.all_environment_names()

    # Colorize the active environment's name.
    color_names = [
        colorize('@*g{%s}' % n) if ev.active(n) else n
        for n in names
    ]

    # say how many there are if writing to a tty
    if sys.stdout.isatty():
        if names:
            tty.msg('%d environments' % len(names))
        else:
            tty.msg('No environments')

    colify(color_names, indent=4)
def extensions(parser, args):
    """List extension packages for an extendable spec: names, installed
    specs, and activated specs.

    Args:
        parser: argument parser (unused, part of the command interface)
        args: parsed arguments with ``spec`` and ``mode``
    """
    if not args.spec:
        tty.die("extensions requires a package spec.")

    # Checks
    spec = spack.cmd.parse_specs(args.spec)
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")
    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    spec = spack.cmd.disambiguate_spec(spec[0])
    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)

    if not args.mode:
        args.mode = 'short'

    # List package names of extensions
    extensions = spack.db.extensions_for(spec)
    if not extensions:
        tty.msg("%s has no extensions." % spec.cshort_spec)
        return
    tty.msg(spec.cshort_spec)
    tty.msg("%d extensions:" % len(extensions))
    colify(ext.name for ext in extensions)

    # List specs of installed extensions.
    installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
    # print() call form works on both Python 2 and 3 (the original used the
    # py2-only bare print statement for blank lines).
    print()
    if not installed:
        tty.msg("None installed.")
        return
    tty.msg("%d installed:" % len(installed))
    spack.cmd.find.display_specs(installed, mode=args.mode)

    # List specs of activated extensions.
    activated = spack.install_layout.extension_map(spec)
    print()
    if not activated:
        tty.msg("None activated.")
        return
    tty.msg("%d currently activated:" % len(activated))
    spack.cmd.find.display_specs(activated.values(), mode=args.mode)
def dependencies(parser, args):
    """Show installed or possible dependencies of one spec."""
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        # Actual children recorded in the installation database.
        env = ev.get_env(args, 'dependencies')
        spec = spack.cmd.disambiguate_spec(specs[0], env)
        format_string = '{name}{@version}{%compiler}{/hash:7}'
        tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
        deps = spack.store.db.installed_relatives(
            spec, 'children', args.transitive, deptype=args.deptype)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
            print("No dependencies")
        return

    # Possible dependencies computed from package definitions.  For a
    # virtual spec, consider every provider's package.
    spec = specs[0]
    if spec.virtual:
        packages = [spack.repo.get(s.name)
                    for s in spack.repo.path.providers_for(spec)]
    else:
        packages = [spec.package]

    dependencies = set()
    for pkg in packages:
        dependencies.update(pkg.possible_dependencies(
            args.transitive, args.expand_virtuals, deptype=args.deptype))
    dependencies.discard(spec.name)

    if dependencies:
        colify(sorted(dependencies))
    else:
        print("No dependencies")
def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives."""
    # try to parse specs from the command line first.
    specs = spack.cmd.parse_specs(args.specs)

    # If there is a file, parse each line as a spec and add it to the list.
    if args.file:
        if specs:
            tty.die("Cannot pass specs on the command line with --file.")
        specs = _read_specs_from_file(args.file)

    # If nothing is passed, use all packages.
    if not specs:
        specs = [Spec(n) for n in spack.db.all_package_names()]
        specs.sort(key=lambda s: s.format("$_$@").lower())

    # Default name for directory is spack-mirror-<DATESTAMP>
    directory = args.directory
    if not directory:
        directory = "spack-mirror-" + datetime.now().strftime("%Y-%m-%d")

    # Make sure nothing is in the way.
    existed = False
    if os.path.isfile(directory):
        tty.error("%s already exists and is a file." % directory)
    elif os.path.isdir(directory):
        existed = True

    # Actually do the work to create the mirror
    present, mirrored, error = spack.mirror.create(
        directory, specs, num_versions=args.one_version_per_spec)
    p, m, e = len(present), len(mirrored), len(error)

    verb = "updated" if existed else "created"
    tty.msg(
        "Successfully %s mirror in %s." % (verb, directory),
        "Archive stats:",
        " %-4d already present" % p,
        " %-4d added" % m,
        " %-4d failed to fetch." % e,
    )
    if error:
        tty.error("Failed downloads:")
        colify(s.format("$_$@") for s in error)
def compiler_add(args):
    """Search either $PATH or a list of paths for compilers and add them
       to Spack's configuration.

    Args:
        args: parsed arguments; ``args.add_paths`` is an optional list of
            directories to search instead of $PATH.
    """
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # BUG FIX: the original searched ``args.add_paths`` directly, so the
    # $PATH fallback computed above was silently ignored when no paths
    # were given on the command line.
    compilers = [c for c in spack.compilers.find_compilers(*paths)
                 if c.spec not in spack.compilers.all_compilers()]

    if compilers:
        spack.compilers.add_compilers_to_config('user', *compilers)
        n = len(compilers)
        tty.msg("Added %d new compiler%s to %s" % (
            n, 's' if n > 1 else '', spack.config.get_filename('user')))
        colify(reversed(sorted(c.spec for c in compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
def compiler_remove(args):
    """Remove compilers matching a spec from Spack's configuration."""
    cspec = CompilerSpec(args.compiler_spec)
    matches = spack.compilers.compilers_for_spec(cspec, scope=args.scope)

    if not matches:
        tty.die("No compilers match spec %s" % cspec)

    # Multiple matches require either a unique spec or the -a flag.
    if len(matches) > 1 and not args.all:
        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
        colify(reversed(sorted([c.spec for c in matches])), indent=4)
        tty.msg(
            "Or, you can use `spack compiler remove -a` to remove all of them."
        )
        sys.exit(1)

    for compiler in matches:
        spack.compilers.remove_compiler_from_config(
            compiler.spec, scope=args.scope)
        tty.msg("Removed compiler %s" % compiler.spec)
def versions(parser, args):
    """List safe (checksummed) and remote versions of a package.

    Args:
        parser: argument parser (unused, part of the command interface)
        args: parsed arguments with ``package``, the package name
    """
    pkg = spack.repo.get(args.package)

    safe_versions = pkg.versions
    fetched_versions = pkg.fetch_remote_versions()
    remote_versions = set(fetched_versions).difference(safe_versions)

    tty.msg("Safe versions (already checksummed):")
    colify(sorted(safe_versions, reverse=True), indent=2)

    tty.msg("Remote versions (not yet checksummed):")
    if not remote_versions:
        # print() call form works on both Python 2 and 3 (the original used
        # the py2-only print statement).
        if not fetched_versions:
            print(" Found no versions for %s" % pkg.name)
            tty.debug("Check the list_url and list_depth attribute on the "
                      "package to help Spack find versions.")
        else:
            # BUG FIX: corrected typo "unckecksummed" in the user-facing
            # message.
            print(" Found no unchecksummed versions for %s" % pkg.name)
    else:
        colify(sorted(remote_versions, reverse=True), indent=2)
def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives."""
    # try to parse specs from the command line first.
    specs = spack.cmd.parse_specs(args.specs)

    # If there is a file, parse each line as a spec and add it to the list.
    if args.file:
        if specs:
            tty.die("Cannot pass specs on the command line with --file.")
        specs = _read_specs_from_file(args.file)

    # If nothing is passed, use all packages.
    if not specs:
        specs = [Spec(n) for n in spack.db.all_package_names()]
        specs.sort(key=lambda s: s.format("$_$@").lower())

    # Default name for directory is spack-mirror-<DATESTAMP>
    directory = args.directory
    if not directory:
        timestamp = datetime.now().strftime("%Y-%m-%d")
        directory = 'spack-mirror-' + timestamp

    # Make sure nothing is in the way.
    existed = False
    if os.path.isfile(directory):
        tty.error("%s already exists and is a file." % directory)
    elif os.path.isdir(directory):
        existed = True

    # Actually do the work to create the mirror
    present, mirrored, error = spack.mirror.create(
        directory, specs, num_versions=args.one_version_per_spec)
    p, m, e = len(present), len(mirrored), len(error)

    verb = "updated" if existed else "created"
    tty.msg("Successfully %s mirror in %s." % (verb, directory),
            "Archive stats:",
            " %-4d already present" % p,
            " %-4d added" % m,
            " %-4d failed to fetch." % e)
    if error:
        tty.error("Failed downloads:")
        colify(s.format("$_$@") for s in error)
def run(names, verbose=False):
    """Run tests with the supplied names.  Names should be a list.  If
    it's empty, run ALL of Spack's tests.

    Args:
        names (list): test module names (without the ``spack.test.`` prefix)
        verbose (bool): if True, run the test runner at higher verbosity
    """
    verbosity = 1 if not verbose else 2

    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(test_names, indent=4)
                sys.exit(1)

    runner = unittest.TextTestRunner(verbosity=verbosity)

    testsRun = errors = failures = skipped = 0
    for test in names:
        module = 'spack.test.' + test
        # print() call form works on both Python 2 and 3 (the original used
        # the py2-only print statement).
        print(module)
        suite = unittest.defaultTestLoader.loadTestsFromName(module)

        tty.msg("Running test: %s" % test)
        result = runner.run(suite)
        testsRun += result.testsRun
        errors += len(result.errors)
        failures += len(result.failures)
        skipped += len(result.skipped)

    succeeded = not errors and not failures
    tty.msg("Tests Complete.",
            "%5d tests run" % testsRun,
            "%5d skipped" % skipped,
            "%5d failures" % failures,
            "%5d errors" % errors)

    # Use the already-computed flag (original recomputed the condition).
    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def compiler_add(args):
    """Search either $PATH or a list of paths for compilers and add them
       to Spack's configuration.

    Args:
        args: parsed arguments; ``args.add_paths`` is an optional list of
            directories to search instead of $PATH.
    """
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # BUG FIX: the original passed ``*args.add_paths`` to find_compilers,
    # which ignored the $PATH fallback computed above when no paths were
    # given on the command line.
    compilers = [
        c for c in spack.compilers.find_compilers(*paths)
        if c.spec not in spack.compilers.all_compilers()
    ]

    if compilers:
        spack.compilers.add_compilers_to_config('user', *compilers)
        n = len(compilers)
        tty.msg("Added %d new compiler%s to %s" %
                (n, 's' if n > 1 else '', spack.config.get_filename('user')))
        colify(reversed(sorted(c.spec for c in compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
def dependencies(parser, args):
    """Show installed or possible dependencies of one spec."""
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        # Actual children recorded in the installation database.
        spec = spack.cmd.disambiguate_spec(specs[0])
        tty.msg("Dependencies of %s" % spec.format('$_$@$%@$/', color=True))
        deps = spack.store.db.installed_relatives(spec, 'children',
                                                  args.transitive)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
            print("No dependencies")
        return

    # Possible dependencies from package definitions; virtual specs are
    # expanded to all of their providers.
    spec = specs[0]
    if spec.virtual:
        packages = [spack.repo.get(s.name)
                    for s in spack.repo.path.providers_for(spec)]
    else:
        packages = [spec.package]

    dependencies = set()
    for pkg in packages:
        dependencies.update(pkg.possible_dependencies(
            args.transitive, args.expand_virtuals))
    dependencies.discard(spec.name)

    if dependencies:
        colify(sorted(dependencies))
    else:
        print("No dependencies")
def list(parser, args):
    """List all package names, optionally filtered by glob patterns."""
    # Start with all package names.
    pkgs = spack.repo.all_package_names()

    # filter if a filter arg was provided
    if args.filter:
        def match(p, f):
            # Case-insensitive comparison if requested.
            if args.insensitive:
                p = p.lower()
                f = f.lower()
            return fnmatch.fnmatchcase(p, f)
        pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)]

    # sort before displaying.
    sorted_packages = sorted(pkgs, key=lambda s: s.lower())

    # Print all the package names in columns
    indent = 0
    if sys.stdout.isatty():
        tty.msg("%d packages." % len(sorted_packages))
    colify(sorted_packages, indent=indent)
def run(names, outputDir, verbose=False):
    """Run tests with the supplied names.  Names should be a list.  If
       it's empty, run ALL of Spack's tests."""
    if not names:
        names = test_names
    else:
        # Reject the first name that is not a known test.
        bad = [t for t in names if t not in test_names]
        if bad:
            tty.error("%s is not a valid spack test name." % bad[0],
                      "Valid names are:")
            colify(sorted(test_names), indent=4)
            sys.exit(1)

    tally = Tally()
    for test in names:
        module = 'spack.test.' + test
        print(module)

        tty.msg("Running test: %s" % test)

        runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]
        if outputDir:
            # One xunit XML file per test module.
            xmlOutputFname = "unittests-{0}.xml".format(test)
            xmlOutputPath = join_path(outputDir, xmlOutputFname)
            runOpts += ["--with-xunit",
                        "--xunit-file={0}".format(xmlOutputPath)]
        argv = [""] + runOpts + [module]
        nose.run(argv=argv, addplugins=[tally])

    succeeded = not tally.failCount and not tally.errorCount
    tty.msg("Tests Complete.",
            "%5d tests run" % tally.numberOfTestsRun,
            "%5d failures" % tally.failCount,
            "%5d errors" % tally.errorCount)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def compiler_list(args):
    """List configured compilers grouped by (name, os, target)."""
    compilers = spack.compilers.all_compilers(scope=args.scope,
                                              init_config=False)

    # If there are no compilers in any scope, and we're outputting to a tty,
    # give a hint to the user.
    if len(compilers) == 0:
        if not sys.stdout.isatty():
            return
        msg = "No compilers available"
        if args.scope is None:
            msg += ". Run `spack compiler find` to autodetect compilers"
        tty.msg(msg)
        return

    index = index_by(compilers,
                     lambda c: (c.spec.name, c.operating_system, c.target))

    tty.msg("Available compilers")

    # For a container, take each element which does not evaluate to false and
    # convert it to a string. For elements which evaluate to False (e.g. None)
    # convert them to '' (in which case it still evaluates to False but is a
    # string type). Tuples produced by this are guaranteed to be comparable
    # in Python 3
    def convert_str(tuple_container):
        return tuple(str(x) if x else '' for x in tuple_container)

    index_str_keys = [(convert_str(k), v) for k, v in index.items()]
    ordered_sections = sorted(index_str_keys, key=lambda item: item[0])
    for i, (key, compilers) in enumerate(ordered_sections):
        if i >= 1:
            print()
        name, os, target = key
        os_str = os if not target else os + "-%s" % target
        cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
        tty.hline(colorize(cname), char='-')
        colify(reversed(sorted(c.spec for c in compilers)))
def run(names, verbose=False):
    """Run tests with the supplied names.  Names should be a list.  If
    it's empty, run ALL of Spack's tests.

    Args:
        names (list): test module names (without the ``spack.test.`` prefix)
        verbose (bool): if True, run the test runner at higher verbosity
    """
    verbosity = 1 if not verbose else 2

    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(test_names, indent=4)
                sys.exit(1)

    runner = unittest.TextTestRunner(verbosity=verbosity)

    testsRun = errors = failures = 0
    for test in names:
        module = 'spack.test.' + test
        # print() call form works on both Python 2 and 3 (the original used
        # the py2-only print statement).
        print(module)
        suite = unittest.defaultTestLoader.loadTestsFromName(module)

        tty.msg("Running test: %s" % test)
        result = runner.run(suite)
        testsRun += result.testsRun
        errors += len(result.errors)
        failures += len(result.failures)

    succeeded = not errors and not failures
    tty.msg("Tests Complete.",
            "%5d tests run" % testsRun,
            "%5d failures" % failures,
            "%5d errors" % errors)

    # Use the already-computed flag (original recomputed the condition).
    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def info(parser, args):
    """Print information about a package: versions, dependencies, provided
    virtuals, and description.

    Args:
        parser: argument parser (unused, part of the command interface)
        args: parsed arguments with ``name``, the package name
    """
    package = spack.db.get(args.name)

    # print() call form works on both Python 2 and 3 (the original used the
    # py2-only print statement); a trailing comma-separated item prints with
    # the same single-space separator as the py2 statement.
    print("Package: ", package.name)
    print("Homepage: ", package.homepage)

    print()
    print("Safe versions: ")
    if not package.versions:
        print("None.")
    else:
        # Pad version strings so URLs line up in a column.
        maxlen = max(len(str(v)) for v in package.versions)
        fmt = "%%-%ss" % maxlen
        for v in reversed(sorted(package.versions)):
            print(" " + (fmt % v) + " " + package.url_for_version(v))

    print()
    print("Dependencies:")
    if package.dependencies:
        colify(package.dependencies, indent=4)
    else:
        print(" None")

    print()
    print("Virtual packages: ")
    if package.provided:
        for spec, when in package.provided.items():
            print(" %s provides %s" % (when, spec))
    else:
        print(" None")

    print()
    print("Description:")
    if package.__doc__:
        # Collapse whitespace and re-wrap the docstring at 72 columns.
        doc = re.sub(r'\s+', ' ', package.__doc__)
        lines = textwrap.wrap(doc, 72)
        for line in lines:
            print(" " + line)
    else:
        print(" None")
def dependencies(parser, args):
    """Show installed or possible dependencies of one spec."""
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        # Actual children recorded in the installation database.
        spec = spack.cmd.disambiguate_spec(specs[0])
        tty.msg("Dependencies of %s" % spec.format('$_$@$%@$/', color=True))
        children = spack.store.db.installed_relatives(
            spec, 'children', args.transitive)
        if children:
            spack.cmd.display_specs(children, long=True)
        else:
            print("No dependencies")
        return

    # Possible dependencies from package definitions; a virtual spec is
    # expanded to all of its providers.
    spec = specs[0]
    if spec.virtual:
        packages = [spack.repo.get(s.name)
                    for s in spack.repo.path.providers_for(spec)]
    else:
        packages = [spec.package]

    names = set()
    for pkg in packages:
        names.update(pkg.possible_dependencies(
            args.transitive, args.expand_virtuals))
    names.discard(spec.name)

    if names:
        colify(sorted(names))
    else:
        print("No dependencies")
def run(names, outputDir, verbose=False):
    """Run tests with the supplied names.  Names should be a list.  If
       it's empty, run ALL of Spack's tests."""
    if not names:
        names = test_names
    else:
        for test in names:
            if test not in test_names:
                tty.error("%s is not a valid spack test name." % test,
                          "Valid names are:")
                colify(sorted(test_names), indent=4)
                sys.exit(1)

    tally = Tally()
    for test in names:
        module = 'spack.test.' + test
        print(module)

        tty.msg("Running test: %s" % test)

        # Base options plus optional per-test xunit XML output.
        runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]
        if outputDir:
            xmlOutputPath = join_path(
                outputDir, "unittests-{0}.xml".format(test))
            runOpts += ["--with-xunit",
                        "--xunit-file={0}".format(xmlOutputPath)]

        nose.run(argv=[""] + runOpts + [module], addplugins=[tally])

    succeeded = not tally.failCount and not tally.errorCount
    tty.msg("Tests Complete.",
            "%5d tests run" % tally.numberOfTestsRun,
            "%5d failures" % tally.failCount,
            "%5d errors" % tally.errorCount)

    if succeeded:
        tty.info("OK", format='g')
    else:
        tty.info("FAIL", format='r')
        sys.exit(1)
def test_list(args):
    """List installed packages with available tests."""
    if args.list_all:
        # Every package class in the repo that defines a test method.
        testable = [
            pkg_class.name
            for pkg_class in spack.repo.path.all_package_classes()
            if has_test_method(pkg_class)
        ]

        if sys.stdout.isatty():
            tty.msg("%d packages with tests." % len(testable))
        colify.colify(testable)
        return

    # TODO: This can be extended to have all of the output formatting options
    # from `spack find`.
    env = ev.get_env(args, 'test')
    hashes = env.all_hashes() if env else None

    specs = spack.store.db.query(hashes=hashes)
    specs = list(filter(lambda s: has_test_method(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)
def info(parser, args):
    """Print a plain-text summary of a package: name, URLs, safe versions,
    dependencies, provided virtuals, and description.

    Fix: the body used Python 2 ``print`` statements, which are syntax
    errors under Python 3 (the rest of the file calls ``print()`` as a
    function).  Converted to ``print()`` calls; output is unchanged.
    """
    package = spack.db.get(args.name)
    print("Package: ", package.name)
    print("Homepage: ", package.homepage)
    print("Download: ", package.url)

    print()
    print("Safe versions: ")
    if package.versions:
        colify(reversed(sorted(package.versions)), indent=4)
    else:
        print("None. Use spack versions %s to get a list of downloadable versions." % package.name)

    print()
    print("Dependencies:")
    if package.dependencies:
        colify(package.dependencies, indent=4)
    else:
        print(" None")

    print()
    print("Virtual packages: ")
    if package.provided:
        for spec, when in package.provided.items():
            print(" %s provides %s" % (when, spec))
    else:
        print(" None")

    print()
    print("Description:")
    if package.__doc__:
        # collapse runs of whitespace, then wrap to 72 columns
        doc = re.sub(r'\s+', ' ', package.__doc__)
        lines = textwrap.wrap(doc, 72)
        for line in lines:
            print(" " + line)
    else:
        print(" None")
def _compiler_specs(self, data):
    """Read compiler specs from YAML data.

    Example YAML:

        gcc:
          versions: [4.4.8, 4.9.3]
        clang:
          versions: [3.6.1, 3.7.2, 3.8]

    Optionally, data can be 'all', in which case all compilers for
    the current platform are returned.

    Fix: the "compilers were unavailable" warning was emitted
    unconditionally whenever ``ignore_invalid`` was set, even if
    nothing was missing; it is now guarded by ``if missing:``.
    """
    # get usable compilers for current platform.
    arch = ArchSpec(str(sarch.platform()), 'default_os', 'default_target')
    available_compilers = [
        c.spec for c in spack.compilers.compilers_for_arch(arch)]

    # return compilers for this platform if asked for everything.
    if data == 'all':
        return [cspec.copy() for cspec in available_compilers]

    # otherwise create specs from the YAML file.
    cspecs = set([
        Spec('%{0}@{1}'.format(compiler, version))
        for compiler in data
        for version in data[compiler]['versions']])

    # filter out invalid specs if caller said to ignore them.
    if self.ignore_invalid:
        missing = [
            c for c in cspecs
            if not any(c.compiler.satisfies(comp)
                       for comp in available_compilers)]
        if missing:  # only warn when something is actually unavailable
            tty.warn("The following compilers were unavailable:")
            colify(sorted(m.compiler for m in missing))
        cspecs -= set(missing)

    return cspecs
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers and
       add them to Spack's configuration.
    """
    # None signals spack.compiler.find_compilers to use its default logic
    paths = args.add_paths or None

    # Don't initialize compilers config via compilers.get_compiler_config.
    # Just let compiler_find do the entire process and return an empty
    # config from all_compilers.  Default for any other process is
    # init_config=True
    found = list(spack.compilers.find_compilers(paths))

    fresh = []
    for comp in found:
        arch_spec = ArchSpec((None, comp.operating_system, comp.target))
        known = spack.compilers.compilers_for_spec(
            comp.spec, arch_spec, init_config=False)
        if not known:
            fresh.append(comp)

    if not fresh:
        tty.msg("Found no new compilers")
    else:
        spack.compilers.add_compilers_to_config(
            fresh, scope=args.scope, init_config=False)
        count = len(fresh)
        plural = 's' if count > 1 else ''
        config = spack.config.config
        filename = config.get_config_filename(args.scope, 'compilers')
        tty.msg("Added %d new compiler%s to %s" % (count, plural, filename))
        colify(reversed(sorted(c.spec for c in fresh)), indent=4)

    tty.msg("Compilers are defined in the following files:")
    colify(spack.compilers.compiler_config_files(), indent=4)
def dependencies(parser, args):
    """Show dependencies of one spec: installed children of a concrete
    spec, or all possible dependencies of a package."""
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        env = ev.active_environment()
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = '{name}{@version}{%compiler}{/hash:7}'
        if sys.stdout.isatty():
            tty.msg("Dependencies of %s"
                    % spec.format(format_string, color=True))
        children = spack.store.db.installed_relatives(
            spec, 'children', args.transitive, deptype=args.deptype)
        if not children:
            print("No dependencies")
        else:
            spack.cmd.display_specs(children, long=True)
        return

    spec = specs[0]
    possible = spack.package_base.possible_dependencies(
        spec,
        transitive=args.transitive,
        expand_virtuals=args.expand_virtuals,
        deptype=args.deptype)
    # the package itself is not one of its own dependencies
    possible.pop(spec.name, None)

    if possible:
        colify(sorted(possible))
    else:
        print("No dependencies")
def format_list(specs):
    """Display a single list of specs, with no groups"""
    # render every spec (and, with deps, its dependency tree) up front;
    # a ('', None) pair marks a blank separator line
    rows = []
    for spec in specs:
        rows.append((fmt(spec), spec))
        if deps:
            for depth, dep in spec.traverse(root=False, depth=True):
                rows.append((fmt(dep, depth), dep))
            rows.append(('', None))

    # without deps or paths, colify handles everything
    if not deps and not paths:
        colify((row[0] for row in rows), indent=indent, output=output)
        return ''

    # otherwise, print specs one by one, padded to a common width
    widest = max(len(row[0]) for row in rows)
    path_fmt = "%%-%ds%%s" % (widest + 2)

    out = ''
    # getting lots of prefixes requires DB lookups. Ensure
    # all spec.prefix calls are in one transaction.
    with spack.store.db.read_transaction():
        for text, spec in rows:
            if not text:
                out += '\n'  # separator marked above
            elif paths:
                out += path_fmt % (text, spec.prefix) + '\n'
            else:
                out += text + '\n'
    return out
def maintainers(parser, args):
    """Dispatch the maintainer listing modes; return 0 when anything was
    found, 1 otherwise."""
    if args.maintained or args.unmaintained:
        maintained, unmaintained = maintained_packages()
        pkgs = maintained if args.maintained else unmaintained
        colify(pkgs)
        return 0 if pkgs else 1

    if args.all:
        if args.by_user:
            mapping = maintainers_to_packages(args.package_or_user)
            for user, pkgs in sorted(mapping.items()):
                color.cprint('@c{%s}: %s'
                             % (user, ', '.join(sorted(pkgs))))
            return 0 if mapping else 1

        mapping = packages_to_maintainers(args.package_or_user)
        for pkg, users in sorted(mapping.items()):
            color.cprint('@c{%s}: %s' % (pkg, ', '.join(sorted(users))))
        return 0 if mapping else 1

    if args.by_user:
        if not args.package_or_user:
            tty.die('spack maintainers --by-user requires a user or --all')

        pkgs = union_values(maintainers_to_packages(args.package_or_user))
        colify(pkgs)
        return 0 if pkgs else 1

    if not args.package_or_user:
        tty.die('spack maintainers requires a package or --all')

    users = union_values(packages_to_maintainers(args.package_or_user))
    colify(users)
    return 0 if users else 1
def extensions(parser, args):
    """List extendable packages, or the extensions of a given spec.

    Fix: ``if args.show == "all": print`` was a bare name expression —
    a no-op under Python 3, so the intended blank separator line was
    never printed.  Changed to ``print()`` calls.
    """
    if not args.spec:
        # If called without arguments, list all the extendable packages
        isatty = sys.stdout.isatty()
        if isatty:
            tty.info('Extendable packages:')

        extendable_pkgs = []
        for name in spack.repo.all_package_names():
            pkg = spack.repo.get(name)
            if pkg.extendable:
                extendable_pkgs.append(name)

        colify(extendable_pkgs, indent=4)
        return

    # Checks
    spec = cmd.parse_specs(args.spec)
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")

    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    env = ev.active_environment()
    spec = cmd.disambiguate_spec(spec[0], env)

    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)

    if args.show in ("packages", "all"):
        # List package names of extensions
        extensions = spack.repo.path.extensions_for(spec)
        if not extensions:
            tty.msg("%s has no extensions." % spec.cshort_spec)
        else:
            tty.msg(spec.cshort_spec)
            tty.msg("%d extensions:" % len(extensions))
            colify(ext.name for ext in extensions)

    if args.view:
        target = args.view
    else:
        target = spec.prefix

    view = YamlFilesystemView(target, spack.store.layout)

    if args.show in ("installed", "all"):
        # List specs of installed extensions.
        installed = [
            s.spec for s in spack.store.db.installed_extensions_for(spec)]

        if args.show == "all":
            print()  # blank line between sections (was a no-op bare `print`)
        if not installed:
            tty.msg("None installed.")
        else:
            tty.msg("%d installed:" % len(installed))
            cmd.display_specs(installed, args)

    if args.show in ("activated", "all"):
        # List specs of activated extensions.
        activated = view.extensions_layout.extension_map(spec)
        if args.show == "all":
            print()  # blank line between sections (was a no-op bare `print`)
        if not activated:
            tty.msg("None activated.")
        else:
            tty.msg("%d activated:" % len(activated))
            cmd.display_specs(activated.values(), args)
def pkg_list(args):
    """List packages associated with a particular spack git revision."""
    pkgs = list_packages(args.rev)
    colify(pkgs)
def providers(parser, args):
    """Print the providers of each requested virtual package spec."""
    for vspec in spack.cmd.parse_specs(args.vpkg_spec):
        matches = spack.repo.providers_for(vspec)
        colify(sorted(matches), indent=4)
def pkg_removed(args):
    """Show packages removed since a commit."""
    removed, _ = diff_packages(args.rev1, args.rev2)
    if removed:
        colify(sorted(removed))
def pkg_added(args):
    """Show packages added since a commit."""
    _, added = diff_packages(args.rev1, args.rev2)
    if added:
        colify(sorted(added))
def get_uninstall_list(args, specs, env):
    """Compute what to uninstall and what to merely force-remove.

    Returns a tuple ``(uninstall_list, removes)`` where ``removes`` is the
    subset of specs that still have external dependents (inactive dependent
    packages, or other environments) and so should only be removed from the
    current environment, not fully uninstalled.

    Dies via ``tty.die`` (unless ``args.force``) when specs have active
    dependents or are used by other environments, after printing the
    reason for each problem spec.
    """
    # Gets the list of installed specs that match the ones give via cli
    # args.all takes care of the case where '-a' is given in the cli
    uninstall_list = find_matching_specs(env, specs, args.all, args.force)

    # Takes care of '-R'
    active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)

    # if we are in the global scope, we complain if you try to remove a
    # spec that's in an environment.  If we're in an environment, we'll
    # just *remove* it from the environment, so we ignore this
    # error when *in* an environment
    spec_envs = dependent_environments(uninstall_list)
    spec_envs = inactive_dependent_environments(spec_envs)

    # Process spec_dependents and update uninstall_list
    has_error = not args.force and (
        (active_dpts and not args.dependents)  # dependents in the current env
        or (not env and spec_envs)  # there are environments that need specs
    )

    # say why each problem spec is needed
    if has_error:
        specs = set(active_dpts)
        if not env:
            specs.update(set(spec_envs))  # environments depend on this

        for i, spec in enumerate(sorted(specs)):
            # space out blocks of reasons
            if i > 0:
                print()

            spec_format = '{name}{@version}{%compiler}{/hash:7}'
            tty.info("Will not uninstall %s" % spec.cformat(spec_format),
                     format='*r')

            dependents = active_dpts.get(spec)
            if dependents:
                print('The following packages depend on it:')
                spack.cmd.display_specs(dependents, **display_args)

            if not env:
                envs = spec_envs.get(spec)
                if envs:
                    print('It is used by the following environments:')
                    colify([e.name for e in envs], indent=4)

        msgs = []
        if active_dpts:
            msgs.append(
                'use `spack uninstall --dependents` to remove dependents too')
        if spec_envs:
            msgs.append('use `spack env remove` to remove from environments')
        print()
        tty.die('There are still dependents.', *msgs)

    elif args.dependents:
        # -R/--dependents: pull all active dependents into the list too
        for spec, lst in active_dpts.items():
            uninstall_list.extend(lst)
        uninstall_list = list(set(uninstall_list))

    # only force-remove (don't completely uninstall) specs that still
    # have external dependent envs or pkgs
    removes = set(inactive_dpts)
    if env:
        removes.update(spec_envs)

    # remove anything in removes from the uninstall list
    uninstall_list = set(uninstall_list) - removes

    return uninstall_list, removes
def name_only(pkgs, out):
    """Write package names in columns, with a count header on a tty."""
    pad = 0
    if out.isatty():
        tty.msg("%d packages." % len(pkgs))
    colify(pkgs, indent=pad, output=out)
def print_text_info(pkg):
    """Print out a plain text description of a package.

    Sections, in order: header, description, homepage, maintainers,
    tags, preferred/safe versions, variants, installation phases,
    per-deptype dependencies, and provided virtual packages.
    """
    # header line: "<BuildSystemClass>: <name>"
    header = section_title(
        '{0}: '
    ).format(pkg.build_system_class) + pkg.name

    color.cprint(header)
    color.cprint('')
    color.cprint(section_title('Description:'))
    if pkg.__doc__:
        color.cprint(color.cescape(pkg.format_doc(indent=4)))
    else:
        color.cprint(" None")

    color.cprint(section_title('Homepage: ') + pkg.homepage)

    if len(pkg.maintainers) > 0:
        # '@@' escapes the '@' so colorize prints a literal '@handle'
        mnt = " ".join(['@@' + m for m in pkg.maintainers])
        color.cprint('')
        color.cprint(section_title('Maintainers: ') + mnt)

    color.cprint('')
    color.cprint(section_title("Tags: "))
    if hasattr(pkg, 'tags'):
        tags = sorted(pkg.tags)
        colify(tags, indent=4)
    else:
        color.cprint(" None")

    color.cprint('')
    color.cprint(section_title('Preferred version: '))

    if not pkg.versions:
        color.cprint(version(' None'))
        color.cprint('')
        color.cprint(section_title('Safe versions: '))
        color.cprint(version(' None'))
    else:
        pad = padder(pkg.versions, 4)

        # Here we sort first on the fact that a version is marked
        # as preferred in the package, then on the fact that the
        # version is not develop, then lexicographically
        key_fn = lambda v: (pkg.versions[v].get('preferred', False),
                            not v.isdevelop(),
                            v)
        preferred = sorted(pkg.versions, key=key_fn).pop()

        f = fs.for_package_version(pkg, preferred)
        line = version(' {0}'.format(pad(preferred))) + color.cescape(f)
        color.cprint(line)
        color.cprint('')
        color.cprint(section_title('Safe versions: '))

        for v in reversed(sorted(pkg.versions)):
            f = fs.for_package_version(pkg, v)
            line = version(' {0}'.format(pad(v))) + color.cescape(f)
            color.cprint(line)

    color.cprint('')
    color.cprint(section_title('Variants:'))

    formatter = VariantFormatter(pkg.variants)
    for line in formatter.lines:
        color.cprint(line)

    color.cprint('')
    color.cprint(section_title('Installation Phases:'))
    phase_str = ''
    for phase in pkg.phases:
        phase_str += " {0}".format(phase)
    color.cprint(phase_str)

    for deptype in ('build', 'link', 'run'):
        color.cprint('')
        color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
        deps = sorted(pkg.dependencies_of_type(deptype))
        if deps:
            colify(deps, indent=4)
        else:
            color.cprint(' None')

    color.cprint('')
    color.cprint(section_title('Virtual Packages: '))
    if pkg.provided:
        # invert {spec: whens} into {when: set(specs)} for display
        inverse_map = {}
        for spec, whens in pkg.provided.items():
            for when in whens:
                if when not in inverse_map:
                    inverse_map[when] = set()
                inverse_map[when].add(spec)
        for when, specs in reversed(sorted(inverse_map.items())):
            line = " %s provides %s" % (
                when.colorized(),
                ', '.join(s.colorized() for s in specs)
            )
            print(line)
    else:
        color.cprint(" None")

    color.cprint('')
def versions(parser, args):
    """Fetch and print available versions of a package, newest first."""
    pkg = spack.db.get(args.package)
    available = pkg.fetch_available_versions()
    colify(reversed(available))
def list(parser, args):
    """Print all the package names in columns."""
    names = spack.db.all_package_names()
    colify(names)