def restore_dependencies():
    """Restore each mock package's original dependencies and constraints,
       undoing any set_pkg_dep() calls made by the previous test."""
    global original_deps
    for pkg_name in original_deps:
        # Drop whatever the last test installed, then re-add the originals.
        packages.get(pkg_name).dependencies.clear()
        for dep in original_deps[pkg_name]:
            set_pkg_dep(pkg_name, dep)
def test_one_version_match(self):
    """Each spec should dispatch to the single matching version variant."""
    cases = [('[email protected]', 1),
             ('[email protected]', 3),
             ('[email protected]', 4)]
    for spec, expected in cases:
        self.assertEqual(packages.get(spec).no_version_2(), expected)
def test_mpi_version(self):
    """mpi_version() should dispatch on the version of the mpi dependency."""
    cases = [('multimethod^[email protected]', 3),
             ('multimethod^[email protected]', 2),
             ('multimethod^[email protected]', 1)]
    for spec, expected in cases:
        self.assertEqual(packages.get(spec).mpi_version(), expected)
def test_default_works(self):
    """has_a_default() should match a compiler-specific variant when one
       exists and fall back to the default variant otherwise."""
    cases = [('gcc', 'gcc'),
             ('intel', 'intel'),
             ('pgi', 'default')]  # no pgi variant -> default
    for compiler, expected in cases:
        pkg = packages.get('multimethod%' + compiler)
        self.assertEqual(pkg.has_a_default(), expected)
def test_dependency_match(self):
    """different_by_dep() should dispatch on the concrete dependency."""
    self.assertEqual(packages.get('multimethod^zmpi').different_by_dep(), 'zmpi')
    self.assertEqual(packages.get('multimethod^mpich').different_by_dep(), 'mpich')

    # If we try to switch on some entirely different dep, it's ambiguous,
    # but should take the first option
    self.assertEqual(packages.get('multimethod^foobar').different_by_dep(), 'mpich')
def test_architecture_match(self):
    """different_by_architecture() should dispatch on the spec's arch,
       and raise when no variant covers the architecture at all."""
    for arch in ('x86_64', 'ppc64', 'ppc32', 'arm64'):
        pkg = packages.get('multimethod=' + arch)
        self.assertEqual(pkg.different_by_architecture(), arch)

    # No variant is defined for macos, so dispatch must fail.
    pkg = packages.get('multimethod=macos')
    self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
def uninstall(parser, args):
    """spack uninstall: remove installed packages matching the given specs.

       Each spec must identify exactly one installed package; otherwise
       the command dies and asks the user to disambiguate."""
    if not args.packages:
        tty.die("uninstall requires at least one package argument.")

    # Resolve every spec to exactly one installed package.
    pkgs = []
    for spec in spack.cmd.parse_specs(args.packages):
        matches = packages.get_installed(spec)
        if len(matches) > 1:
            tty.die("%s matches multiple packages. Which one did you mean?" % spec,
                    *matches)
        elif len(matches) == 0:
            tty.die("%s does not match any installed packages." % spec)
        pkgs.append(packages.get(matches[0]))

    # Uninstall in order of installed dependents so that dependents
    # are removed before the packages they depend on.
    pkgs.sort(key=lambda p: len(p.installed_dependents))

    for pkg in pkgs:
        pkg.do_uninstall()
def stage(parser, args):
    """spack stage: fetch and expand archives for the given specs
       without building them."""
    if not args.packages:
        tty.die("stage requires at least one package argument")

    if args.no_checksum:
        # User explicitly opted out of checksum verification.
        spack.do_checksum = False

    for spec in spack.cmd.parse_specs(args.packages, concretize=True):
        packages.get(spec).do_stage()
def setUpClass(cls):
    # Use a different packages directory for these tests. We want to use
    # mocked up packages that don't interfere with the real ones.
    cls.real_packages_path = spack.packages_path
    spack.packages_path = mock_packages_path

    # Snapshot the original dependency specs of every mock package so
    # individual tests can mutate them and restore afterwards.
    global original_deps
    original_deps = {}
    for name in list_modules(mock_packages_path):
        original_deps[name] = list(packages.get(name).dependencies.values())
def install(parser, args):
    """spack install: fetch, build, and install the given specs."""
    if not args.packages:
        tty.die("install requires at least one package argument")

    if args.no_checksum:
        # User explicitly opted out of checksum verification.
        spack.do_checksum = False

    spack.ignore_dependencies = args.ignore_dependencies

    for spec in spack.cmd.parse_specs(args.packages, concretize=True):
        pkg = packages.get(spec)
        pkg.dirty = args.dirty  # keep the stage dir around if requested
        pkg.do_install()
def clean(parser, args):
    """spack clean: remove build byproducts for the given specs.

       --dist removes the downloaded archive, --work removes the expanded
       source tree; the default cleans only the build output."""
    if not args.packages:
        tty.die("spack clean requires at least one package argument")

    specs = spack.cmd.parse_specs(args.packages, concretize=True)
    for spec in specs:
        # Fix: use tty.msg like the rest of the commands (e.g. checksum);
        # tty.message is not the API used elsewhere in this file.
        tty.msg("Cleaning for spec:", spec)
        package = packages.get(spec.name)
        if args.dist:
            package.do_clean_dist()
        elif args.work:
            package.do_clean_work()
        else:
            package.do_clean()
def checksum(parser, args):
    """spack checksum: download archives for a package's versions and
       print a versions dict the packager can paste into the package file."""
    # get the package we're going to generate checksums for
    pkg = packages.get(args.package)

    # If the user asked for specific versions, use those.
    # Only concrete versions can be checksummed, not lists/ranges.
    versions = [ver(v) for v in args.versions]
    if not all(isinstance(v, Version) for v in versions):
        tty.die("Cannot generate checksums for version lists or " +
                "version ranges. Use unambiguous versions.")

    if not versions:
        versions = pkg.fetch_available_versions()
        if not versions:
            tty.die("Could not fetch any available versions for %s." % pkg.name)

    # Newest first.
    versions = list(reversed(versions))
    urls = [pkg.url_for_version(v) for v in versions]

    tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
            *spack.cmd.elide_list(
                ["%-10s%s" % (v, u) for v, u in zip(versions, urls)]))
    print
    archives_to_fetch = tty.get_number(
        "How many would you like to checksum?", default=5, abort='q')

    if not archives_to_fetch:
        tty.msg("Aborted.")
        return

    version_hashes = get_checksums(versions[:archives_to_fetch],
                                   urls[:archives_to_fetch])

    if not version_hashes:
        # Fix: this branch previously reused the "available versions"
        # message verbatim; report the actual checksum failure instead.
        tty.die("Could not fetch any checksums for %s." % pkg.name)

    # Render the result as a pasteable Python dict literal.
    dict_string = [" '%s' : '%s'," % (v, h) for v, h in version_hashes]
    dict_string = ['{'] + dict_string + ["}"]
    tty.msg("Checksummed new versions of %s:" % pkg.name, *dict_string)
def info(parser, args):
    # Implements `spack info <name>`: print a package's metadata, safe
    # versions, dependencies, provided virtual packages, and description.
    package = packages.get(args.name)
    print "Package: ", package.name
    print "Homepage: ", package.homepage
    print "Download: ", package.url
    print
    print "Safe versions: "
    if package.versions:
        # Newest versions first, columnized.
        colify(reversed(sorted(package.versions)), indent=4)
    else:
        print "None. Use spack versions %s to get a list of downloadable versions." % package.name
    print
    print "Dependencies:"
    if package.dependencies:
        colify(package.dependencies, indent=4)
    else:
        print " None"
    print
    print "Virtual packages: "
    if package.provided:
        # provided maps a virtual spec to the constraint under which
        # this package provides it.
        for spec, when in package.provided.items():
            print " %s provides %s" % (when, spec)
    else:
        print " None"
    print
    print "Description:"
    if package.__doc__:
        # Collapse whitespace in the class docstring, then wrap to 72 cols.
        doc = re.sub(r'\s+', ' ', package.__doc__)
        lines = textwrap.wrap(doc, 72)
        for line in lines:
            print " " + line
    else:
        print " None"
def test_undefined_mpi_version(self):
    # Currently failing: provides() does not handle undefined version
    # ranges correctly yet.
    # TODO: fix this.
    pkg = packages.get('multimethod^[email protected]')
    self.assertEqual(pkg.mpi_version(), 0)
def test_version_overlap(self):
    """When version ranges overlap, dispatch should pick the right variant."""
    cases = [('[email protected]', 1),
             ('[email protected]', 2)]
    for spec, expected in cases:
        self.assertEqual(packages.get(spec).version_overlap(), expected)
def test_no_version_match(self):
    """No variant of no_version_2 matches this spec, so calling it raises."""
    self.assertRaises(NoSuchMethodError,
                      packages.get('[email protected]').no_version_2)
def test_virtual_dep_match(self):
    """Dispatch should follow the concrete provider of a virtual dep."""
    cases = [('multimethod^mpich2', 2),
             ('multimethod^[email protected]', 1)]
    for spec, expected in cases:
        self.assertEqual(packages.get(spec).different_by_virtual_dep(), expected)
def set_pkg_dep(pkg, spec):
    """Alter dependency information for a package.

       Replaces pkg's recorded dependency on spec's package with the
       given spec. Use this to mock up constraints in tests."""
    dep = Spec(spec)
    packages.get(pkg).dependencies[dep.name] = dep
def versions(parser, args):
    """spack versions: list a package's downloadable versions, newest first."""
    available = packages.get(args.package).fetch_available_versions()
    colify(reversed(available))