def wildcard_version(path):
    """Find the version in the supplied path, and return a regular
    expression that will match this path with any version in its place.
    """
    # Locate the version substring in the path.
    version_string, _, _ = parse_version_string_with_indices(path)
    version = Version(version_string)

    # Split the path on the version text, escape each literal piece, and
    # stitch the pieces back together with a version-matching wildcard.
    literal_chunks = [re.escape(chunk) for chunk in path.split(str(version))]
    return version.wildcard().join(literal_chunks)
def test_version_range_nonempty():
    """Versions inside a range are contained by that range."""
    rng = VersionRange('1.2.0', '1.2')
    assert Version('1.2.9') in rng
    assert Version('1.1.1') in ver('1.0:1')
import spack.repository
import spack.error
import spack.config
import spack.fetch_strategy
from spack.file_cache import FileCache
from spack.abi import ABI
from spack.concretize import DefaultConcretizer
from spack.version import Version
from spack.util.path import canonicalize_path

#-----------------------------------------------------------------------------
# Initialize various data structures & objects at the core of Spack.
#-----------------------------------------------------------------------------
# Version information
spack_version = Version("0.10.0")

# Set up the default packages database.
try:
    repo = spack.repository.RepoPath()
    # Adding the RepoPath to sys.meta_path lets Python's import machinery
    # locate Spack packages as modules.
    # NOTE(review): assumes `sys` and `tty` are imported earlier in this
    # file (not visible in this chunk) -- confirm.
    sys.meta_path.append(repo)
except spack.error.SpackError as e:
    tty.die('while initializing Spack RepoPath:', e.message)

# Tests ABI compatibility between packages
abi = ABI()

# This controls how things are concretized in spack.
def test_up_to():
    """Exercise Version.up_to with positive and negative indices, plus the
    separator-normalizing properties (dotted, dashed, underscored, joined).
    """
    v = Version('1.23-4_5b')

    # Positive indices keep the first N components, separators intact.
    prefixes = {1: '1', 2: '1.23', 3: '1.23-4', 4: '1.23-4_5', 5: '1.23-4_5b'}
    for count, expected in prefixes.items():
        assert v.up_to(count).string == expected

    # Negative indices drop components from the end.
    assert v.up_to(-1).string == '1.23-4_5'
    assert v.up_to(-2).string == '1.23-4'
    assert v.up_to(-3).string == '1.23'
    assert v.up_to(-4).string == '1'

    # Separator normalization after truncation...
    assert v.up_to(2).dotted.string == '1.23'
    assert v.up_to(2).dashed.string == '1-23'
    assert v.up_to(2).underscored.string == '1_23'
    assert v.up_to(2).joined.string == '123'

    # ...and normalization commutes with truncation.
    assert v.dotted.up_to(2).string == '1.23' == v.up_to(2).dotted.string
    assert v.dashed.up_to(2).string == '1-23' == v.up_to(2).dashed.string
    assert v.underscored.up_to(2).string == '1_23'
    assert v.up_to(2).underscored.string == '1_23'

    # Truncations compose.
    assert v.up_to(2).up_to(1).string == '1'
def url_for_version(self, version):
    """Return the download URL for the requested MEME version.

    Releases at or after 5.0.2 separate the package name from the
    version with '-'; older releases use '_'.
    """
    template = 'http://meme-suite.org/meme-software/{0}/meme{1}{2}.tar.gz'
    if version >= Version('5.0.2'):
        separator = '-'
    else:
        separator = '_'
    return template.format(version.up_to(3), separator, version)
def go_version(self):
    """Return the version reported by the ``go`` executable."""
    # `go version` prints e.g. "go version go1.15.2 linux/amd64";
    # the third whitespace-separated token carries the version.
    version_token = self.go('version', output=str).split(' ')[2]
    return Version(version_token)
def add_package(self, name, dependencies=None, dependency_types=None,
                conditions=None):
    """Factory method for creating mock packages.

    This creates a new subclass of ``MockPackageBase``, ensures that its
    ``name`` and ``__name__`` properties are set up correctly, and
    returns a new instance.

    We use a factory function here because many functions and properties
    of packages need to be class functions.

    Args:
        name (str): name of the new package
        dependencies (list): list of mock packages to be dependencies
            for this new package (optional; no deps if not provided)
        dependency_types (list): list of deptypes for each dependency
            (optional; will be default_deptype if not provided)
        conditions (list): condition specs for each dependency (optional)
    """
    if not dependencies:
        dependencies = []

    if not dependency_types:
        # Default every dependency to the standard deptype.
        dependency_types = [spack.dependency.default_deptype
                            ] * len(dependencies)

    assert len(dependencies) == len(dependency_types)

    # new class for the mock package
    class MockPackage(MockPackageBase):
        pass
    MockPackage.__name__ = spack.util.naming.mod_to_class(name)
    MockPackage.name = name
    MockPackage._repo = self

    # set up dependencies
    MockPackage.dependencies = collections.OrderedDict()
    for dep, dtype in zip(dependencies, dependency_types):
        d = Dependency(MockPackage, Spec(dep.name), type=dtype)
        if not conditions or dep.name not in conditions:
            # Unconditional dependency: keyed by this package's own spec.
            MockPackage.dependencies[dep.name] = {Spec(name): d}
        else:
            # Conditional dependency: map each condition spec to its own
            # Dependency object.
            dep_conditions = conditions[dep.name]
            dep_conditions = dict(
                (Spec(x), Dependency(MockPackage, Spec(y), type=dtype))
                for x, y in dep_conditions.items())
            MockPackage.dependencies[dep.name] = dep_conditions

    # each package has some fake versions
    versions = list(Version(x) for x in [1, 2, 3])
    MockPackage.versions = dict((x, {'preferred': False})
                                for x in versions)

    MockPackage.variants = {}
    MockPackage.provided = {}
    MockPackage.conflicts = {}
    MockPackage.patches = {}

    mock_package = MockPackage(
        dependencies, dependency_types, conditions, versions)
    # Register the package under both its plain name and its
    # repo-qualified name.
    self.spec_to_pkg[name] = mock_package
    self.spec_to_pkg["mockrepo." + name] = mock_package

    return mock_package
def test_version_list_with_range_and_concrete_version_is_not_concrete():
    """A VersionList holding a concrete version plus a range cannot be
    concrete: it does not pin down a single version.

    The previous assertion checked ``v.concrete``, contradicting the
    test's name and intent; it must be ``assert not v.concrete``.
    """
    v = VersionList([Version('3.1'), VersionRange('3.1.1', '3.1.2')])
    assert not v.concrete
def fake_stage(pkg, mirror_only=False):
    """Stand-in staging function: verify we were asked to stage the
    expected package instead of doing any real work."""
    expected = ('mpileaks', Version('100.100'))
    assert (pkg.name, pkg.version) == expected
def setup_build_environment(self, env):
    """Work around macOS 11 version-detection problems.

    Many configure files rely on matching '10.*' for macOS version
    detection and fail to add flags if it shows as version 11, so
    present the build as the last 10.x release instead.
    """
    on_darwin = self.spec.platform == 'darwin'
    if on_darwin and macos_version() >= Version('11'):
        env.set('MACOSX_DEPLOYMENT_TARGET', '10.16')
def concretize_version(self, spec):
    """If the spec is already concrete, return.  Otherwise take
    the preferred version from spackconfig, and default to the package's
    version if there are no available versions.

    TODO: In many cases we probably want to look for installed
          versions of each package and use an installed version
          if we can link to it.  The policy implemented here will
          tend to rebuild a lot of stuff because it will prefer
          a compiler in the spec to any compiler already-
          installed things were built with.  There is likely
          some better policy that finds some middle ground
          between these two extremes.
    """
    # return if already concrete.
    if spec.versions.concrete:
        return False

    # List of versions we could consider, in sorted order
    pkg_versions = spec.package_class.versions
    usable = [v for v in pkg_versions
              if any(v.satisfies(sv) for sv in spec.versions)]

    yaml_prefs = PackagePrefs(spec.name, 'version')

    # The keys below show the order of precedence of factors used
    # to select a version when concretizing.  The item with
    # the "largest" key will be selected.
    #
    # NOTE: When COMPARING VERSIONS, the '@develop' version is always
    #       larger than other versions.  BUT when CONCRETIZING, the
    #       largest NON-develop version is selected by default.
    keyfn = lambda v: (
        # ------- Special direction from the user
        # Respect order listed in packages.yaml
        -yaml_prefs(v),

        # The preferred=True flag (packages or packages.yaml or both?)
        pkg_versions.get(Version(v)).get('preferred', False),

        # ------- Regular case: use latest non-develop version by default.
        # Avoid @develop version, which would otherwise be the "largest"
        # in straight version comparisons
        not v.isdevelop(),

        # Compare the version itself
        # This includes the logic:
        #    a) develop > everything (disabled by "not v.isdevelop()" above)
        #    b) numeric > non-numeric
        #    c) Numeric or string comparison
        v)
    usable.sort(key=keyfn, reverse=True)

    if usable:
        spec.versions = ver([usable[0]])
    else:
        # We don't know of any SAFE versions that match the given
        # spec.  Grab the spec's versions and grab the highest
        # *non-open* part of the range of versions it specifies.
        # Someone else can raise an error if this happens,
        # e.g. when we go to fetch it and don't know how.  But it
        # *might* work.
        if not spec.versions or spec.versions == VersionList([':']):
            raise NoValidVersionError(spec)
        else:
            last = spec.versions[-1]
            if isinstance(last, VersionRange):
                # Prefer the closed end of the range; fall back to the
                # start if the range is open above.
                if last.end:
                    spec.versions = ver([last.end])
                else:
                    spec.versions = ver([last.start])
            else:
                spec.versions = ver([last])

    return True   # Things changed
def parse_version(path):
    """Given a URL or archive name, extract a version from it and
    return a version object.
    """
    # The start/end indices returned alongside the version are unused.
    version_string = parse_version_string_with_indices(path)[0]
    return Version(version_string)
def _read_from_file(self, stream, format='json'):
    """
    Fill database from file, do not maintain old data
    Translate the spec portions from node-dict form to spec form

    Does not do any locking.
    """
    # Pick a deserializer for the requested format.
    if format.lower() == 'json':
        load = sjson.load
    elif format.lower() == 'yaml':
        load = syaml.load
    else:
        raise ValueError("Invalid database format: %s" % format)

    try:
        # `stream` may be a filename or an open file-like object.
        if isinstance(stream, string_types):
            with open(stream, 'r') as f:
                fdata = load(f)
        else:
            fdata = load(stream)
    except MarkedYAMLError as e:
        raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
    except Exception as e:
        raise CorruptDatabaseError("error parsing database:", str(e))

    if fdata is None:
        return

    def check(cond, msg):
        # Raise a consistent corruption error for any failed sanity check.
        if not cond:
            raise CorruptDatabaseError(
                "Spack database is corrupt: %s" % msg, self._index_path)

    check('database' in fdata, "No 'database' attribute in YAML.")

    # High-level file checks
    db = fdata['database']
    check('installs' in db, "No 'installs' in YAML DB.")
    check('version' in db, "No 'version' in YAML DB.")

    installs = db['installs']

    # TODO: better version checking semantics.
    version = Version(db['version'])
    if version > _db_version:
        raise InvalidDatabaseVersionError(_db_version, version)
    elif version < _db_version:
        # Older on-disk format: rebuild the index from the store, then
        # continue with the freshly re-read records.
        self.reindex(spack.store.layout)
        installs = dict((k, v.to_dict()) for k, v in self._data.items())

    def invalid_record(hash_key, error):
        # Wrap a per-record failure in a CorruptDatabaseError that
        # identifies the offending hash.
        msg = ("Invalid record in Spack database: "
               "hash: %s, cause: %s: %s")
        msg %= (hash_key, type(error).__name__, str(error))
        raise CorruptDatabaseError(msg, self._index_path)

    # Build up the database in three passes:
    #
    #   1. Read in all specs without dependencies.
    #   2. Hook dependencies up among specs.
    #   3. Mark all specs concrete.
    #
    # The database is built up so that ALL specs in it share nodes
    # (i.e., its specs are a true Merkle DAG, unlike most specs.)

    # Pass 1: Iterate through database and build specs w/o dependencies
    data = {}
    for hash_key, rec in installs.items():
        try:
            # This constructs a spec DAG from the list of all installs
            spec = self._read_spec_from_dict(hash_key, installs)

            # Insert the brand new spec in the database.  Each
            # spec has its own copies of its dependency specs.
            # TODO: would a more immutable spec implementation simplify
            #       this?
            data[hash_key] = InstallRecord.from_dict(spec, rec)

        except Exception as e:
            invalid_record(hash_key, e)

    # Pass 2: Assign dependencies once all specs are created.
    for hash_key in data:
        try:
            self._assign_dependencies(hash_key, installs, data)
        except Exception as e:
            invalid_record(hash_key, e)

    # Pass 3: Mark all specs concrete.  Specs representing real
    # installations must be explicitly marked.
    # We do this *after* all dependencies are connected because if we
    # do it *while* we're constructing specs, it causes hashes to be
    # cached prematurely.
    for hash_key, rec in data.items():
        rec.spec._mark_concrete()

    self._data = data
import spack.repo import spack.spec import spack.util.spack_yaml as syaml import spack.util.spack_json as sjson from spack.filesystem_view import YamlFilesystemView from spack.util.crypto import bit_length from spack.directory_layout import DirectoryLayoutError from spack.error import SpackError from spack.version import Version from spack.util.lock import Lock, WriteTransaction, ReadTransaction # DB goes in this directory underneath the root _db_dirname = '.spack-db' # DB version. This is stuck in the DB file to track changes in format. _db_version = Version('0.9.3') # Timeout for spack database locks in seconds _db_lock_timeout = 60 # Types of dependencies tracked by the database _tracked_deps = ('link', 'run') def _now(): """Returns the time since the epoch""" return time.time() def _autospec(function): """Decorator that automatically converts the argument of a single-arg
def test_version_empty_slice():
    """Check an empty slice to confirm get "empty" version instead of
    an IndexError (#25953).
    """
    sliced = Version('1.')[1:]
    assert sliced == Version('')
def test_version_wrong_idx_type():
    """Indexing a Version with a non-integer index must raise TypeError."""
    version = Version('1.1')
    with pytest.raises(TypeError):
        version['0:']
def parse_version(path):
    """Given a URL or archive name, extract a version from it and
    return a version object.
    """
    # parse_version_offset also returns offset/length info that the
    # caller does not need here.
    version_string, _, _ = parse_version_offset(path)
    return Version(version_string)
def _tweak_dev_package_fetcher(dp, spec):
    """Attempt to configure the package's fetcher and stage to obtain the
    source we want to develop for this package.
    """
    if dp.tag_or_branch is None:
        # Nothing to do.
        return

    fetcher_Version = None
    spack_package = spec.package

    # We want the tag or branch specified in dp.
    package_Version = Version(dp.tag_or_branch)
    develop_Version = Version('develop')

    if package_Version in spack_package.versions and \
       _version_is_vc(spack_package, package_Version):
        # Specified version is version-controlled.
        fetcher_Version = package_Version
    elif not fetcher_Version and \
            develop_Version in spack_package.versions and \
            _version_is_vc(spack_package, develop_Version):
        # Repurpose develop to obtain the tag/branch we need.
        version_dict = spack_package.versions[develop_Version]
        # Attempt to tweak things to check out our desired tag or branch.
        if 'git' in version_dict:
            # Git: drop any pinned commit, then set exactly one of
            # tag or branch.
            version_dict.pop('commit', None)
            if dp.key == 'tag':
                version_dict['tag'] = dp.tag_or_branch
                version_dict.pop('branch', None)
            else:
                # Branch.
                version_dict['branch'] = dp.tag_or_branch
                version_dict.pop('tag', None)
        elif 'hg' in version_dict:
            # Mercurial.
            version_dict['revision'] = dp.tag_or_branch
        elif 'svn' in version_dict:
            # Subversion.
            # Can't reliably do anything here since SVN URL structure is
            # convention only, and it is also not possible to reliably
            # distinguish between two common conventions
            # ('project/<trunk-or-branches-or-tags>' vs
            # '<trunk-or-branches-or-tags>/project'.
            raise ExtrapolationError(
                'For subversion repositories, a VC version corresponding to '
                '{0} must be defined in the recipe for {1}.'.format(
                    dp.tag_or_branch, dp.name))
        else:
            raise SpackError('INTERNAL ERROR: spack dev cannot handle '
                             'apparently-supported VC method\n'
                             'version_dict = {0}'.format(version_dict))
        fetcher_Version = develop_Version

    if fetcher_Version:
        version_dict = spack_package.versions[fetcher_Version]
        version_dict['no_cache'] = True  # Disable caching.
        if 'git' in version_dict:
            # Disable efficiency options that aren't wanted here.
            version_dict.update({'full_depth': True,
                                 'all_branches': True})
        # Rebuild the fetcher/stage so the tweaks above take effect.
        spack_package.fetcher = fs.for_package_version(spack_package,
                                                       fetcher_Version)
        spack_package.stage = Stage(spack_package.fetcher,
                                    path=spack_package.path)
    else:
        tty.warn('Spack dev unable to obtain VC source for package {0} {1}'
                 '\nFalling back to version {2} as concretized'.format(
                     dp.name,
                     'with user-specified {0} {1}'.format(
                         dp.key, dp.tag_or_branch) if dp.key else '',
                     spack_package.version))
def macos_version():
    """temporary workaround to return a macOS version as a Version object
    """
    release, _, _ = py_platform.mac_ver()
    return Version(release)
# This tests ABI compatibility between packages # from spack.abi import ABI abi = ABI() # # This controls how things are concretized in spack. # Replace it with a subclass if you want different # policies. # from spack.concretize import DefaultConcretizer concretizer = DefaultConcretizer() # Version information from spack.version import Version spack_version = Version("0.9.1") # # Executables used by Spack # from spack.util.executable import Executable, which # User's editor from the environment editor = Executable(os.environ.get("EDITOR", "vi")) # Curl tool for fetching files. curl = which("curl", required=True) # Whether to build in tmp space or directly in the stage_path. # If this is true, then spack will make stage directories in # a tmp filesystem, and it will symlink them into stage_path.
def git_version(self):
    """Return the version of the ``git`` executable as a Version.

    ``git --version`` prints e.g. ``git version 2.31.1``; the version
    is the third whitespace-separated token.

    The previous implementation used ``str.lstrip('git version ')``,
    which strips a *character set*, not a prefix: it only worked
    because version numbers start with a digit (not in that set), and
    it kept the trailing newline.  Split on whitespace instead, which
    also matches the sibling ``go_version`` implementation.
    """
    vstring = self.git('--version', output=str).split()[2]
    return Version(vstring)
def test_len():
    """len(Version) equals the number of parsed version components."""
    four_part = Version('1.2.3.4')
    assert len(four_part) == len(four_part.version)
    assert len(four_part) == 4

    two_part = Version('2018.0')
    assert len(two_part) == 2
def test_preferred_versions_mixed_version_types(self):
    """A version preference should be honored even when the package
    mixes version types."""
    update_packages('mixedversions', 'version', ['2.0'])
    concretized = concretize('mixedversions')
    assert concretized.version == Version('2.0')
def test_invalid_versions(version_str):
    """Constructing a Version from a malformed string must raise
    ValueError."""
    with pytest.raises(ValueError):
        _ = Version(version_str)
def test_version_git_vs_base(string, git):
    """Version() should dispatch to GitVersion exactly when the
    parametrized case says it should."""
    is_git_version = isinstance(Version(string), GitVersion)
    assert is_git_version == git
def macos_version():
    """temporary workaround to return a macOS version as a Version object
    """
    # Keep only the major.minor components of the OS release string.
    major_minor = py_platform.mac_ver()[0].split('.')[:2]
    return Version('.'.join(major_minor))