def update_copyright_year(args):
    """Rewrite the copyright year range in every licensed file to this year."""
    suffix = ' Lawrence Livermore National Security, LLC and other'
    pattern = r'Copyright \d{4}-\d{4}' + suffix
    replacement = strict_date + suffix
    for relpath in _licensed_files(args):
        fs.filter_file(pattern, replacement, os.path.join(args.root, relpath))
def install(self, spec, prefix):
    """Build and install CNSsolve.

    Points ``cns_solve_env`` at the build directory, puts a near-correct
    machine makefile in place, optionally patches it for newer gfortran,
    optionally overlays ARIA sources, runs ``make install``, and finally
    installs the whole tree into ``prefix`` with the environment file
    re-pointed at the install prefix.
    """
    # edit cns_solve_environment to allow a build; keep a pristine backup so
    # the installed copy can be regenerated from it after the build
    shutil.copy('cns_solve_env', 'cns_solve_env.back')
    filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                f"setenv CNS_SOLVE '{self.stage.source_path}'",
                'cns_solve_env')

    # copy over an almost right machine make file; we could have got it from
    # v1.3 but this is simpler
    src_file = 'instlib/machine/supported/intel-x86_64bit-linux/Makefile.header.2.gfortran'
    dest_file = 'instlib/machine/supported/mac-intel-darwin/Makefile.header.5.gfortran'
    shutil.move(src_file, dest_file)

    if not self.spec.satisfies('%fortran@:10.0.0'):
        # patch the machine make file; this can't be done with a patch
        # directive because the file doesn't exist until we copy it above.
        # NOTE: tried just copying the file from the package directory but it
        # caused a lockup
        patch = which('patch')
        patch_file = join_path(
            package_root, 'nmrpack/packages/cns',
            'gfortran_10_allow_argument_mismatch.patch')
        patch('-p1', '-i', patch_file)

    if '+aria' in self.spec:
        # overlay the ARIA-patched CNS sources onto the stock source tree
        from_path = pathlib.Path('aria2.3/cns/src')
        to_path = 'source'
        for target_file in from_path.iterdir():
            if target_file.is_file() and target_file.suffix in ('.f', '.inc'):
                print(f'copying {target_file} to {to_path}')
                shutil.copy(target_file, to_path)
            if target_file.is_dir():
                print(f'copying {target_file} to {to_path}')
                shutil.copytree(target_file, join_path(to_path, target_file.name))
        # keep a record of what was applied, then drop the original tree
        shutil.copytree(from_path, 'aria2.3_patches_applied')
        shutil.rmtree('aria2.3')

    make('install')
    install_tree('.', prefix)

    with working_dir(prefix):
        # restore the pristine environment file and point it at the prefix
        shutil.move('cns_solve_env.back', 'cns_solve_env')
        replacement_env = f" setenv CNS_SOLVE '{prefix}'"
        filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                    replacement_env, 'cns_solve_env')

        # remove a leftover from our previous edits
        # (presumably the filter_file backup file — TODO confirm)
        os.remove(pathlib.Path(prefix) / pathlib.Path('cns_solve_env' + '~'))
def update_copyright_year(args):
    """Rewrite the copyright year range in every licensed file, plus the
    root MIT license file, to cover the current year."""
    suffix = ' Lawrence Livermore National Security, LLC and other'
    for fname in _licensed_files(args):
        fs.filter_file(r'Copyright \d{4}-\d{4}' + suffix,
                       strict_date + suffix,
                       os.path.join(args.root, fname))

    # The MIT license file at the root carries a shortened notice (no LLNS
    # suffix) for better github detection, so it gets its own pattern.
    mit_date = strict_date.replace("Copyright", "Copyright (c)")
    mit_file = os.path.join(args.root, "LICENSE-MIT")
    fs.filter_file(r"Copyright \(c\) \d{4}-\d{4}", mit_date, mit_file)
def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config): """Ensure spack buildcache update-index properly fixes up spec descriptor files on the mirror when updating the buildcache index.""" # Create a temp mirror directory for buildcache usage mirror_dir = tmpdir.join('mirror_dir') mirror_url = 'file://{0}'.format(mirror_dir.strpath) spack.config.set('mirrors', {'test': mirror_url}) a = Spec('a').concretized() b = Spec('b').concretized() new_b_full_hash = 'abcdef' # Install package a with dep b install_cmd('--no-cache', a.name) # Create a buildcache for a and its dep b, and update index buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name) buildcache_cmd('update-index', '-d', mirror_dir.strpath) # Simulate an update to b that only affects full hash by simply overwriting # the full hash in the spec.json file on the mirror b_spec_json_name = bindist.tarball_name(b, '.spec.json') b_spec_json_path = os.path.join(mirror_dir.strpath, bindist.build_cache_relative_path(), b_spec_json_name) fs.filter_file(r'"full_hash":\s"\S+"', '"full_hash": "{0}"'.format(new_b_full_hash), b_spec_json_path) # When we update the index, spack should notice that a's notion of the # full hash of b doesn't match b's notion of it's own full hash, and as # a result, spack should fix the spec.json for a buildcache_cmd('update-index', '-d', mirror_dir.strpath) # Read in the concrete spec json of a a_spec_json_name = bindist.tarball_name(a, '.spec.json') a_spec_json_path = os.path.join(mirror_dir.strpath, bindist.build_cache_relative_path(), a_spec_json_name) # Turn concrete spec json into a concrete spec (a) with open(a_spec_json_path) as fd: a_prime = spec.Spec.from_json(fd.read()) # Make sure the full hash of b in a's spec json matches the new value assert (a_prime[b.name].full_hash() == new_b_full_hash)
def add_files_to_view(self, view, merge_map):
    """Add this extension's files to the view, copying (and rewriting the
    interpreter prefix of) scripts under bin/ instead of linking them."""
    exe_dir = self.spec.prefix.bin
    py_prefix = self.extendee_spec.prefix
    shared_view = same_path(py_prefix, view.root)
    for source, target in merge_map.items():
        if os.path.exists(target):
            continue
        # Anything outside bin/, or everything in a global view, is linked.
        if shared_view or not path_contains_subdirectory(source, exe_dir):
            view.link(source, target)
            continue
        # Symlinks in bin/ are re-pointed through the merge map.
        if os.path.islink(source):
            real_target = os.path.realpath(source)
            view.link(os.path.abspath(merge_map[real_target]), target)
            continue
        # Regular files in bin/ are copied; scripts get their python prefix
        # rewritten to point into the view.
        shutil.copy2(source, target)
        if 'script' in get_filetype(source):
            filter_file(py_prefix, os.path.abspath(view.root), target)
def add_files_to_view(self, view, merge_map):
    """Merge this extension into the view; bin/ scripts are copied and have
    their python prefix rewritten, other files are linked."""
    scripts_dir = self.spec.prefix.bin
    python_home = self.extendee_spec.prefix
    is_global = same_path(python_home, view.root)
    for source_path, dest_path in merge_map.items():
        if os.path.exists(dest_path):
            continue
        outside_bin = not path_contains_subdirectory(source_path, scripts_dir)
        if is_global or outside_bin:
            view.link(source_path, dest_path)
        elif os.path.islink(source_path):
            # re-point symlinks in bin/ at their merged destination
            link_target = os.path.realpath(source_path)
            view.link(os.path.abspath(merge_map[link_target]), dest_path)
        else:
            shutil.copy2(source_path, dest_path)
            if 'script' in get_filetype(source_path):
                filter_file(python_home, os.path.abspath(view.root),
                            dest_path)
def test_filter_files_with_different_encodings(regex, replacement, filename,
                                               tmpdir, keyword_args):
    """filter_file must handle files in various encodings without raising."""
    # Pre-requisite for every input file: 'replacement' is absent initially
    # and the regex matches at least once.
    src = os.path.join(spack.paths.test_path, 'data', 'filter_file', filename)
    dst = os.path.join(str(tmpdir), filename)
    shutil.copy(src, dst)

    # This should not raise exceptions
    fs.filter_file(regex, replacement, dst, **keyword_args)

    # Verify the replacement landed in the file.
    open_kwargs = ({'errors': 'surrogateescape'}
                   if sys.version_info > (3, 0) else {})
    with open(dst, mode='r', **open_kwargs) as f:
        assert replacement in f.read()
def _patch_libtool(self, libtool_path):
    """Fix up a generated libtool script for compilers it mishandles."""
    spec = self.spec
    # Fill in the empty wl/pic_flag settings libtool produced.
    if any(spec.satisfies(c) for c in ('%arm', '%clang', '%fj')):
        fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
        fs.filter_file('pic_flag=""\n',
                       'pic_flag="{0}"\n'
                       .format(self.compiler.cc_pic_flag),
                       libtool_path)
    if spec.satisfies('%fj'):
        fs.filter_file('-nostdlib', '', libtool_path)
        # Drop absolute paths to Fujitsu/GCC runtime object files.
        for objname in ('fjhpctag.o', 'fjcrt0.o', 'fjlang08.o', 'fjomp.o',
                        'crti.o', 'crtbeginS.o', 'crtendS.o'):
            fs.filter_file(r'/\S*/' + objname, '', libtool_path)
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
    """Merge this extension into the view.  Non-symlink executables with a
    shebang under bin/ are copied and re-pointed at the view's projection
    (unless python is external); everything else is linked."""
    scripts_dir = self.spec.prefix.bin
    py_prefix = self.extendee_spec.prefix
    py_is_external = self.extendee_spec.external
    projection = view.get_projection_for_spec(self.spec)
    global_view = same_path(py_prefix, projection)
    for source, target in merge_map.items():
        if os.path.exists(target):
            continue
        if global_view or not path_contains_subdirectory(source, scripts_dir):
            view.link(source, target)
        elif os.path.islink(source):
            # re-point symlinks in bin/ at their merged destination
            real = os.path.realpath(source)
            view.link(os.path.abspath(merge_map[real]), target)
        else:
            shutil.copy2(source, target)
            if is_nonsymlink_exe_with_shebang(source) and not py_is_external:
                filter_file(py_prefix, os.path.abspath(projection), target)
def _patch_libtool(self, libtool_path):
    """Fix up a generated libtool script for compilers it mishandles."""
    compilers_needing_fixup = ('%arm', '%clang', '%fj')
    if any(self.spec.satisfies(name) for name in compilers_needing_fixup):
        # Fill in the empty wl/pic_flag settings libtool produced.
        fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
        fs.filter_file('pic_flag=""\n',
                       'pic_flag="{0}"\n'
                       .format(self.compiler.cc_pic_flag),
                       libtool_path)
    if self.spec.satisfies('%fj'):
        # Drop the absolute path to the Fujitsu tag object file.
        fs.filter_file(r'/\S*/fjhpctag.o', '', libtool_path)
def relocate_text(path_names, oldpath, newpath, oldprefix, newprefix):
    """
    Replace old path with new path in text file path_name
    """
    # Literal (non-regex) replacement of the old path itself.
    fs.filter_file('%s' % oldpath, '%s' % newpath,
                   *path_names, backup=False, string=True)
    # Rewrite sbang shebang lines that point into the old prefix.
    old_sbang = '#!/bin/bash %s/bin/sbang' % oldprefix
    new_sbang = '#!/bin/bash %s/bin/sbang' % newprefix
    fs.filter_file(old_sbang, new_sbang,
                   *path_names, backup=False, string=True)
    # Finally replace any remaining occurrences of the old prefix.
    fs.filter_file(oldprefix, newprefix,
                   *path_names, backup=False, string=True)
def test_filter_files_multiple(tmpdir):
    """Sequential filter_file calls each replace their own header include."""
    # All files given as input to this test must satisfy the pre-requisite
    # that the 'replacement' string is not present in the file initially and
    # that there's at least one match for the regex
    original_file = os.path.join(spack.paths.test_path, 'data',
                                 'filter_file', 'x86_cpuid_info.c')
    target_file = os.path.join(str(tmpdir), 'x86_cpuid_info.c')
    shutil.copy(original_file, target_file)
    # This should not raise exceptions
    fs.filter_file(r'\<malloc.h\>', '<unistd.h>', target_file)
    fs.filter_file(r'\<string.h\>', '<unistd.h>', target_file)
    fs.filter_file(r'\<stdio.h\>', '<unistd.h>', target_file)
    # Check the strings have been replaced
    extra_kwargs = {}
    if sys.version_info > (3, 0):
        extra_kwargs = {'errors': 'surrogateescape'}
    with open(target_file, mode='r', **extra_kwargs) as f:
        # Bug fix: read the file ONCE.  The original called f.read() in each
        # assertion; the second and third reads returned '' (the file was
        # already exhausted), so those assertions were vacuous and could
        # never fail.
        contents = f.read()
    assert '<malloc.h>' not in contents
    assert '<string.h>' not in contents
    assert '<stdio.h>' not in contents
def patch(self):
    """Point the configure script's dictionary and data directories at /lib."""
    for var in ('dictdir', 'datadir'):
        # Runtime strings are identical to the originals:
        # r'^dictdir=.*$' -> 'dictdir=/lib', r'^datadir=.*$' -> 'datadir=/lib'
        filter_file(r'^%s=.*$' % var, '%s=/lib' % var, 'configure')
def fix_shebang(self):
    """Swap the hard-coded perl interpreter path in the generated Build
    script for an environment lookup."""
    if self.build_method != 'Build.PL':
        return
    interpreter = '#!{0}'.format(self.spec['perl'].command.path)
    filter_file(interpreter, '#!/usr/bin/env perl', 'Build', backup=False)
def configure_pipeline(parser, args):
    """Apply NAME_(BRANCH|COMMIT|TAG)=ref modifications to Spack recipes.

    Parses ``args.modifications``, resolves branches/tags to commit hashes
    with ``git ls-remote``, optionally writes a NAME_COMMIT=sha file, and
    rewrites each package's ``develop`` version in its recipe to pin that
    commit and mark it (and only it) as preferred.
    """
    # Parse all of our inputs before trying to modify any recipes.
    modifications = {}
    packages_to_ignore = set(args.ignore_packages)
    mod_pattern = re.compile("^([^=]+)_(BRANCH|COMMIT|TAG)=(.*)$",
                             re.IGNORECASE)
    for mod_str in args.modifications:
        match = mod_pattern.match(mod_str)
        if not match:
            raise Exception("Could not parse: {}".format(mod_str))
        package_name = match.group(1)
        ref_type = match.group(2).lower()
        val = match.group(3)
        # Handle --ignore-packges arguments
        if package_name in packages_to_ignore:
            tty.info("{}: ignoring {}".format(package_name, mod_str))
            continue
        # Try and transform the input name, which is probably all upper case
        # and may contain underscores, into a Spack-style name that is all
        # lower case and contains hyphens.
        spack_package_name = simplify_name(package_name)
        # Check if this package exists
        try:
            spack.repo.get(spack_package_name)
        except spack.repo.UnknownPackageError:
            raise Exception(
                "Could not find a Spack package corresponding to {}, tried {}".
                format(package_name, spack_package_name))
        if spack_package_name in modifications:
            raise Exception(
                "Parsed multiple modifications for Spack package {} from: {}".
                format(spack_package_name, " ".join(args.modifications)))
        modifications[spack_package_name] = {
            "bash_name": package_name,
            "ref_type": ref_type,
            "ref": val,
        }
    # Translate any branches or tags into commit hashes and then use those
    # consistently. This guarantees different jobs in a pipeline all get the
    # same commit, and means we can handle provenance information (what did
    # @develop mean) in one place.
    git = which("git")
    if not git:
        raise Exception("Git is required")
    for spack_package_name, info in modifications.items():
        if info["ref_type"] == "commit":
            # already a commit hash; nothing to resolve
            info["commit"] = info["ref"]
        else:
            if info["ref_type"] == "branch":
                remote_ref = "refs/heads/" + info["ref"]
            else:
                assert info["ref_type"] == "tag"
                remote_ref = "refs/tags/" + info["ref"]
            spack_package = spack.repo.get(spack_package_name)
            # ls-remote output: "<sha>\t<ref>" — expect zero or one line.
            remote_refs = git("ls-remote", spack_package.git, remote_ref,
                              output=str).splitlines()
            assert len(remote_refs) < 2
            if len(remote_refs) == 0:
                raise Exception(
                    "Could not find {} {} on remote {} (tried {})".format(
                        info["ref_type"], info["ref"], spack_package.git,
                        remote_ref))
            commit, ref_check = remote_refs[0].split()
            assert remote_ref == ref_check
            tty.info("{}: resolved {} {} to {}".format(
                spack_package_name, info["ref_type"], info["ref"], commit))
            info["commit"] = commit
    if args.write_commit_file is not None:
        # Record the resolved commits in a bash-sourceable NAME_COMMIT=sha file.
        with open(args.write_commit_file, "w") as ofile:
            for spack_package_name, info in modifications.items():
                ofile.write("{}_COMMIT={}\n".format(info["bash_name"],
                                                    info["commit"]))
    # Now modify the Spack recipes of the given packages
    for spack_package_name, info in modifications.items():
        spack_package = spack.repo.get(spack_package_name)
        spack_recipe = os.path.join(spack_package.package_dir,
                                    spack.repo.package_file_name)
        # Using filter_file seems neater than calling sed, but it is a little
        # more limited. First, remove any existing branch/commit/tag from the
        # develop version.
        tty.info(
            "{}@develop: remove branch/commit/tag".format(spack_package_name))
        filter_file(
            "version\\s*\\(\\s*(['\"]{1})develop\\1(.*?)" +
            ",\\s*(branch|commit|tag)=(['\"]{1})(.*?)\\4(.*?)\\)",
            "version('develop'\\2\\6) # old: \\3=\\4\\5\\4",
            spack_recipe,
        )
        # Second, insert the new commit="sha" part
        tty.info('{}@develop: use commit="{}"'.format(spack_package_name,
                                                      info["commit"]))
        filter_file(
            "version\\('develop'",
            "version('develop', commit='{}'".format(info["commit"]),
            spack_recipe,
        )
        # Third, make sure that the develop version, and only the develop
        # version, is flagged as the preferred version. Start by getting a list
        # of versions that are already explicitly flagged as preferred.
        already_preferred = {
            str(v)
            for v, v_info in spack_package.versions.items()
            if v_info.get("preferred", False)
        }
        # Make sure the develop version has an explicit preferred=True.
        if "develop" not in already_preferred:
            tty.info(
                "{}@develop: add preferred=True".format(spack_package_name))
            filter_file("version\\('develop'",
                        "version('develop', preferred=True",
                        spack_recipe)
        # Make sure no other versions have an explicit preferred=True.
        for other_version in already_preferred - {"develop"}:
            tty.info("{}@{}: remove preferred=True".format(
                spack_package_name, other_version))
            escaped_version = re.escape(other_version)
            filter_file(
                "version\\s*\\(\\s*(['\"]{1})" + escaped_version +
                "\\1(.*?),\\s*preferred=True(.*?)\\)",
                "version('{version}'\\2\\3)".format(version=other_version),
                spack_recipe,
            )
def relocate_text(path_names, old_dir, new_dir):
    """
    Replace the old install directory with the new one in each of the given
    text files.

    Args:
        path_names: iterable of paths to text files to rewrite in place
        old_dir (str): directory prefix to search for
        new_dir (str): replacement directory prefix
    """
    import re  # local import keeps this fix self-contained

    # Bug fix: old_dir was previously passed straight through as a regex
    # pattern, so regex metacharacters that legitimately appear in paths
    # ('.', '+', ...) could mis-match or over-match (e.g. '.' matched any
    # character).  Escape it so the path is matched literally.
    # NOTE(review): new_dir is assumed to contain no backslash escape
    # sequences that the replacement step would interpret — confirm for
    # non-POSIX paths.
    filter_file(re.escape(old_dir), new_dir, *path_names, backup=False)
def relocate_text(path_name, old_dir, new_dir):
    """
    Replace old path with new path in text file path_name

    Args:
        path_name (str): path to the text file to rewrite in place
        old_dir (str): directory prefix to search for
        new_dir (str): replacement directory prefix
    """
    import re  # local import keeps this fix self-contained

    # Bug fix: this used to call filter_file("r'%s'" % old_dir, ...), which
    # searched for the literal characters r'<old_dir>' — including the
    # leading r and the quotes — and therefore could never match a real
    # path in the file.  The raw-string prefix is Python source syntax, not
    # something to format into a runtime string.  Escape old_dir so regex
    # metacharacters in the path are matched literally instead.
    filter_file(re.escape(old_dir), new_dir, path_name)