def test_pkg_install_paths(install_mockery):
    """Check install-time metadata paths and backward-compatible filenames."""
    # Concretize the trivial test package to get a usable spec.
    spec = Spec('trivial-install-test-package').concretized()

    metadata_dir = os.path.join(spec.prefix, '.spack')
    assert spec.package.install_log_path == os.path.join(
        metadata_dir, _spack_build_logfile)
    assert spec.package.install_env_path == os.path.join(
        metadata_dir, _spack_build_envfile)
    assert spec.package.install_configure_args_path == os.path.join(
        metadata_dir, _spack_configure_argsfile)

    # Backward compatibility checks: legacy filenames must still resolve.
    legacy_dir = os.path.dirname(spec.package.install_log_path)
    fs.mkdirp(legacy_dir)
    with fs.working_dir(legacy_dir):
        # Start with the older of the previous install log filenames
        fs.touch('build.out')
        assert spec.package.install_log_path.endswith('build.out')

        # Now check the newer install log filename
        fs.rename('build.out', 'build.txt')
        assert spec.package.install_log_path.endswith('build.txt')

        # Check the old install environment file
        fs.rename('build.txt', 'build.env')
        assert spec.package.install_env_path.endswith('build.env')

    # Cleanup
    shutil.rmtree(legacy_dir)
def test_pkg_build_paths(install_mockery):
    """Check build-time log/env paths and backward-compatible filenames."""
    # Concretize the trivial test package to get a usable spec.
    spec = Spec('trivial-install-test-package').concretized()

    build_log = spec.package.log_path
    assert build_log.endswith(_spack_build_logfile)
    assert spec.package.env_path.endswith(_spack_build_envfile)

    # Backward compatibility checks: legacy filenames must still resolve.
    legacy_dir = os.path.dirname(build_log)
    fs.mkdirp(legacy_dir)
    with fs.working_dir(legacy_dir):
        # Start with the older of the previous log filenames
        fs.touch('spack-build.out')
        assert spec.package.log_path.endswith('spack-build.out')

        # Now check the newer log filename
        fs.rename('spack-build.out', 'spack-build.txt')
        assert spec.package.log_path.endswith('spack-build.txt')

        # Check the old environment file
        fs.rename('spack-build.txt', 'spack-build.env')
        assert spec.package.env_path.endswith('spack-build.env')

    # Cleanup
    shutil.rmtree(legacy_dir)
def push_to_url(local_file_path, remote_path, keep_original=True,
                extra_args=None):
    """Push a local file to ``remote_path`` (file://, s3://, or gs:// URL).

    Args:
        local_file_path (str): path of the local file to push.
        remote_path (str): destination URL, or a bare local/Windows path.
        keep_original (bool): if False, remove the local file after the push.
        extra_args (dict): optional ``ExtraArgs`` for the S3 upload call.

    Raises:
        NotImplementedError: if the URL scheme is not supported.
    """
    if sys.platform == "win32":
        # Turn a bare Windows drive path (e.g. ``C:\\...``) into a file URL.
        # Guard the index so a path shorter than two characters cannot raise
        # IndexError (the original indexed remote_path[1] unconditionally).
        if len(remote_path) > 1 and remote_path[1] == ':':
            remote_path = "file://" + remote_path

    remote_url = url_util.parse(remote_path)
    verify_ssl = spack.config.get('config:verify_ssl')

    if __UNABLE_TO_VERIFY_SSL and verify_ssl and uses_ssl(remote_url):
        warn_no_ssl_cert_checking()

    remote_file_path = url_util.local_file_path(remote_url)
    if remote_file_path is not None:
        # Local filesystem destination: copy or move into place.
        mkdirp(os.path.dirname(remote_file_path))
        if keep_original:
            shutil.copy(local_file_path, remote_file_path)
        else:
            try:
                rename(local_file_path, remote_file_path)
            except OSError as e:
                if e.errno == errno.EXDEV:
                    # NOTE(opadron): The above move failed because it crosses
                    # filesystem boundaries. Copy the file (plus original
                    # metadata), and then delete the original. This operation
                    # needs to be done in separate steps.
                    shutil.copy2(local_file_path, remote_file_path)
                    os.remove(local_file_path)
                else:
                    raise

    elif remote_url.scheme == 's3':
        if extra_args is None:
            extra_args = {}

        # S3 keys must not start with '/'.
        remote_path = remote_url.path.lstrip('/')

        s3 = s3_util.create_s3_session(
            remote_url,
            connection=s3_util.get_mirror_connection(remote_url))  # noqa: E501
        s3.upload_file(local_file_path, remote_url.netloc,
                       remote_path, ExtraArgs=extra_args)

        if not keep_original:
            os.remove(local_file_path)

    elif remote_url.scheme == 'gs':
        gcs = gcs_util.GCSBlob(remote_url)
        gcs.upload_to_blob(local_file_path)
        if not keep_original:
            os.remove(local_file_path)

    else:
        raise NotImplementedError(
            'Unrecognized URL scheme: {SCHEME}'.format(
                SCHEME=remote_url.scheme))
def _write_extensions(self, spec, extensions):
    """Atomically write the extensions file for ``spec``.

    An empty/falsy ``extensions`` mapping removes the file instead of
    writing an empty one.
    """
    path = self.extension_file_path(spec)

    if not extensions:
        # Remove the empty extensions file
        os.remove(path)
        return

    # Create a temp file in the same directory as the actual file, so the
    # final rename below stays on one filesystem.
    dirname, basename = os.path.split(path)
    fs.mkdirp(dirname)

    tmp = tempfile.NamedTemporaryFile(
        prefix=basename, dir=dirname, delete=False)

    try:
        # write tmp file
        with tmp:
            yaml.dump({
                'extensions': [
                    {ext.name: {
                        'hash': ext.dag_hash(),
                        'path': str(ext.prefix)
                    }} for ext in sorted(extensions.values())]
            }, tmp, default_flow_style=False, encoding='utf-8')

        # Atomic update by moving tmpfile on top of old one.
        fs.rename(tmp.name, path)
    except BaseException:
        # Don't leak the delete=False temp file if the dump or the final
        # rename fails; the original created it and never cleaned it up.
        try:
            os.remove(tmp.name)
        except OSError:
            pass
        raise
def __exit__(cm, type, value, traceback):  # noqa
    # Finish the in-place edit: close both files, then either discard the
    # temp copy (exception path) or move it over the original (success path).
    # ``cm`` plays the role of ``self`` here, matching the original code.
    if cm.orig_file:
        cm.orig_file.close()
    cm.tmp_file.close()
    if value:
        # remove tmp on exception & raise it
        # NOTE(review): shutil.rmtree with ignore_errors=True silently does
        # nothing if tmp_filename is a regular file — presumably the temp
        # path is (or may be) a directory; confirm against where it is
        # created before changing this to os.remove.
        shutil.rmtree(cm.tmp_filename, True)
    else:
        # Success: replace the original file with the fully-written temp copy.
        rename(cm.tmp_filename, cm.orig_filename)
def _write_section(self, section):
    """Write this scope's cached config sections back to its single file."""
    data_to_write = self._raw_data

    # If there is no existing raw data, this SingleFileScope has never been
    # written to disk.  Reconstruct the nesting implied by self.yaml_path,
    # building from the inside out, so the config sections land at the
    # correct depth in the document.
    if not data_to_write:
        data_to_write = {}
        for key in reversed(self.yaml_path):
            data_to_write = {key: data_to_write}

    # Walk a pointer down to the level at which config sections are
    # defined (popping the keys from self.yaml_path)...
    pointer = data_to_write
    for key in self.yaml_path:
        pointer = pointer[key]

    # ...then overlay each cached section's data at that level.
    for name, section_data in self.sections.items():
        pointer[name] = section_data[name]

    validate(data_to_write, self.schema)

    try:
        parent = os.path.dirname(self.path)
        mkdirp(parent)

        # Write to a dot-prefixed temp file beside the target, then rename
        # it into place so readers never see a half-written config.
        tmp = os.path.join(
            parent, '.%s.tmp' % os.path.basename(self.path))
        with open(tmp, 'w') as stream:
            syaml.dump_config(
                data_to_write, stream=stream, default_flow_style=False)
        rename(tmp, self.path)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigFileError(
            "Error writing to config file: '%s'" % str(e))