def main(trainsize, num_modes): """Compute the POD basis (dominant left singular values) of a set of lifted, scaled snapshot training data and save the basis and the corresponding singular values. WARNING: This will OVERWRITE any existing basis for this `trainsize`. Parameters ---------- trainsize : int The number of snapshots to use in the computation. There must exist a file of exactly `trainsize` lifted, scaled snapshots (see step2a_transform.py). num_modes : int or list(int) The number of POD modes (left singular vectors) to retain. """ utils.reset_logger(trainsize) # Load the first `trainsize` lifted, scaled snapshot data. training_data, _, qbar, scales = utils.load_scaled_data(trainsize) if num_modes == -1: # Secret mode! Compute all singular values (EXPENSIVE). return compute_and_save_all_svdvals(training_data) else: # Compute and save the (randomized) SVD from the training data. return compute_and_save_pod_basis(num_modes, training_data, qbar, scales)
def main(trainsize, num_modes): """Project lifted, scaled snapshot training data to the subspace spanned by the columns of the POD basis V; compute velocity information for the projected snapshots; and save the projected data. Parameters ---------- trainsize : int The number of snapshots to use in the computation. There must exist a file of exactly `trainsize` lifted, scaled snapshots (see step2a_lift.py). num_modes : int or list(int) The number of POD modes (left singular vectors) to use in the projection, which determines the dimension of the resulting ROM. There must exist a file of at least `num_modes` left singular vectors computed from exactly `trainsize` lifted, scaled snapshots (see step2b_basis.py). """ utils.reset_logger(trainsize) if np.isscalar(num_modes): num_modes = [int(num_modes)] # Load lifted, scaled snapshot data. X, time_domain, scales = utils.load_scaled_data(trainsize) # Load the POD basis. V, _ = utils.load_basis(trainsize, max(num_modes)) # Project and save the data for each number of POD modes. for r in num_modes: project_and_save_data(trainsize, r, X, time_domain, scales, V)
def main(trainsize, num_modes, center=False):
    """Lift and scale the GEMS simulation data; compute a POD basis of the
    lifted, scaled snapshot training data; project the lifted, scaled snapshot
    training data to the subspace spanned by the columns of the POD basis V;
    and compute velocity information for the projected snapshots.
    Save the lifted/scaled snapshots, the POD basis, and the projected data.

    Parameters
    ----------
    trainsize : int
        Number of snapshots to lift / scale / save.
    num_modes : int or None
        The number of POD modes (left singular vectors) to use in the
        projection. This is the upper bound for the size of ROMs that can
        be trained with this data set.
    center : bool
        If True, center the scaled snapshots by the mean scaled snapshot
        before computing the POD basis.
    """
    utils.reset_logger(trainsize)

    # STEP 2A: Lift and scale the data ----------------------------------------
    try:
        # Attempt to load existing lifted, scaled data.
        training_data, time, qbar, scales = utils.load_scaled_data(trainsize)
    except utils.DataNotFoundError:
        # Lift the GEMS data, then scale the lifted snapshots by variable.
        lifted_data, time = step2a.load_and_lift_gems_data(trainsize)
        training_data, qbar, scales = step2a.scale_and_save_data(
            trainsize, lifted_data, time, center)
        del lifted_data

    # STEP 2B: Get the POD basis from the lifted, scaled data -----------------
    try:
        # Attempt to load existing SVD data.
        basis, qbar, scales = utils.load_basis(trainsize, None)
        if basis.shape[1] < num_modes:
            raise utils.DataNotFoundError("not enough saved basis vectors")
        num_modes = basis.shape[1]  # Use larger basis size if available.
    except utils.DataNotFoundError:
        # Compute and save the (randomized) SVD from the training data.
        basis = step2b.compute_and_save_pod_basis(num_modes,
                                                  training_data, qbar, scales)

    # STEP 2C: Project data to the appropriate subspace -----------------------
    return step2c.project_and_save_data(training_data, time, basis)
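# A hypothetical command-line entry point for the driver above, sketched with
# argparse. The flag names and help text are assumptions; the actual script may
# expose different options.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        description="Lift/scale GEMS data, compute a POD basis, "
                    "and project the training data.")
    parser.add_argument("trainsize", type=int,
                        help="number of snapshots in the training set")
    parser.add_argument("modes", type=int,
                        help="maximum number of POD modes to retain")
    parser.add_argument("--center", action="store_true",
                        help="center the scaled snapshots before the SVD")
    args = parser.parse_args()

    main(args.trainsize, args.modes, center=args.center)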
def main(trainsize, num_modes): """Lift and scale the GEMS simulation data; compute a POD basis of the lifted, scaled snapshot training data; project the lifted, scaled snapshot training data to the subspace spanned by the columns of the POD basis V, and compute velocity information for the projected snapshots. Save lifted/scaled snapshots, the POD basis, and the projected data. Parameters ---------- trainsize : int Number of snapshots to lift / scale / save. num_modes : int or list(int) The number of POD modes (left singular vectors) to use in the projection, which determines the dimension of the resulting ROM. """ utils.reset_logger(trainsize) if np.isscalar(num_modes): num_modes = [int(num_modes)] # STEP 2A: Lift and scale the data ---------------------------------------- try: # Attempt to load existing lifted, scaled data. X, time_domain, scales = utils.load_scaled_data(trainsize) except utils.DataNotFoundError: # Lift the GEMS data, then scale the lifted snapshots by variable. lifted_data, time_domain = step2a.load_and_lift_gems_data(trainsize) X, scales = step2a.scale_and_save_data(trainsize, lifted_data, time_domain) # STEP 2B: Get the POD basis from the lifted, scaled data ----------------- try: # Attempt to load existing SVD data. V, _ = utils.load_basis(trainsize, max(num_modes)) except utils.DataNotFoundError: # Compute and save the (randomized) SVD from the training data. V, _ = step2b.compute_and_save_pod_basis(trainsize, max(num_modes), X, scales) # STEP 2C: Project data to the appropriate subspace ----------------------- for r in num_modes: step2c.project_and_save_data(trainsize, r, X, time_domain, scales, V)
def main(trainsize, num_modes): """Compute the POD basis (dominant left singular values) of a set of lifted, scaled snapshot training data and save the basis and the corresponding singular values. Parameters ---------- trainsize : int The number of snapshots to use in the computation. There must exist a file of exactly `trainsize` lifted, scaled snapshots (see step2a_lift.py). num_modes : int or list(int) The number of POD modes (left singular vectors) to retain. """ utils.reset_logger(trainsize) # Load the first `trainsize` lifted, scaled snapshot data. training_data, _, scales = utils.load_scaled_data(trainsize) # Compute and save the (randomized) SVD from the training data. compute_and_save_pod_basis(trainsize, num_modes, training_data, scales)
def main(trainsize): """Project lifted, scaled snapshot training data to the subspace spanned by the columns of the POD basis V; compute velocity information for the projected snapshots; and save the projected data. Parameters ---------- trainsize : int The number of snapshots to use in the computation. There must exist a file of exactly `trainsize` lifted, scaled snapshots (see step2a_transform.py) and a basis for those snapshots (see step2b_basis.py). """ utils.reset_logger(trainsize) # Load lifted, scaled snapshot data. scaled_data, time_domain, _, _ = utils.load_scaled_data(trainsize) # Load the POD basis. V, _, _ = utils.load_basis(trainsize, None) # Project and save the data. return project_and_save_data(scaled_data, time_domain, V)