# ---- Example 1 ----
def compute_and_save_all_svdvals(training_data):
    """Compute and store the complete set of singular values of the
    training snapshot matrix (those of the *full* POD basis).

    Parameters
    ----------
    training_data : (NUM_ROMVARS*DOF,trainsize) ndarray
        Snapshot matrix whose singular values are computed. May be
        overwritten in the process (``overwrite_a=True``).

    Returns
    -------
    svdvals : (trainsize,) ndarray
        All singular values of the training data.
    """
    # Only the singular values are needed, so use scipy's svdvals(),
    # which skips computing the singular vectors.
    with utils.timed_block("Computing *dense* SVD for singular values"):
        sigma = la.svdvals(training_data,
                           overwrite_a=True,
                           check_finite=False)

    # Derive the output location from the basis path for this training
    # size, swapping the basis filename for the singular-value file.
    outfile = config.basis_path(training_data.shape[1])
    outfile = outfile.replace(config.BASIS_FILE, "svdvals.h5")
    with utils.timed_block("Saving singular values"):
        with h5py.File(outfile, 'w') as hf:
            hf.create_dataset("svdvals", data=sigma)
    logging.info(f"Singular values saved to {outfile}.\n")

    return sigma
# ---- Example 2 ----
def compute_and_save_pod_basis(trainsize, num_modes, training_data, scales):
    """Compute the POD basis of the training data with a randomized SVD
    and write one file per requested basis size.

    Parameters
    ----------
    trainsize : int
        Number of training snapshots; must equal training_data.shape[1].

    num_modes : list(int) or int
        Basis size(s) to save. A single SVD of rank max(num_modes) is
        computed, then truncated for each entry.

    training_data : (NUM_ROMVARS*DOF,trainsize) ndarray
        Training snapshots to take the SVD of.

    scales : (NUM_ROMVARS,2) ndarray
        Info on how the snapshot data was scaled.
        NOTE(review): accepted here but neither used nor saved — confirm
        whether it should be written alongside the basis.

    Returns
    -------
    V : (NUM_ROMVARS*DOF,r) ndarray
        POD basis of rank r = max(num_modes).

    svdvals : (r,) ndarray
        Singular values corresponding to the POD modes.

    Raises
    ------
    ValueError
        If `trainsize` does not match the number of snapshot columns.
    """
    # Guard against a mismatch between declared and actual snapshot count.
    if training_data.shape[1] != trainsize:
        raise ValueError("trainsize and training_data not aligned")

    # Normalize a scalar request to a one-element list.
    num_modes = [int(num_modes)] if np.isscalar(num_modes) else num_modes

    # One randomized SVD at the largest requested rank serves every size.
    rmax = max(num_modes)
    with utils.timed_block(f"Computing {rmax}-component randomized SVD"):
        V, svdvals = roi.pre.pod_basis(training_data,
                                       r=rmax,
                                       mode="randomized",
                                       n_iter=15,
                                       random_state=42)

    # Truncate to the leading r columns/values for each requested rank.
    for r in num_modes:
        save_path = config.basis_path(trainsize, r)
        with utils.timed_block(f"Saving POD basis of rank {r}"):
            with h5py.File(save_path, 'w') as hf:
                hf.create_dataset("V", data=V[:, :r])
                hf.create_dataset("svdvals", data=svdvals[:r])
        logging.info(f"POD basis of rank {r} saved to {save_path}.\n")

    return V, svdvals
# ---- Example 3 ----
def load_basis(trainsize, r):
    """Load a POD basis together with the mean snapshot and scaling factors.

    Parameters
    ----------
    trainsize : int
        Number of snapshots used when the SVD was computed.

    r : int
        Number of left singular vectors to load. The special value -1
        instead returns the full array of singular values (undocumented
        convenience path).

    Returns
    -------
    V : (NUM_ROMVARS*DOF,r) ndarray
        POD basis of rank `r`, i.e., the first `r` left singular vectors
        of the training data.

    qbar : (NUM_ROMVARS*DOF,) ndarray
        Mean snapshot that the training data was shifted by after scaling
        but before projection (zeros if the file stores no "mean" dataset).

    scales : (NUM_ROMVARS,2) ndarray
        Factors used to scale the variables before projecting.

    Raises
    ------
    ValueError
        If the stored basis has fewer than `r` columns.
    RuntimeError
        If the stored mean snapshot and basis have mismatched shapes.
    """
    # Locate the data file for this training size.
    data_path = _checkexists(config.basis_path(trainsize))

    # Secret! Return list of full singular values.
    if r == -1:
        data_path = data_path.replace(config.BASIS_FILE, "svdvals.h5")
        with h5py.File(data_path, 'r') as hf:
            return hf["svdvals"][:]

    with timed_block(f"Loading POD basis from {data_path}"):
        with h5py.File(data_path, 'r') as hf:
            basis = hf["basis"]
            # Refuse to load more columns than were stored.
            rmax = basis.shape[1]
            if r is not None and r > rmax:
                raise ValueError(f"basis only has {rmax} columns")

            # A stored mean is optional; fall back to the zero vector.
            if "mean" not in hf:
                mean = np.zeros(basis.shape[0])
            else:
                if hf["mean"].shape != (basis.shape[0], ):
                    raise RuntimeError("basis and mean snapshot not aligned!")
                mean = hf["mean"][:]

            return basis[:, :r], mean, hf["scales"][:]
# ---- Example 4 ----
def compute_and_save_pod_basis(num_modes, training_data, qbar, scales):
    """Compute a rank-`num_modes` POD basis with a randomized SVD and save
    it in one HDF5 file with the singular values, mean snapshot, and scales.

    Parameters
    ----------
    num_modes : int
        Number of POD modes to compute.

    training_data : (NUM_ROMVARS*DOF,trainsize) ndarray
        Training snapshots to take the SVD of.

    qbar : (NUM_ROMVARS*DOF,) ndarray
        Mean snapshot of the scaled training data.

    scales : (NUM_ROMVARS,2) ndarray
        Info on how the snapshot data was scaled.

    Returns
    -------
    V : (NUM_ROMVARS*DOF,r) ndarray
        POD basis of rank r.
    """
    # Randomized SVD: cheaper than a dense SVD when only the leading
    # num_modes components are needed.
    with utils.timed_block(f"Computing {num_modes}-component randomized SVD"):
        V, svdvals = opinf.pre.pod_basis(training_data,
                                         r=num_modes,
                                         mode="randomized",
                                         n_iter=15,
                                         random_state=42)

    # Bundle the basis, singular values, mean, and scales together.
    outfile = config.basis_path(training_data.shape[1])
    with utils.timed_block("Saving POD basis"):
        with h5py.File(outfile, 'w') as hf:
            for key, array in (("basis", V), ("svdvals", svdvals),
                               ("mean", qbar), ("scales", scales)):
                hf.create_dataset(key, data=array)
    logging.info(f"POD bases of rank {num_modes} saved to {outfile}.\n")

    return V