def load_scaled_data(trainsize):
    """Load scaled snapshot data and the associated scaling factors.

    Parameters
    ----------
    trainsize : int
        The number of snapshots of scaled data to load. See step1b.py.

    Returns
    -------
    X : (NUM_ROMVARS*DOF,trainsize) ndarray
        The lifted, scaled data.

    time_domain : (trainsize) ndarray
        The time domain corresponding to the lifted, scaled data.

    scales : (NUM_ROMVARS,4) ndarray
        The min/max factors used to scale the variables.

    Raises
    ------
    RuntimeError
        If any stored data set does not have the expected shape.
    """
    # Locate the data.
    data_path = _checkexists(config.scaled_data_path(trainsize))

    # Extract the data.
    with timed_block(f"Loading lifted, scaled snapshot data from {data_path}"):
        with h5py.File(data_path, 'r') as hf:
            # Check data shapes, reporting expected vs actual on mismatch.
            # (The original messages were f-strings without placeholders.)
            shape_data = (config.NUM_ROMVARS*config.DOF, trainsize)
            if hf["data"].shape != shape_data:
                raise RuntimeError(f"data set 'data' has incorrect shape "
                                   f"{hf['data'].shape}, expected {shape_data}")
            if hf["time"].shape != (trainsize,):
                raise RuntimeError(f"data set 'time' has incorrect shape "
                                   f"{hf['time'].shape}, "
                                   f"expected {(trainsize,)}")
            shape_scales = (config.NUM_ROMVARS, 4)
            if hf["scales"].shape != shape_scales:
                raise RuntimeError(f"data set 'scales' has incorrect shape "
                                   f"{hf['scales'].shape}, "
                                   f"expected {shape_scales}")

            # Load and return the data (fully read into memory before the
            # file handle is closed by the context manager).
            return hf["data"][:,:], hf["time"][:], hf["scales"][:,:]
# --- Beispiel #2 (stray paste-site separator; original lines: "Beispiel #2" / "0") ---
def scale_and_save_data(trainsize, lifted_data, time_domain):
    """Scale lifted snapshots (by variable) and save the scaled snapshots.

    The scaled data, the truncated time domain, and the scaling factors are
    written to an HDF5 file (data sets 'data', 'time', and 'scales') at the
    path given by config.scaled_data_path(trainsize).

    Parameters
    ----------
    trainsize : int
        Number of snapshots to scale and save. Only the first `trainsize`
        columns of `lifted_data` (and entries of `time_domain`) are used.

    lifted_data : (NUM_ROMVARS*DOF, k>trainsize) ndarray
        Lifted snapshots to scale and then save.

    time_domain : (k>trainsize,) ndarray
        The time domain corresponding to the lifted snapshots.

    Returns
    -------
    scaled_data : (NUM_ROMVARS*DOF, trainsize) ndarray
        The scaled training snapshots, as produced by dproc.scale().

    scales : ndarray
        Factors used to scale the variables, as returned by dproc.scale()
        (presumably one row per ROM variable — confirm against dproc.scale).
    """
    # Scale the learning variables to the bounds in config.SCALE_TO.
    # A copy is passed so the caller's `lifted_data` is not modified.
    with utils.timed_block(f"Scaling {trainsize:d} lifted snapshots"):
        scaled_data, scales = dproc.scale(lifted_data[:,:trainsize].copy())

    # Save the lifted, scaled training data.
    save_path = config.scaled_data_path(trainsize)
    with utils.timed_block("Saving scaled, lifted training data"):
        with h5py.File(save_path, 'w') as hf:
            hf.create_dataset("data", data=scaled_data)
            hf.create_dataset("time", data=time_domain[:trainsize])
            hf.create_dataset("scales", data=scales)
    logging.info(f"Scaled data saved as {save_path}.\n")

    return scaled_data, scales
def scale_and_save_data(trainsize, lifted_data, time_domain, center=False):
    """Scale lifted snapshots (by variable) and save the scaled snapshots.

    The processed snapshots, truncated time domain, mean snapshot, and
    scaling factors are written to the HDF5 file located at
    config.scaled_data_path(trainsize) as data sets 'data', 'time', 'mean',
    and 'scales', respectively.

    Parameters
    ----------
    trainsize : int
        Number of snapshots to scale and save.

    lifted_data : (NUM_ROMVARS*DOF, k>trainsize) ndarray
        Lifted snapshots to scale and then save.

    time_domain : (k>trainsize,) ndarray
        The time domain corresponding to the lifted snapshots.

    center : bool
        If True, center the scaled snapshots by the mean scaled snapshot
        before computing the POD basis. Default False (no shift).

    Returns
    -------
    training_data : (NUM_ROMVARS*DOF, trainsize) ndarray
        Scaled, shifted snapshots to use as training data for the basis.

    qbar : (NUM_ROMVARS*DOF,) ndarray
        Mean snapshot of the scaled training data. All zeros if center=False.

    scales : (NUM_ROMVARS,2) ndarray
        Info on how the snapshot data was scaled.
    """
    # Scale the learning variables to the bounds in config.SCALE_TO.
    # Pass a copy so that the caller's `lifted_data` stays untouched.
    with utils.timed_block(f"Scaling {trainsize:d} lifted snapshots"):
        training_data, scales = dproc.scale(lifted_data[:, :trainsize].copy())

    # Default mean is the zero vector; replaced below when centering.
    qbar = np.zeros(training_data.shape[0])
    if center:
        with utils.timed_block("Shifting scaled snapshots by mean"):
            qbar = np.mean(training_data, axis=1)
            # Subtract the mean snapshot from every column.
            training_data = training_data - qbar.reshape((-1, 1))

    # Save the lifted, scaled training data.
    save_path = config.scaled_data_path(trainsize)
    datasets = [
        ("data", training_data),
        ("time", time_domain[:trainsize]),
        ("mean", qbar),
        ("scales", scales),
    ]
    with utils.timed_block("Saving scaled, lifted training data"):
        with h5py.File(save_path, 'w') as hf:
            for label, array in datasets:
                hf.create_dataset(label, data=array)
    logging.info(f"Processed data saved to {save_path}.\n")

    return training_data, qbar, scales
def load_scaled_data(trainsize):
    """Load scaled snapshot data and the associated scaling factors.

    Parameters
    ----------
    trainsize : int
        Number of snapshots of scaled data to load. See step2a_transform.py.

    Returns
    -------
    Q : (NUM_ROMVARS*DOF,trainsize) ndarray
        Lifted, scaled, shifted data.

    time_domain : (trainsize) ndarray
        Time domain corresponding to the lifted, scaled data.

    qbar : (NUM_ROMVARS*DOF,) ndarray
        Mean snapshot of the scaled training data.

    scales : (NUM_ROMVARS,2) ndarray
        Factors used to scale the variables.

    Raises
    ------
    RuntimeError
        If any stored data set does not have the expected shape.
    """
    # Locate the data.
    data_path = _checkexists(config.scaled_data_path(trainsize))

    # Extract the data.
    with timed_block(f"Loading lifted, scaled snapshot data from {data_path}"):
        with h5py.File(data_path, 'r') as hf:
            num_rows = config.NUM_ROMVARS * config.DOF

            # Validate the shape of each stored data set before reading.
            if hf["data"].shape != (num_rows, trainsize):
                raise RuntimeError("data set 'data' has incorrect shape")
            if hf["time"].shape != (trainsize, ):
                raise RuntimeError("data set 'time' has incorrect shape")

            # Older files may lack a 'mean' data set; default to zeros.
            if "mean" not in hf:
                mean = np.zeros(num_rows)
            else:
                if hf["mean"].shape != (num_rows, ):
                    raise RuntimeError("data set 'mean' has incorrect shape")
                mean = hf["mean"][:]

            if hf["scales"].shape != (config.NUM_ROMVARS, 2):
                raise RuntimeError("data set 'scales' has incorrect shape")

            # Read everything into memory and return.
            return (hf["data"][:, :], hf["time"][:], mean, hf["scales"][:, :])