Example #1
0
    # Jitter added for numerical stability in the GP backend (`B` is the
    # backend module; presumably LAB — TODO confirm).
    B.epsilon = 1e-8
    wbml.out.report_time = True
    wd = WorkingDirectory("_experiments", "simulators", log=f"log{suffix}.txt")

    # Load data.
    # NOTE(review): presumably `loc` holds spatial locations, `temp` a
    # time-indexed DataFrame, and `sims` a mapping of simulator name to
    # DataFrame — confirm against `load()`.
    loc, temp, sims = load()
    # NOTE(review): this is an identity rebuild of `sims`; it looks like a
    # leftover from a subsetting step (e.g. `list(...)[:k]`) — confirm intent.
    sims = {k: v for k, v in list(sims.items())}
    # Inputs: days elapsed since the first timestamp, for the first
    # `args.n` rows only.
    x_data = np.array([(day - temp.index[0]).days
                       for day in temp.index[:args.n]])
    # Outputs: all simulators' values stacked side by side along the column
    # axis, truncated to the same `args.n` rows.
    y_data = np.concatenate([sim.to_numpy()[:args.n] for sim in sims.values()],
                            axis=1)
    wbml.out.out("Data loaded")

    # Normalise training data.
    normaliser = Normaliser(y_data)
    y_data = normaliser.normalise(y_data)

    # Determine initialisation of spatial length scales: half the extent of
    # the spatial locations in every dimension.
    scales_init = 0.5 * np.array(loc.max() - loc.min())

    # Convert to PyTorch, forcing double precision throughout.
    loc = torch.tensor(np.array(loc), dtype=torch.float64)
    x_data = torch.tensor(x_data, dtype=torch.float64)
    y_data = torch.tensor(y_data, dtype=torch.float64)

    # Determine number of latent processes: `m_r` and `m_s` factors taken
    # from the command line (presumably temporal x spatial — TODO confirm).
    m_r = args.mr
    m_s = args.ms
    m = m_r * m_s
Example #2
0
from wbml.data.exchange import load
from wbml.experiment import WorkingDirectory

if __name__ == "__main__":
    wbml.out.report_time = True
    wd = WorkingDirectory("_experiments", "exchange_ilmm")

    # Jitter added for numerical stability in the GP backend.
    B.epsilon = 1e-8

    # Load the exchange data set; the first return value is unused here.
    _, train, test = load()

    # Inputs are the index of the training frame; outputs are its columns.
    x = np.array(train.index)
    y = np.array(train)

    # Normalise data. Note that downstream code should use `y_norm`, not `y`.
    normaliser = Normaliser(y)
    y_norm = normaliser.normalise(y)

    p = B.shape(y)[1]  # Number of outputs.
    m = 3  # Number of latent processes.
    vs = Vars(torch.float64)  # Container for the model's learnable parameters.
    def construct_model(vs):
        kernels = [
            vs.pos(1, name=f"{i}/var") *
            Matern12().stretch(vs.pos(0.1, name=f"{i}/scale"))
            for i in range(m)
        ]
        noise = vs.pos(1e-2, name="noise")
        latent_noises = vs.pos(1e-2 * B.ones(m), name="latent_noises")
        h = Dense(vs.get(shape=(p, m), name="h"))
Example #3
0
                        min_periods=1,
                        win_type="hamming")
    # Collapse the (rolling-window) object to means, then keep every 31st
    # row to thin the series.
    temp = temp.mean().iloc[::31, :]

    # Create train and test splits
    x = np.array([(day - temp.index[0]).days for day in temp.index])
    y = np.array(temp)

    # Divide into training and test set.
    x_train = x[:250]
    y_train = y[:250]
    x_test = x[250:350]
    y_test = y[250:350]

    # Perform normalisation. The normaliser is constructed from the training
    # outputs only, so test statistics are not used.
    normaliser = Normaliser(y_train)
    y_train_norm = normaliser.normalise(y_train)

    # Determine initialisation of spatial length scales: a fraction of the
    # spatial extent, floored at 1.
    scales_init = np.maximum(0.2 * np.array(loc.max() - loc.min()), 1)

    # Convert to PyTorch.
    # NOTE(review): no explicit dtype here, so the tensor dtype follows
    # `np.array(loc)`; the sibling script passes dtype=torch.float64 and the
    # parameters below are float64 — confirm this is intentional.
    loc = torch.tensor(np.array(loc))

    p = B.shape(y)[1]  # Number of outputs.
    m = args.m  # Number of latent processes, from the command line.
    vs = Vars(torch.float64)  # Container for the model's learnable parameters.

    def construct_model(vs):
        kernels = [
            vs.pos(0.5, name=f"{i}/k_var") *