Example #1
# validation output, time vector and inputs (same construction as in Example #3)
yp = df['south_T'][start_valid:stop_valid].values[np.newaxis, :]
tp = np.arange(0, dt * yp.shape[1], dt)
up = df[['Ambient temperature',
         'north_T',
         'Solar radiation: global horizontal',
         'south_H',
         'vent2_H']][start_valid:stop_valid].values.T

reg.eval_log_likelihood(tp, yp, up, 'foh')

reg.ss.parameters.set_parameter("x0_i", value=y0, transform="fixed")

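# inputs over the whole fit + validation period, used for the prediction below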
upred = df[['Ambient temperature',
            'north_T',
            'Solar radiation: global horizontal',
            'south_H',
            'vent2_H']][start_fit:stop_valid].values.T

df["south_T_pred"] = df["south_T"].copy()
df["south_T_pred"][start_valid:stop_valid] = np.nan
tpred = np.arange(0, dt * upred.shape[1], dt)
ypred = df["south_T_pred"].values[np.newaxis, :]

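# predicted output mean and standard deviation (filtering only, smooth=False)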
y_mean, y_std = reg.predict(tpred, tpred, ypred, upred, 'foh', smooth=False)

plt.close("all")
fig, axes = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True)

ytruth = df["south_T"][start_fit:stop_valid].values
axes.plot(tpred, ytruth, 'k')
axes.plot(tpred, y_mean, 'C0', lw=2)
axes.fill_between(tpred,
                  y_mean - 1.96 * y_std,
                  y_mean + 1.96 * y_std,
                  color='C0', alpha=0.2)
plt.show()
Example #2
# estimate the states over the fit period; the final estimated state and
# covariance initialize the validation run below
x, P = reg.estimate_states(df=df[start_fit:stop_fit],
                           inputs=inputs,
                           outputs=outputs)

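# last estimated state mean and the upper Cholesky factor of its covariance,
# used as the initial condition for the validation period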
x0v = x[-1, :, :]
P0v = np.linalg.cholesky(P[-1, :, :]).T

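# log-likelihood of the validation data, starting from the propagated initial state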
reg.eval_log_likelihood(df=df[start_valid:stop_valid],
                        inputs=inputs,
                        outputs=outputs,
                        x0=x0v,
                        P0=P0v)

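# output prediction over the validation period from the same initial condition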
y_mean, y_std = reg.predict(df=df[start_valid:stop_valid],
                            inputs=inputs,
                            x0=x0v,
                            P0=P0v)

dfp = df[start_valid:stop_valid].copy()
dfp['yp'] = y_mean
dfp['lower'] = y_mean - 1.96 * y_std
dfp['upper'] = y_mean + 1.96 * y_std

fig, axes = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True)
axes.plot(dfp.index, dfp['south_T'], 'k', lw=2)
axes.plot(dfp.index, dfp['yp'], 'C0', lw=2)
axes.fill_between(dfp.index, dfp['lower'], dfp['upper'], color='C0', alpha=0.2)
plt.show()
Example #3
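# replace the last six parameters (initial state means and standard deviations)
# with the values propagated from the end of the fit period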
P0p_w = np.sqrt(P[-1, 0, 0])
P0p_i = np.sqrt(P[-1, 1, 1])
P0p_b = np.sqrt(P[-1, 2, 2])
theta_fit = np.copy(reg.ss.parameters.theta)
theta_valid = np.copy(theta_fit)
theta_valid[-6:] = [x0p_w, x0p_i, x0p_b, P0p_w, P0p_i, P0p_b]
reg.ss.parameters.theta = theta_valid

# compute log-likelihood on validation dataset
yp = df['south_T'][start_valid:stop_valid].values[np.newaxis, :]
tp = np.arange(0, dt * yp.shape[1], dt)
up = df[[
    'Ambient temperature', 'north_T', 'Solar radiation: global horizontal',
    'south_H', 'vent2_H'
]][start_valid:stop_valid].values.T

reg.eval_log_likelihood(tp, yp, up, 'foh')

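# predict the validation output without measurements (y=None)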
y_mean, y_std = reg.predict(tp, tp, None, up, 'foh')

plt.close("all")
fig, axes = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True)
axes.plot(tp, yp.squeeze(), 'k')
axes.plot(tp, y_mean, 'C0', lw=2)
axes.fill_between(tp,
                  y_mean - 1.96 * y_std,
                  y_mean + 1.96 * y_std,
                  color='C0',
                  alpha=0.2)
plt.show()
Example #4
    dict(name="Ci", value=0.01, transform="log"),
    dict(name="sigw_w", value=0.01, transform="log"),
    dict(name="sigw_i", value=0.0, transform="fixed"),
    dict(name="sigv", value=0.01, transform="log"),
    dict(name="x0_w", value=0.25, transform="log"),
    dict(name="x0_i", value=y0, transform="fixed"),
    dict(name="sigx0_w", value=1.0, transform="fixed"),
    dict(name="sigx0_i", value=1.0, transform="fixed")
]

reg = Regressor(TwTi_RoRi(parameters, hold_order='foh'))
out = reg.fit(df=df, inputs=['T_ext', 'P_hea'], outputs='T_int')

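# filtered and smoothed output predictions on a regular 60 s grid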
dt = 60
tnew = np.arange(df.index[0], df.index[-1] + dt, dt)
y_mean_f, y_std_f = reg.predict(df=df, inputs=['T_ext', 'P_hea'], tpred=tnew)

y_mean_s, y_std_s = reg.predict(df=df,
                                outputs='T_int',
                                inputs=['T_ext', 'P_hea'],
                                tpred=tnew,
                                smooth=True)

# plot filtered and smoothed output
plt.close("all")
fig, axes = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True)
fig.suptitle("filtered vs. smoothed output")

axes.plot(df.index, df['T_int'], 'kx')
axes.plot(tnew, y_mean_f, 'C0', lw=2)
axes.fill_between(tnew,
                  y_mean_f - 1.96 * y_std_f,
                  y_mean_f + 1.96 * y_std_f,
                  color='C0', alpha=0.2)
Example #5
print("Matérn(1/2)")
print("-" * 11)
reg12.fit(df=df, outputs='y')

print("\nMatérn(3/2)")
print("-" * 11)
reg32.fit(df=df, outputs='y')

print("\nMatérn(5/2)")
print("-" * 11)
reg52.fit(df=df, outputs='y')

# new data
Nnew = 500
tnew = np.linspace(-0.1, 1.1, Nnew)

# interpolate each model on the new time grid (smoothed predictions)
ym12, yd12 = reg12.predict(df=df, outputs='y', tpred=tnew, smooth=True)
ym32, yd32 = reg32.predict(df=df, outputs='y', tpred=tnew, smooth=True)
ym52, yd52 = reg52.predict(df=df, outputs='y', tpred=tnew, smooth=True)

# plot different kernels
plt.close("all")
fig, axes = plt.subplots(nrows=3, ncols=1, sharex=True, sharey=True)
fig.suptitle("Matérn covariances")
plt.xlim(-0.1, 1.1)

axes[0].set_title("smoothness = 1/2")
axes[0].plot(t, y, 'kx', mew=2)
axes[0].plot(tnew, ym12, 'C0', lw=2)
axes[0].fill_between(tnew, ym12 - 1.96 * yd12, ym12 + 1.96 * yd12,
                     color='C0', alpha=0.2)
Example #6
# Matérn(3/2) covariance and measurement-noise parameters
parameters = [
    dict(name="mscale", value=0.5, transform="log"),
    dict(name="lscale", value=0.5, transform="log"),
    dict(name="sigv", value=0.1, transform="log")
]

reg = Regressor(Matern32(parameters))

results = reg.fit(df=df, outputs='y')

# new data
Nnew = 500
tnew = np.linspace(-0.1, 1.1, Nnew)
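# filtered (smooth=False) vs. smoothed (smooth=True) predictions on the new grid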
ym_f, ys_f = reg.predict(df=df, outputs='y', tpred=tnew, smooth=False)
ym_s, ys_s = reg.predict(df=df, outputs='y', tpred=tnew, smooth=True)

# plot filtered and smoothed output
plt.close("all")
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, sharey=True)
fig.suptitle("filtered vs. smoothed output")
plt.xlim(-0.1, 1.1)

axes[0].set_title("filtered")
axes[0].plot(t, y, 'kx', mew=2)
axes[0].plot(tnew, ym_f, 'C0', lw=2)
axes[0].fill_between(tnew,
                     ym_f - 1.96 * ys_f,
                     ym_f + 1.96 * ys_f,
                     color='C0',
                     alpha=0.2)
Example #7
]

par_Matern12 = [
    dict(name="mscale", value=1.0, transform="log"),
    dict(name="lscale", value=10.0, transform="log"),
    dict(name="sigv", value=0.1, transform="log")
]

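# quasi-periodic kernel: product of a Periodic kernel and a Matérn(1/2) kernel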
quasi_periodic = GPProduct(Periodic(par_Periodic), Matern12(par_Matern12))

reg = Regressor(quasi_periodic)
results = reg.fit(df=data, outputs='y')

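# dense prediction grid extending beyond the observed range to extrapolate the signal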
Nnew = 500
tnew = np.linspace(xlim[0], xlim[1] + 3, Nnew)
y_mean_f, y_std_f = reg.predict(df=data, outputs='y', tpred=tnew, smooth=False)
y_mean_s, y_std_s = reg.predict(df=data, outputs='y', tpred=tnew, smooth=True)

# plot filtered and smoothed output
plt.close("all")
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, sharey=False)
fig.suptitle("filtered vs. smoothed output")

axes[0].set_title("filtered")
axes[0].plot(t, y, 'kx', mew=2)
axes[0].plot(tnew, y_mean_f, 'C0', lw=2)
axes[0].fill_between(tnew,
                     y_mean_f - 1.96 * y_std_f,
                     y_mean_f + 1.96 * y_std_f,
                     color='C0',
                     alpha=0.2)
Example #8
Nsamples = mh._N - mh.warmup
yhat = np.empty((Nsamples, df.index.shape[0]))

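# flatten the post-warm-up draws of every chain; the last row is used below to
# bound the plotted trajectories through its empirical CDF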
chains = np.empty((mh._Np + 1, Nsamples * mh._M))
for i in range(mh._Np + 1):
    chains[i, :] = mh.trace[:, i, mh.warmup:].ravel()

# chains[-1, :] = np.abs(chains[-1, :])
# posterior = ( chains[-1, :] - chains[-1, :].min() ) / ( chains[-1, :].max() - chains[-1, :].min() )
ecdf = ECDF(chains[-1, :])
min_ecdf = ecdf.x[np.where(ecdf.y <= 0.025)[0][-1]]
max_ecdf = ecdf.x[np.where(ecdf.y >= 0.975)[0][0]]

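# one predicted output trajectory per posterior sample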
for i in range(Nsamples):
    reg.ss.parameters.eta = chains[:-1, i]
    yhat[i, :], _ = reg.predict(df=df, inputs=['T_ext', 'P_hea'])

fig2 = plt.figure(figsize=(9, 6), constrained_layout=True)
gs = fig2.add_gridspec(1, 1)
axes = fig2.add_subplot(gs[:, :])
PuBu = np.asarray(sns.color_palette("PuBu", 10))

for i in range(Nsamples):
    # idx = PuBu[int(posterior[i]*10), :]
    if min_ecdf < chains[-1, i] < max_ecdf:  # keep draws inside the central 95%
        axes.plot(df.index / 3600,
                  yhat[i, :],
                  color=sns.xkcd_rgb[cc[-1]],
                  lw=1.5)

axes.plot(df.index / 3600,