# Test for the output bandwidth parameter
# Check whether the output dictionary is expressive enough to approximate the output functions well
muout = 0.1
testridge = rffridge.RFFRidge(muout, rffs)
i = 6  # index of the training curve to inspect
testridge.fit(Vtrain["x"][i], Vtrain["y"][i])
pred = testridge.predict(timevec.reshape((501, 1)))
plt.figure()
plt.plot(timevec, pred, label="predicted")
plt.plot(Vtrain["x"][i], Vtrain["y"][i], label="real")
plt.legend()
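# Optional: sweep a few candidate output bandwidths and compare the reconstructions
# visually before committing to muout. Minimal sketch reusing the RFFRidge
# fit/predict calls from above; the candidate values are illustrative only.
plt.figure()
for candidate_mu in (0.01, 0.1, 1.0):
    ridge = rffridge.RFFRidge(candidate_mu, rffs)
    ridge.fit(Vtrain["x"][i], Vtrain["y"][i])
    plt.plot(timevec, ridge.predict(timevec.reshape((501, 1))), label="mu=" + str(candidate_mu))
plt.plot(Vtrain["x"][i], Vtrain["y"][i], label="real", linestyle="--")
plt.legend()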

# Fit
# Build regressor
l2 = losses.L2Loss()
lamb = 0.001
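# CoefsOnCoefs (read from the call below, not from the library docs): regress the
# output-dictionary coefficients on the input-dictionary coefficients, pairing the
# input representation (GaussianKernel, rffsx, musmoothing) with the output
# representation (rffs, muout) under a ridge penalty lamb.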
reg = coefsoncoefs.CoefsOnCoefs(kernels.GaussianKernel(sigma=3), rffsx, musmoothing, rffs, muout, lamb)

# Fit regressor
reg.fit(Xtrain, Vtrain)

# Predict

pred = reg.predict(Xtest, timevec)
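# pred appears to hold one row per test input, evaluated on timevec
# (inferred from the pred[i, :] indexing below).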
i = 4  # index of the test curve to plot

# Pred on test set
plt.figure()
plt.plot(timevec.flatten(), pred[i, :], label="predicted")
plt.plot(timevec.flatten(), Vtest["y"][i], label="real")
plt.legend()
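# Rough quantitative check, assuming Vtest["y"][i] is sampled on the same grid as
# timevec (as the plot above suggests): plain mean squared error for this curve.
mse = np.mean((pred[i, :] - np.asarray(Vtest["y"][i])) ** 2)
print("Test MSE for curve %d: %f" % (i, mse))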

# Example 2
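# Presumably a quick sanity check: the value at time index 20 of each output
# function, collected across all T samples in the dataset.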
test = np.array([data["y"][i][20] for i in range(data.get_T())])

# # Kernels for convolution
# gausskerx = kernels.GaussianKernel(sigma=10)
# gausskery = kernels.GaussianKernel(sigma=0.2)

# # Compute kernel matrices
# Kx = gausskerx.compute_K(data["x_flat"])
# Ky = gausskery.compute_K(data["y_flat"])
# convkers = kernels.ConvKernel(gausskerx, gausskery, Kx, Ky)
#
# # Compute convolution kernel matrix
# Ks = convkers.compute_K_from_mat(Ms)

# Define loss
loss = losses.L2Loss()

# Define regularizers and regularization params
spacereg = regularizers.TikhonovSpace()
timereg = regularizers.TikhonovTime()
mu = 0.001
lamb = 0.001

# Train/Test split
ntrain = 10
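# The output subsequence is shifted one step ahead of the input subsequence,
# so the regressor is presumably trained to predict the next function in the sequence.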
Strain_input = data.extract_subseq(0, 10)
Strain_output = data.extract_subseq(1, 11)
Strain = data.extract_subseq(0, 11)

# Kernels for convolution
gausskerx = kernels.GaussianKernel(sigma=10)