# Assumes project-local modules imported elsewhere: kernels, rffridge, losses,
# regularizers, regressors, repmat, gradientbased, plus numpy as np.

# Kernels and Gram matrices on the input data
kernelx = kernels.GaussianKernel(sigma=0.075)
Kxin = kernelx.compute_K(datain["x_flat"])
rffeats = rffridge.RandomFourierFeatures(sigma=10, D=40, d=1)
kers = kernels.GaussianFuncKernel(5, rffeats, mu=0.05)
Ks = kers.compute_K(datain["xy_tuple"])
# Commented-out alternative: convolutional kernel on the outputs
# kernely = kernels.GaussianKernel(sigma=0.5)
# Kyin = kernely.compute_K(datain["y_flat"])
# kers = kernels.ConvKernel(kernelx, kernely, Kxin, Kyin, sameloc=False)
# Ks = kers.compute_K_from_mat(datain.Ms)

# Build regressor
l2 = losses.L2Loss()
lamb = 0.001
mu = 0.01
smoothreg = regularizers.TikhonovSpace()
globalreg = regularizers.TikhonovTime()
regressor = regressors.DiffLocObsOnFuncReg(l2, smoothreg, globalreg, mu, lamb, kernelx, kers)

# # Test with gradient descent
# Kxout = repmat.RepSymMatrix(Kxin, (Ntrain, Ntrain))
# gd = gradientbased.GradientDescent(0.00001, 10, 1e-5, record=True)
# obj = regressor.objective_func(dataout.Ms, dataout["y_flat"], Kxout, Ks)
# grad = regressor.objective_grad_func(dataout.Ms, dataout["y_flat"], Kxout, Ks)
# alpha0 = np.random.normal(0, 1, (Ntrain, Ntrain*nlocs))
# sol = gd(obj, grad, alpha0)

# Fit regressor
# Kxout = repmat.RepSymMatrix(Kxin, (Ntrain, Ntrain))
Kxout = kernelx.compute_K(dataout["x_flat"])
solu = regressor.fit(datain, dataout, Kx=Kxout, Ks=Ks, tol=1e-3)
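# Illustrative sketch (not part of the project API; function names below are
# hypothetical): the two building blocks used above, a Gaussian kernel Gram
# matrix and a random Fourier feature map, can be written in plain numpy as
# follows. The project's kernels.GaussianKernel and
# rffridge.RandomFourierFeatures may use different scaling conventions.
import numpy as np


def gaussian_gram_sketch(X, sigma):
    """Gram matrix K[i, j] = exp(-||x_i - x_j||^2 / (2 * sigma^2))."""
    sq_dists = np.sum((X[:, None, :] - X[None, :, :]) ** 2, axis=-1)
    return np.exp(-sq_dists / (2 * sigma ** 2))


def rff_map_sketch(X, sigma, D, seed=0):
    """Features phi with phi(x) @ phi(y) ~= exp(-||x - y||^2 / (2 * sigma^2))."""
    rng = np.random.default_rng(seed)
    d = X.shape[1]
    W = rng.normal(0.0, 1.0 / sigma, size=(d, D))  # frequencies ~ N(0, 1/sigma^2)
    b = rng.uniform(0.0, 2 * np.pi, size=D)        # random phases
    return np.sqrt(2.0 / D) * np.cos(X @ W + b)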
# gausskery = kernels.GaussianKernel(sigma=0.2)
# # Compute kernel matrices
# Kx = gausskerx.compute_K(data["x_flat"])
# Ky = gausskery.compute_K(data["y_flat"])
# convkers = kernels.ConvKernel(gausskerx, gausskery, Kx, Ky)
#
# # Compute convolution kernel matrix
# Ks = convkers.compute_K_from_mat(Ms)

# Define loss
loss = losses.L2Loss()

# Define regularizers and regularization params
spacereg = regularizers.TikhonovSpace()
timereg = regularizers.TikhonovTime()
mu = 0.001
lamb = 0.001

# Train/Test split
ntrain = 10
Strain_input = data.extract_subseq(0, 10)
Strain_output = data.extract_subseq(1, 11)
Strain = data.extract_subseq(0, 11)

# Kernels for convolution
gausskerx = kernels.GaussianKernel(sigma=10)
gausskery = kernels.GaussianKernel(sigma=0.2)

# NEW REGRESSION CLASS ########################################################
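# Illustrative sketch only; not the project's regressor and not the new
# regression class referenced above. The TikhonovSpace / TikhonovTime
# regularizers with weights mu and lamb suggest Tikhonov penalties along the
# spatial and temporal dimensions; the simplest instance of such a penalty is
# plain kernel ridge regression, whose closed-form solution is shown below in
# numpy (function name is hypothetical).
import numpy as np


def kernel_ridge_sketch(K, Y, lamb):
    """Ridge coefficients alpha solving (K + n * lamb * I) alpha = Y."""
    n = K.shape[0]
    return np.linalg.solve(K + n * lamb * np.eye(n), Y)

# Usage: with a Gram matrix K on training inputs and a target matrix Y,
# in-sample predictions are K @ kernel_ridge_sketch(K, Y, lamb).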