def test_likelihood_gradients_importance():
    """Gradient-check the "importance" likelihood against its analytical jacobian.

    NOTE(review): relies on module-level ``inputs`` and ``myMap`` being defined
    elsewhere in this file — confirm they exist at test-collection time.
    """
    samples = inputs.draw_samples(100, "lhs")
    observations = myMap.evaluate(samples, parallel=True)
    design = OptimalDesign(samples, observations, myMap, inputs, normalize_Y=True)
    x_new = np.atleast_2d([1.0, 2.0])
    gmm_opts = dict(n_components=4, covariance_type="full")
    likelihood = Likelihood(design.model, design.inputs, "importance",
                            kwargs_gmm=gmm_opts)
    checker = GradientChecker(lambda x: likelihood.evaluate(x),
                              lambda x: likelihood.jacobian(x),
                              x_new, 'x')
    assert checker.checkgrad()
def main():
    """Gradient-check the LCB-LW acquisition on a random GP regression model.

    Builds a 3-D GP with an ARD RBF kernel on random data, verifies the
    analytical jacobian of the LCB-LW criterion with GPy's GradientChecker,
    then prints batched vs row-by-row evaluate/jacobian results so they can
    be compared by eye.

    Fix: removed the dead locals ``b = a + 0.0`` (assigned twice, never read).
    """
    np.random.seed(2)
    M, Q = 15, 3
    X = np.random.rand(M, Q)
    Y = np.random.rand(M, 1)
    ker = RBF(input_dim=Q, ARD=True, variance=1.34,
              lengthscale=np.random.rand(1, Q))
    model = GPy.models.GPRegression(X=X, Y=Y, kernel=ker, normalizer=True)
    inputs = UniformInputs([[0, 1]] * Q)
    likelihood = Likelihood(model, inputs)
    x_new = np.random.rand(2, Q)
    qcrit = LCB_LW(model, inputs, likelihood=likelihood)
    g = GradientChecker(lambda x: qcrit.evaluate(x),
                        lambda x: qcrit.jacobian(x),
                        x_new, 'x')
    assert g.checkgrad()
    # Batched evaluation vs one row at a time — printed for visual comparison.
    a = qcrit.evaluate(x_new)
    for i in range(x_new.shape[0]):
        print(qcrit.evaluate(x_new[i, :]))
    print(a)
    a = qcrit.jacobian(x_new)
    for i in range(x_new.shape[0]):
        print(qcrit.jacobian(x_new[i, :]))
    print(a)
def main():
    """Smoke-test the Q and QInt acquisition criteria on the noisy oscillator.

    Fits a GP surrogate to 15 LHS samples of the oscillator map, then prints
    the value and jacobian of both the pointwise criterion Q and its
    grid-integrated variant QInt at a single test point.
    """
    ndim = 2
    np.random.seed(2)
    tf, nsteps = 25, 1000
    u_init = [0, 0]
    noise = Noise([0, tf])
    oscil = Oscillator(noise, tf, nsteps, u_init)
    myMap = BlackBox(map_def, args=(oscil,))
    # Gaussian input space: domain spans +/- 6 standard deviations per dim.
    lam = noise.get_eigenvalues(ndim)
    mean = np.zeros(ndim)
    cov = np.diag(lam)
    domain = [[-a, a] for a in 6.0 * np.sqrt(np.diag(cov))]
    inputs = GaussianInputs(domain, mean, cov)
    X = inputs.draw_samples(15, "lhs")
    Y = myMap.evaluate(X)
    o = OptimalDesign(X, Y, myMap, inputs, normalize_Y=True)
    likelihood = Likelihood(o.model, o.inputs)
    x_new = np.atleast_2d([1.0, 2.0])
    qcrit = Q(o.model, o.inputs, likelihood=likelihood)
    print(qcrit.evaluate(x_new))
    print(qcrit.jacobian(x_new))
    qcrit = QInt(o.model, o.inputs, ngrid=250, likelihood=likelihood)
    print(qcrit.evaluate(x_new))
    print(qcrit.jacobian(x_new))
def main():
    """Gradient-check and visualize the GMM-fitted "nominal" likelihood.

    Fits the likelihood weights on 100 LHS samples of the oscillator map,
    checks its analytical jacobian against finite differences and GPy's
    GradientChecker, then scatter-plots the raw density ratio next to the
    GMM fit on a grid of points.
    """
    ndim = 2
    np.random.seed(3)
    tf, nsteps = 25, 1000
    u_init = [0, 0]
    noise = Noise([0, tf])
    oscil = Oscillator(noise, tf, nsteps, u_init)
    myMap = BlackBox(map_def, args=(oscil,))
    lam = noise.get_eigenvalues(ndim)
    mean = np.zeros(ndim)
    cov = np.diag(lam)
    # Domain spans +/- 6 standard deviations per dimension.
    domain = [[-a, a] for a in 6.0 * np.sqrt(np.diag(cov))]
    inputs = GaussianInputs(domain, mean, cov)
    #inputs = UniformInputs(domain)
    gmm_opts = dict(n_components=4, covariance_type="spherical")
    X = inputs.draw_samples(100, "lhs")
    Y = myMap.evaluate(X, parallel=True)
    o = OptimalDesign(X, Y, myMap, inputs, normalize_Y=True)
    likelihood = Likelihood(o.model, o.inputs, "nominal", kwargs_gmm=gmm_opts)
    x_new = np.atleast_2d([1.0, 2.0])
    gmm_y = likelihood.evaluate(x_new)
    # Analytical jacobian vs finite differences, printed for comparison.
    print(jacobian_fdiff(likelihood, x_new))
    print(likelihood.jacobian(x_new))
    from GPy.models import GradientChecker
    gm = GradientChecker(lambda x: likelihood.evaluate(x),
                         lambda x: likelihood.jacobian(x),
                         x_new, 'x')
    assert gm.checkgrad()
    # Side-by-side scatter plots: raw density ratio vs the GMM fit.
    pts = inputs.draw_samples(n_samples=100, sample_method="grd")
    gmm_y = likelihood.evaluate(pts).flatten()
    pix = likelihood._evaluate_raw(pts).flatten()
    fig = plt.figure(figsize=(12, 6))
    plt.subplot(1, 2, 1)
    sc = plt.scatter(pts[:, 0], pts[:, 1], c=pix)
    plt.colorbar(sc)
    plt.title(r"$f_x/f_y$")
    plt.subplot(1, 2, 2)
    sc = plt.scatter(pts[:, 0], pts[:, 1], c=gmm_y)
    plt.colorbar(sc)
    plt.title("GMM fit")
    plt.show()
def main():
    """Smoke-test pointwise and integrated acquisition pairs on the oscillator.

    For each (pointwise, integrated) acquisition pair, fits a GP surrogate to
    15 LHS samples and prints the criterion value and jacobian at a test
    point, passing a likelihood only to weighted acquisitions.

    Fixes:
    - ``GaussianInputs`` was called as ``(mean, cov, domain)``; the rest of
      this file calls it as ``(domain, mean, cov)`` — made consistent.
    - ``isinstance(Acq, type(AcquisitionWeighted))`` compares metaclasses and
      is true for essentially any class, so the branch never discriminated;
      replaced with ``issubclass`` against ``AcquisitionWeighted``.
    """
    ndim = 2
    np.random.seed(2)
    tf, nsteps = 25, 1000
    u_init = [0, 0]
    noise = Noise([0, tf])
    oscil = Oscillator(noise, tf, nsteps, u_init)
    myMap = BlackBox(map_def, args=(oscil,))
    lam = noise.get_eigenvalues(ndim)
    mean = np.zeros(ndim)
    cov = np.diag(lam)
    acq_list = [(Q, QInt), (IVR_LW, IVR_LWInt), (IVR, IVRInt)]
    for (Acq, AcqInt) in acq_list:
        # Domain spans +/- 6 standard deviations per dimension.
        domain = [[-a, a] for a in 6.0 * np.sqrt(np.diag(cov))]
        inputs = GaussianInputs(domain, mean, cov)
        X = inputs.draw_samples(15, "lhs")
        Y = myMap.evaluate(X)
        o = OptimalDesign(X, Y, myMap, inputs, normalize_Y=True)
        x_new = np.atleast_2d([1.0, 2.0])
        likelihood = Likelihood(o.model, o.inputs)
        if issubclass(Acq, AcquisitionWeighted):
            acq = Acq(o.model, o.inputs, likelihood=likelihood)
        else:
            acq = Acq(o.model, o.inputs)
        print(acq.evaluate(x_new))
        print(acq.jacobian(x_new))
        # Widen the domain before building the integrated variant.
        domain = [[-a, a] for a in 20.0 * np.sqrt(np.diag(cov))]
        inputs.set_domain(domain)
        if issubclass(AcqInt, AcquisitionWeighted):
            acqint = AcqInt(o.model, o.inputs, ngrid=250, likelihood=likelihood)
        else:
            acqint = AcqInt(o.model, o.inputs, ngrid=250)
        print(acqint.evaluate(x_new))
        print(acqint.jacobian(x_new))