def test(self):
    fDebug = True  # Enable debugging output - tensorflow print ops
    np.set_printoptions(suppress=True, precision=5)
    seed = 43
    np.random.seed(seed=seed)  # easy peasy reproducibeasy
    tf.random.set_seed(seed)
    # Data generation
    N = 20
    t = np.linspace(0, 1, N)
    print(t)
    Y = np.zeros((N, 1))
    idx = np.nonzero(t > 0.5)[0]
    idxA = idx[::2]
    idxB = idx[1::2]
    print(idx)
    print(idxA)
    print(idxB)
    Y[idxA, 0] = 2 * t[idxA]
    Y[idxB, 0] = -2 * t[idxB]
    globalBranchingLabels = np.ones(N)
    globalBranchingLabels[4::2] = 2
    globalBranchingLabels[5::2] = 3
    XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(t)
    phiInitial, phiPrior = FitBranchingModel.GetInitialConditionsAndPrior(
        globalBranchingLabels, 0.51, False)
    ptb = np.min([
        np.min(t[globalBranchingLabels == 2]),
        np.min(t[globalBranchingLabels == 3]),
    ])
    tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
    tree.add(None, 1, np.ones((1, 1)) * ptb)  # B can be anything here
    (fm1, _) = tree.GetFunctionBranchTensor()
    # Look at kernels
    fDebug = True
    Kbranch1 = bk.BranchKernelParam(
        gpflow.kernels.Matern32(), fm1, b=np.ones((1, 1)) * ptb, fDebug=fDebug)
    K1 = Kbranch1.K(XExpanded, XExpanded)
    Kbranch2 = bk.BranchKernelParam(
        gpflow.kernels.Matern32(), fm1, b=np.ones((1, 1)) * 0.20, fDebug=fDebug)
    _ = Kbranch2.K(XExpanded, XExpanded)
    Kbranch3 = bk.BranchKernelParam(
        gpflow.kernels.Matern32(), fm1, b=np.ones((1, 1)) * 0.22, fDebug=fDebug)
    _ = Kbranch3.K(XExpanded, XExpanded)
    # Look at model
    kb = (bk.BranchKernelParam(gpflow.kernels.Matern32(), fm1, b=np.zeros((1, 1)))
          + gpflow.kernels.White())
    kb.kernels[1].variance.assign(
        1e-6)  # controls the discontinuity magnitude, the gap at the branching point
    set_trainable(kb.kernels[1].variance, False)  # jitter for numerics
    # m = assigngp_dense.AssignGP(
    #     t, XExpanded, Y, kb, indices, np.ones((1, 1)),
    #     phiInitial=phiInitial, phiPrior=phiPrior
    # )
    m = assigngp_dense.AssignGP(
        t,
        XExpanded,
        Y,
        kb,
        indices,
        np.ones((1, 1)),
        phiInitial=phiInitial,
        phiPrior=phiPrior,
        KConst=K1,
        fDebug=True,
    )
    m.UpdateBranchingPoint(np.ones((1, 1)) * ptb, phiInitial.copy())
    ptbLL = m.log_posterior_density()
    m.UpdateBranchingPoint(np.ones((1, 1)) * 0.20, phiInitial.copy())
    eLL = m.log_posterior_density()
    m.UpdateBranchingPoint(np.ones((1, 1)) * 0.22, phiInitial.copy())
    lll = m.log_posterior_density()
    print(eLL, ptbLL, lll)
    assert eLL < ptbLL
    assert np.allclose(ptbLL, lll)
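
# A small sketch (not part of the original suite) of the input layout the test
# above depends on. GetFunctionIndexListGeneral is assumed to replicate each
# pseudotime point once per latent function -- the trunk plus the two branches,
# labelled 1, 2, 3 in the second column of XExpanded -- so that the branch
# kernel K(XExpanded, XExpanded) covers every function/function pair.
def sketch_expanded_input_layout():
    t = np.linspace(0, 1, 20)
    XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(t)
    print('XExpanded shape:', XExpanded.shape)  # expected (3 * 20, 2) under this assumption
    print('function labels:', np.unique(XExpanded[:, 1]))  # expected [1. 2. 3.]
    print('candidate rows for first point:', indices[0])  # assumed: one row list per data point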
def test(self):
    np.set_printoptions(suppress=True, precision=5)
    seed = 43
    np.random.seed(seed=seed)  # easy peasy reproducibeasy
    tf.set_random_seed(seed)
    # Data generation
    N = 20
    t = np.linspace(0, 1, N)
    print(t)
    trueB = np.ones((1, 1)) * 0.5
    Y = np.zeros((N, 1))
    idx = np.nonzero(t > 0.5)[0]
    idxA = idx[::2]
    idxB = idx[1::2]
    print(idx)
    print(idxA)
    print(idxB)
    Y[idxA, 0] = 2 * t[idxA]
    Y[idxB, 0] = -2 * t[idxB]
    # Create tree structures
    tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
    tree.add(None, 1, trueB)
    assert tree.getRoot().val == trueB
    assert tree.getRoot().idB == 1
    (fm, _) = tree.GetFunctionBranchTensor()
    XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(t)
    # Create model
    Kbranch = bk.BranchKernelParam(
        gpflow.kernels.Matern32(1), fm, b=trueB.copy()) + gpflow.kernels.White(1)
    Kbranch.kernels[1].variance = 1e-6  # controls the discontinuity magnitude, the gap at the branching point
    Kbranch.kernels[1].variance.set_trainable(False)  # jitter for numerics
    # Initial and prior assignment probabilities
    phiPrior = np.ones((N, 2)) * 0.5  # don't know anything
    phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
    phiInitial[:, 0] = np.random.rand(N)
    phiInitial[:, 1] = 1 - phiInitial[:, 0]
    m = assigngp_dense.AssignGP(
        t, XExpanded, Y, Kbranch, indices,
        Kbranch.kernels[0].Bv.value,
        phiPrior=phiPrior, phiInitial=phiInitial)
    InitKernParams(m)
    m.likelihood.variance.set_trainable(False)
    print('Model before initialisation\n', m, '\n===========================')
    gpflow.train.ScipyOptimizer().minimize(m, maxiter=100)
    m.likelihood.variance.set_trainable(True)
    gpflow.train.ScipyOptimizer().minimize(m, maxiter=100)
    print('Model after initialisation\n', m, '\n===========================')
    ttestl, mul, varl = VBHelperFunctions.predictBranchingModel(m)
    _, _, covl = VBHelperFunctions.predictBranchingModel(m, full_cov=True)
    for i in range(len(varl)):
        assert np.all(covl[i].diagonal().flatten() == varl[i].flatten())
    assert len(varl) == 3, 'Must have 3 predictions for 3 functions'
    assert np.all(varl[0] > 0), 'negative variances for function 0'
    assert np.all(varl[1] > 0), 'negative variances for function 1'
    assert np.all(varl[2] > 0), 'negative variances for function 2'
    PhiOptimised = m.GetPhi()
    print('phiPrior', phiPrior)
    print('PhiOptimised', PhiOptimised)
    assert np.allclose(PhiOptimised[idxA, 2], 1), \
        'PhiOptimised idxA=%s' % str(PhiOptimised[idxA, :])
    assert np.allclose(PhiOptimised[idxB, 1], 1), \
        'PhiOptimised idxB=%s' % str(PhiOptimised[idxB, :])
    # reset model and test informative KL prior
    m.UpdateBranchingPoint(Kbranch.kernels[0].Bv.value, phiInitial)  # reset initial phi
    InitKernParams(m)
    ll_flatprior = m.compute_log_likelihood()
    phiInfPrior = np.ones((N, 2)) * 0.5  # don't know anything
    phiInfPrior[-1, :] = [0.99, 0.01]
    # phiInfPrior[-2, :] = [0.01, 0.99]
    m.UpdateBranchingPoint(Kbranch.kernels[0].Bv.value, phiInitial, prior=phiInfPrior)
    ll_betterprior = m.compute_log_likelihood()
    assert ll_betterprior > ll_flatprior, '%f <> %f' % (ll_betterprior, ll_flatprior)
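
# InitKernParams is called above but not defined in this excerpt. A minimal
# sketch, assuming it merely gives the optimiser sensible starting
# hyperparameters; the attribute paths follow the gpflow 1 conventions used in
# this test, and the numeric values are illustrative placeholders, not the
# original initialisation.
def InitKernParams(m):
    m.kern.kernels[0].kern.variance = 2.0  # signal variance of the underlying Matern32 (assumed path)
    m.kern.kernels[0].kern.lengthscales = 5.0  # lengthscale of the underlying Matern32 (assumed path)
    m.likelihood.variance = 0.01  # observation noise (placeholder value)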
def runSparseModel(self, M=None, atolPrediction=1e-3, atolLik=1):
    fDebug = True  # Enable debugging output - tensorflow print ops
    np.set_printoptions(precision=4)  # precision to print numpy array
    seed = 43
    np.random.seed(seed=seed)  # easy peasy reproducibeasy
    tf.set_random_seed(seed)
    # Data generation
    N = 20
    t = np.linspace(0, 1, N)
    print(t)
    trueB = np.ones((1, 1)) * 0.5
    Y = np.zeros((N, 1))
    idx = np.nonzero(t > 0.5)[0]
    idxA = idx[::2]
    idxB = idx[1::2]
    print(idx)
    print(idxA)
    print(idxB)
    Y[idxA, 0] = 2 * t[idxA]
    Y[idxB, 0] = -2 * t[idxB]
    # Create tree structures
    tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
    tree.add(None, 1, trueB)
    (fm, _) = tree.GetFunctionBranchTensor()
    XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(t)
    print('XExpanded', XExpanded.shape)
    print('indices', len(indices))
    # Create model
    Kbranch = bk.BranchKernelParam(
        gpflow.kernels.Matern32(1), fm, b=trueB.copy()) + gpflow.kernels.White(1)
    Kbranch.branchkernelparam.kern.variance = 1
    Kbranch.white.variance = 1e-6  # controls the discontinuity magnitude, the gap at the branching point
    Kbranch.white.variance.set_trainable(False)  # jitter for numerics
    print('Kbranch matrix', Kbranch.compute_K(XExpanded, XExpanded))
    print('Branching K free parameters', Kbranch.branchkernelparam)
    print('Branching K branching parameter', Kbranch.branchkernelparam.Bv.value)
    if M is not None:
        ir = np.random.choice(XExpanded.shape[0], M)
        ZExpanded = XExpanded[ir, :]
    else:
        ZExpanded = XExpanded  # Test on full data
    phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
    mV = assigngp_denseSparse.AssignGPSparse(
        t, XExpanded, Y, Kbranch, indices,
        Kbranch.branchkernelparam.Bv.value, ZExpanded,
        phiInitial=phiInitial, fDebug=fDebug)
    self.InitParams(mV)
    mVFull = assigngp_dense.AssignGP(
        t, XExpanded, Y, Kbranch, indices,
        Kbranch.branchkernelparam.Bv.value,
        fDebug=fDebug, phiInitial=phiInitial)
    self.InitParams(mVFull)
    lsparse = mV.compute_log_likelihood()
    lfull = mVFull.compute_log_likelihood()
    print('Log likelihoods, sparse=%f, full=%f' % (lsparse, lfull))
    self.assertTrue(
        np.allclose(lsparse, lfull, atol=atolLik),
        'Log likelihoods not close, sparse=%f, full=%f' % (lsparse, lfull))
    # check models identical
    assert np.all(mV.GetPhiExpanded() == mVFull.GetPhiExpanded())
    assert mV.likelihood.variance.value == mVFull.likelihood.variance.value
    assert mV.kern is mVFull.kern
    # Test prediction
    Xtest = np.array([[0.6, 2], [0.6, 3]])
    mu_f, var_f = mVFull.predict_f(Xtest)
    mu_s, var_s = mV.predict_f(Xtest)
    print('Sparse model mu=', mu_s, ' variance=', var_s)
    print('Full model mu=', mu_f, ' variance=', var_f)
    self.assertTrue(
        np.allclose(mu_s, mu_f, atol=atolPrediction),
        'mu not close sparse=%s - full=%s ' % (str(mu_s), str(mu_f)))
    self.assertTrue(
        np.allclose(var_s, var_f, atol=atolPrediction),
        'var not close sparse=%s - full=%s ' % (str(var_s), str(var_f)))
    return lsparse, lfull
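
# Assumed usage of runSparseModel (illustrative, not from the original suite).
# With M=None every row of XExpanded serves as an inducing point, so the sparse
# bound should match the full model almost exactly; with a small M the model is
# genuinely sparse and only agrees within looser tolerances. The atol values
# below are placeholders to be tuned, not the original test settings.
def test_sparse_equals_full(self):
    self.runSparseModel(M=None, atolPrediction=1e-3, atolLik=1)

def test_sparse_subset(self):
    self.runSparseModel(M=15, atolPrediction=0.5, atolLik=50)  # placeholder tolerances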
def test(self):
    np.set_printoptions(suppress=True, precision=5)
    seed = 43
    np.random.seed(seed=seed)  # easy peasy reproducibeasy
    tf.random.set_seed(seed)
    # Data generation
    N = 20
    t = np.linspace(0, 1, N)
    print(t)
    trueB = np.ones((1, 1)) * 0.5
    Y = np.zeros((N, 1))
    idx = np.nonzero(t > 0.5)[0]
    idxA = idx[::2]
    idxB = idx[1::2]
    print(idx)
    print(idxA)
    print(idxB)
    Y[idxA, 0] = 2 * t[idxA]
    Y[idxB, 0] = -2 * t[idxB]
    # Create tree structures
    tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
    tree.add(None, 1, trueB)
    assert tree.getRoot().val == trueB
    assert tree.getRoot().idB == 1
    (fm, _) = tree.GetFunctionBranchTensor()
    XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(t)
    # Create model
    Kbranch = (bk.BranchKernelParam(gpflow.kernels.Matern32(), fm, b=trueB.copy())
               + gpflow.kernels.White())
    Kbranch.kernels[1].variance.assign(
        1e-6)  # controls the discontinuity magnitude, the gap at the branching point
    gpflow.set_trainable(Kbranch.kernels[1].variance, False)  # jitter for numerics
    # Initial and prior assignment probabilities
    phiPrior = np.ones((N, 2)) * 0.5  # don't know anything
    phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
    phiInitial[:, 0] = np.random.rand(N)
    phiInitial[:, 1] = 1 - phiInitial[:, 0]
    m = assigngp_dense.AssignGP(
        t,
        XExpanded,
        Y,
        Kbranch,
        indices,
        Kbranch.kernels[0].Bv,
        phiPrior=phiPrior,
        phiInitial=phiInitial,
    )
    InitKernParams(m)
    gpflow.set_trainable(m.likelihood.variance, False)
    print("Model before initialisation\n", m, "\n===========================")
    opt = gpflow.optimizers.Scipy()
    opt.minimize(
        m.training_loss,
        variables=m.trainable_variables,
        options=dict(disp=True, maxiter=100),
    )
    gpflow.set_trainable(m.likelihood.variance, True)
    opt.minimize(
        m.training_loss,
        variables=m.trainable_variables,
        options=dict(disp=True, maxiter=100),
    )
    print("Model after initialisation\n", m, "\n===========================")
    ttestl, mul, varl = VBHelperFunctions.predictBranchingModel(m)
    _, _, covl = VBHelperFunctions.predictBranchingModel(m, full_cov=True)
    assert len(varl) == 3, "Must have 3 predictions for 3 functions"
    assert np.all(varl[0] > 0), "negative variances for function 0"
    assert np.all(varl[1] > 0), "negative variances for function 1"
    assert np.all(varl[2] > 0), "negative variances for function 2"
    PhiOptimised = m.GetPhi()
    print("phiPrior", phiPrior)
    print("PhiOptimised", PhiOptimised)
    assert np.allclose(PhiOptimised[idxA, 2], 1), \
        "PhiOptimised idxA=%s" % str(PhiOptimised[idxA, :])
    assert np.allclose(PhiOptimised[idxB, 1], 1), \
        "PhiOptimised idxB=%s" % str(PhiOptimised[idxB, :])
    # reset model and test informative KL prior
    m.UpdateBranchingPoint(Kbranch.kernels[0].Bv, phiInitial)  # reset initial phi
    InitKernParams(m)
    ll_flatprior = m.log_posterior_density()
    phiInfPrior = np.ones((N, 2)) * 0.5  # don't know anything
    phiInfPrior[-1, :] = [0.99, 0.01]
    # phiInfPrior[-2, :] = [0.01, 0.99]
    m.UpdateBranchingPoint(Kbranch.kernels[0].Bv, phiInitial, prior=phiInfPrior)
    ll_betterprior = m.log_posterior_density()
    assert ll_betterprior > ll_flatprior, "%f <> %f" % (ll_betterprior, ll_flatprior)
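
# As in the gpflow 1 test above, InitKernParams is called here but not defined
# in this excerpt. A minimal sketch in the gpflow 2 style this test uses
# (Parameter.assign); the attribute paths and values are assumptions, not the
# original code.
def InitKernParams(m):
    m.kern.kernels[0].kern.variance.assign(2.0)  # assumed path to the base Matern32 variance
    m.kern.kernels[0].kern.lengthscales.assign(5.0)  # assumed path to its lengthscale
    m.likelihood.variance.assign(0.01)  # placeholder observation noise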