def test(self):
    N = 3  # how many points per function
    tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
    tree.add(None, 1, 0.5)  # single branching point
    (fm, fmb) = tree.GetFunctionBranchTensor()
    # print fmb
    tree.printTree()
    print('fm', fm)
    # print fmb
    t = np.linspace(0.01, 1, 10)
    (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
    # GP flow kernel
    Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
    KbranchParam = bk.BranchKernelParam(gpflow.kernels.RBF(1), fm, b=Bvalues)
    KbranchParam.kern.lengthscales = 2
    KbranchParam.kern.variance = 1
    K = KbranchParam.compute_K(Xtrue, Xtrue)
    assert KbranchParam.Bv.value == 0.5
    samples, L, K = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=True)
    samples2 = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=False)
    # Also try the independent kernel
    indKernel = bk.IndKern(gpflow.kernels.RBF(1))
    samples3, L, K = bk.SampleKernel(indKernel, XForKernel, D=1, tol=1e-6, retChol=True)
    samples4 = KbranchParam.SampleKernel(XForKernel, b=Bvalues)
    XAssignments = bk.GetFunctionIndexSample(t)  # assign to either branch randomly
    XAssignments[XAssignments[:, 0] <= tree.GetBranchValues(), 1] = 1
    samples5 = KbranchParam.SampleKernelFromTree(XAssignments, b=tree.GetBranchValues())
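# Hedged sanity-check sketch (not part of the original test): with retChol=True, SampleKernel
# returns (samples, L, K), where L is taken to be the Cholesky factor of the (jittered)
# covariance K, so L @ L.T should reconstruct K up to the jitter controlled by `tol`. The exact
# jitter handling inside SampleKernel is an assumption here; the helper name is illustrative only.
import numpy as np

def _check_cholesky(L, K, tol=1e-6):
    # Rebuild the covariance from its Cholesky factor and compare, allowing for the jitter.
    return np.allclose(np.dot(L, L.T), K, atol=100 * tol)

# Usage inside the test above: assert _check_cholesky(L, K)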
def test(self):
    branchingPoint = 0.5
    tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
    tree.add(None, 1, branchingPoint)  # single branching point
    (fm, fmb) = tree.GetFunctionBranchTensor()
    # Specify where to evaluate the kernel
    t = np.linspace(0.01, 1, 60)
    (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
    # Specify the kernel and its hyperparameters
    # These determine how smooth and variable the branching functions are
    Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
    KbranchParam = bk.BranchKernelParam(gpflow.kernels.RBF(1), fm, b=Bvalues)
    KbranchParam.kern.lengthscales = 2
    KbranchParam.kern.variance = 1
    # Sample the kernel
    samples = bk.SampleKernel(KbranchParam, XForKernel)
    # Plot the sample
    bk.PlotSample(XForKernel, samples, B=Bvalues)
    # Fit model
    BgridSearch = [0.1, branchingPoint, 1.1]
    globalBranchingLabels = XForKernel[:, 1]  # use correct labels for tests - could add a mistake
    print('Sparse model')
    d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                   maxiter=20, priorConfidence=0.80, M=10)
    bmode = BgridSearch[np.argmax(d['loglik'])]
    assert bmode == branchingPoint, bmode
    # Plot model
    pred = d['prediction']  # prediction object from GP
    _ = bplot.plotBranchModel(bmode, XForKernel[:, 0], samples, pred['xtest'], pred['mu'],
                              pred['var'], d['Phi'], fPlotPhi=True, fColorBar=True, fPlotVar=True)
    _ = bplot.PlotBGPFit(samples, XForKernel[:, 0], BgridSearch, d)
    print('Try dense model')
    d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                   maxiter=20, priorConfidence=0.80, M=0)
    bmode = BgridSearch[np.argmax(d['loglik'])]
    assert bmode == branchingPoint, bmode
    print('Try sparse model with fixed inducing points')
    d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                   maxiter=20, priorConfidence=0.80, M=20, fixInducingPoints=True)
    bmode = BgridSearch[np.argmax(d['loglik'])]
    assert bmode == branchingPoint, bmode
    print('Try sparse model with fixed hyperparameters')
    d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                   maxiter=20, priorConfidence=0.80, M=15, likvar=1e-3, kerlen=2.,
                                   kervar=1., fixHyperparameters=True)
    # You can rerun the same code as many times as you want and get different sample paths
    # We can also sample independent functions. This is the assumption in the overlapping
    # mixtures of GPs model (OMGP) discussed in the paper.
    indKernel = bk.IndKern(gpflow.kernels.RBF(1))
    samplesInd = bk.SampleKernel(indKernel, XForKernel)
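# Hedged sketch (not part of the original test) of how to inspect the grid-search output used
# above: FitModel returns a dict whose 'loglik' entry holds one log-likelihood per candidate
# branching point in BgridSearch, and the estimate is the candidate with the highest value, as
# the asserts above rely on. The helper name below is illustrative only.
import numpy as np

def _summarise_grid_search(BgridSearch, d):
    # Print each candidate branching point with its log-likelihood, then return the mode.
    for b, ll in zip(BgridSearch, np.asarray(d['loglik']).ravel()):
        print('candidate B=%g, log-likelihood=%g' % (b, ll))
    return BgridSearch[int(np.argmax(d['loglik']))]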
def test(self):
    tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
    tree.add(None, 1, 0.5)  # single branching point
    (fm, fmb) = tree.GetFunctionBranchTensor()
    # print fmb
    tree.printTree()
    print("fm", fm)
    # print fmb
    t = np.linspace(0.01, 1, 10)
    (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
    # GP flow kernel
    Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
    KbranchParam = bk.BranchKernelParam(gpflow.kernels.SquaredExponential(), fm, b=Bvalues)
    KbranchParam.kern.lengthscales.assign(2)
    KbranchParam.kern.variance.assign(1)
    _ = KbranchParam.K(Xtrue, Xtrue)
    assert KbranchParam.Bv == 0.5
    _ = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=True)
    _ = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=False)
    # Also try the independent kernel
    indKernel = bk.IndKern(gpflow.kernels.SquaredExponential())
    _ = bk.SampleKernel(indKernel, XForKernel, D=1, tol=1e-6, retChol=True)
    _ = KbranchParam.SampleKernel(XForKernel, b=Bvalues)
    XAssignments = bk.GetFunctionIndexSample(t)  # assign to either branch randomly
    XAssignments[XAssignments[:, 0] <= tree.GetBranchValues(), 1] = 1
    _ = KbranchParam.SampleKernelFromTree(XAssignments, b=tree.GetBranchValues())
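# Hedged note on the input layout, inferred from how XForKernel is indexed elsewhere in these
# tests (XForKernel[:, 0] is passed as the pseudotime input and XForKernel[:, 1] as the
# branching labels); this is not an authoritative spec and the helper name is illustrative only.
import numpy as np

def _split_kernel_inputs(XForKernel):
    X = np.asarray(XForKernel)
    pseudotime = X[:, 0]    # location of each point along the trajectory
    branch_label = X[:, 1]  # which function (trunk or one of the branches) the point belongs to
    return pseudotime, branch_label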
def test(self):
    branchingPoint = 0.5
    tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
    tree.add(None, 1, branchingPoint)  # single branching point
    (fm, fmb) = tree.GetFunctionBranchTensor()
    # Specify where to evaluate the kernel
    t = np.linspace(0.01, 1, 60)
    (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
    # Specify the kernel and its hyperparameters
    # These determine how smooth and variable the branching functions are
    Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
    KbranchParam = bk.BranchKernelParam(gpflow.kernels.SquaredExponential(), fm, b=Bvalues)
    KbranchParam.kern.lengthscales.assign(2.0)
    KbranchParam.kern.variance.assign(1.0)
    # Sample the kernel
    samples = bk.SampleKernel(KbranchParam, XForKernel)
    # Plot the sample
    bk.PlotSample(XForKernel, samples, B=Bvalues)
    # Fit model
    BgridSearch = [0.0001, branchingPoint, 1.1]
    globalBranchingLabels = XForKernel[:, 1]  # use correct labels for tests - could add a mistake
    print("Sparse model")
    d = FitBranchingModel.FitModel(
        BgridSearch,
        XForKernel[:, 0],
        samples,
        globalBranchingLabels,
        maxiter=40,
        priorConfidence=0.80,
        M=10,
    )
    bmode = BgridSearch[np.argmax(d["loglik"])]
    print("tensorflow version", tf.__version__, "GPflow version", gpflow.__version__)
    print(
        "TestSamplingAndPlotting:: Sparse Log likelihood",
        d["loglik"],
        "BgridSearch",
        BgridSearch,
    )
    assert bmode == branchingPoint, bmode
    # Plot model
    pred = d["prediction"]  # prediction object from GP
    _ = bplot.plotBranchModel(
        bmode,
        XForKernel[:, 0],
        samples,
        pred["xtest"],
        pred["mu"],
        pred["var"],
        d["Phi"],
        fPlotPhi=True,
        fColorBar=True,
        fPlotVar=True,
    )
    _ = bplot.PlotBGPFit(samples, XForKernel[:, 0], BgridSearch, d)
    # Dense model (M=0: no inducing points)
    d = FitBranchingModel.FitModel(
        BgridSearch,
        XForKernel[:, 0],
        samples,
        globalBranchingLabels,
        maxiter=40,
        priorConfidence=0.80,
        M=0,
    )
    bmode = BgridSearch[np.argmax(d["loglik"])]
    print(
        "TestSamplingAndPlotting:: Dense Log likelihood",
        d["loglik"],
        "BgridSearch",
        BgridSearch,
    )
    assert bmode == branchingPoint, bmode
    print("Try sparse model with fixed hyperparameters")
    d = FitBranchingModel.FitModel(
        BgridSearch,
        XForKernel[:, 0],
        samples,
        globalBranchingLabels,
        maxiter=20,
        priorConfidence=0.80,
        M=15,
        likvar=1e-3,
        kerlen=2.0,
        kervar=1.0,
        fixHyperparameters=True,
    )
    # You can rerun the same code as many times as you want and get different sample paths
    # We can also sample independent functions.
    # This is the assumption in the overlapping mixtures of GPs model (OMGP) discussed in the paper.
    indKernel = bk.IndKern(gpflow.kernels.SquaredExponential())
    _ = bk.SampleKernel(indKernel, XForKernel)
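# Hedged convenience sketch (not part of the original test) wrapping the fit-then-pick-the-mode
# pattern repeated above. FitBranchingModel.FitModel and the 'loglik' field are used exactly as
# in the tests; the helper name and keyword passthrough are illustrative only.
import numpy as np

def _fit_and_get_mode(BgridSearch, X, Y, labels, **fit_kwargs):
    # Fit the branching GP over all candidate branching points and return the most likely
    # candidate together with the full result dictionary.
    d = FitBranchingModel.FitModel(BgridSearch, X, Y, labels, **fit_kwargs)
    return BgridSearch[int(np.argmax(d["loglik"]))], d

# Example, mirroring the sparse fit above:
# bmode, d = _fit_and_get_mode(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
#                              maxiter=40, priorConfidence=0.80, M=10)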
# %% [markdown]
# Specify the kernel and its hyperparameters
# These determine how smooth and variable the branching functions are

# %%
Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
KbranchParam = bk.BranchKernelParam(gpflow.kernels.RBF(1), fm, b=Bvalues)
KbranchParam.kern.lengthscales = 2
KbranchParam.kern.variance = 1

# %% [markdown]
# Sample the kernel

# %%
samples = bk.SampleKernel(KbranchParam, XForKernel)

# %% [markdown]
# Plot the sample

# %%
bk.PlotSample(XForKernel, samples)

# %% [markdown]
# You can rerun the same code as many times as you want and get different sample paths

# %% [markdown]
# We can also sample independent functions. This is the assumption in the overlapping mixtures
# of GPs model (OMGP) discussed in the paper.

# %%
indKernel = bk.IndKern(gpflow.kernels.RBF(1))
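# %% [markdown]
# A minimal sketch of the next step (an assumption, mirroring the branching-kernel cells above
# and the accompanying tests): draw samples from the independent kernel at the same inputs and
# plot them with the same helpers.

# %%
samplesInd = bk.SampleKernel(indKernel, XForKernel)  # independent-GP sample paths
bk.PlotSample(XForKernel, samplesInd)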