Example #1
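Most of the snippets below are test methods from the BranchedGP package and assume a shared import header that this listing omits. A plausible reconstruction, inferred from the identifiers used below and from the import style of Example #10 (the exact module paths are assumptions and differ between BranchedGP/GPflow versions):

import numpy as np
import tensorflow as tf
import gpflow
from gpflow import set_trainable  # used bare in Example #5 (GPflow 2)

from BranchedGP import BranchingTree as bt
from BranchedGP import branch_kernParamGPflow as bk
from BranchedGP import FitBranchingModel
from BranchedGP import VBHelperFunctions
from BranchedGP import assigngp_dense
from BranchedGP import assigngp_denseSparse

# Assumed: the plotting helpers referenced as bplot, and the InitKernParams
# helper used in Examples #7 and #8, are defined elsewhere in the test suite.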
    def test(self):
        N = 3  # number of points per function (unused here; t below is defined with 10 points)
        tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
        tree.add(None, 1, 0.5)  # single branching point
        (fm, fmb) = tree.GetFunctionBranchTensor()
        # print fmb

        tree.printTree()
        print('fm', fm)
        # print fmb
        t = np.linspace(0.01, 1, 10)
        (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
        # GP flow kernel
        Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
        KbranchParam = bk.BranchKernelParam(gpflow.kernels.RBF(1), fm, b=Bvalues)
        KbranchParam.kern.lengthscales = 2
        KbranchParam.kern.variance = 1

        K = KbranchParam.compute_K(Xtrue, Xtrue)
        assert KbranchParam.Bv.value == 0.5


        samples, L, K = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=True)
        samples2 = bk.SampleKernel(KbranchParam, XForKernel, D=1, tol=1e-6, retChol=False)

        # Also try the independent kernel
        indKernel = bk.IndKern(gpflow.kernels.RBF(1))
        samples3, L, K = bk.SampleKernel(indKernel, XForKernel, D=1, tol=1e-6, retChol=True)

        samples4 = KbranchParam.SampleKernel(XForKernel, b=Bvalues)

        XAssignments = bk.GetFunctionIndexSample(t)  # assign to either branch randomly
        XAssignments[XAssignments[:, 0] <= tree.GetBranchValues(), 1] = 1
        samples5 = KbranchParam.SampleKernelFromTree(XAssignments, b=tree.GetBranchValues())
Example #2
    def test(self):
        branchingPoint = 0.5
        tree = bt.BinaryBranchingTree(0, 10, fDebug=False)  # set to true to print debug messages
        tree.add(None, 1, branchingPoint)  # single branching point
        (fm, fmb) = tree.GetFunctionBranchTensor()
        # Specify where to evaluate the kernel
        t = np.linspace(0.01, 1, 60)
        (XForKernel, indicesBranch, Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
        # Specify the kernel and its hyperparameters
        # These determine how smooth and variable the branching functions are
        Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
        KbranchParam = bk.BranchKernelParam(gpflow.kernels.RBF(1), fm, b=Bvalues)
        KbranchParam.kern.lengthscales = 2
        KbranchParam.kern.variance = 1
        # Sample the kernel
        samples = bk.SampleKernel(KbranchParam, XForKernel)
        # Plot the sample
        bk.PlotSample(XForKernel, samples, B=Bvalues)
        # Fit model
        BgridSearch = [0.1, branchingPoint, 1.1]
        globalBranchingLabels = XForKernel[:, 1]  # use correct labels for tests
        # one could deliberately corrupt some labels here to test robustness (see the sketch after this example)
        print('Sparse model')
        d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                       maxiter=20, priorConfidence=0.80, M=10)
        bmode = BgridSearch[np.argmax(d['loglik'])]
        assert bmode == branchingPoint, bmode
        # Plot model
        pred = d['prediction']  # prediction object from GP
        _ = bplot.plotBranchModel(bmode, XForKernel[:, 0], samples, pred['xtest'], pred['mu'], pred['var'],
                                  d['Phi'], fPlotPhi=True, fColorBar=True, fPlotVar=True)


        _ = bplot.PlotBGPFit(samples, XForKernel[:, 0], BgridSearch, d)

        print('Try dense model')
        d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                       maxiter=20, priorConfidence=0.80, M=0)
        bmode = BgridSearch[np.argmax(d['loglik'])]
        assert bmode == branchingPoint, bmode
        print('Try sparse model with fixed inducing points')
        d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                       maxiter=20, priorConfidence=0.80, M=20, fixInducingPoints=True)
        bmode = BgridSearch[np.argmax(d['loglik'])]
        assert bmode == branchingPoint, bmode
        print('Try sparse model with fixed hyperparameters')
        d = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, globalBranchingLabels,
                                       maxiter=20, priorConfidence=0.80, M=15,
                                       likvar=1e-3, kerlen=2., kervar=1., fixHyperparameters=True)

        # You can rerun the same code as many times as you want and get different sample paths
        # We can also sample independent functions. This is the assumption in the overlapping mixtures of GPs model (OMGP) discussed in the paper.
        indKernel = bk.IndKern(gpflow.kernels.RBF(1))
        samplesInd = bk.SampleKernel(indKernel, XForKernel)
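The "could add a mistake" comment above hints at checking robustness to mislabelled points. A minimal sketch of such a check (hypothetical, not part of the original test; it assumes trunk points carry label 1 and the two branches labels 2 and 3):

        branchIdx = np.flatnonzero(globalBranchingLabels > 1)  # points already past the branching point
        nFlip = max(1, int(0.1 * branchIdx.size))  # corrupt roughly 10% of them
        flip = np.random.choice(branchIdx, size=nFlip, replace=False)
        noisyLabels = globalBranchingLabels.copy()
        noisyLabels[flip] = 5 - noisyLabels[flip]  # swap branch labels 2 <-> 3
        dNoisy = FitBranchingModel.FitModel(BgridSearch, XForKernel[:, 0], samples, noisyLabels,
                                            maxiter=20, priorConfidence=0.80, M=10)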
Example #3
 def test(self):
     tree = bt.BinaryBranchingTree(0, 1, fDebug=True)
     trueB = 0.2
     tree.add(None, 1, trueB)
     tree.add(1, 2, trueB + 0.1)
     tree.add(2, 3, trueB + 0.1 + 0.2)
     tree.add(1, 4, trueB + 0.1 + 0.3)
     assert tree.getRoot().idB == 1
     assert tree.getRoot().val == trueB
     tree.printTree()
     assert tree.findLCAPath(3, 4)[0] == 1
     fm, fmb = tree.GetFunctionBranchTensor()
     assert np.all(fm.shape == (9, 9, 4))
     assert np.all(fmb.shape == (9, 9, 4))
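For reference, a binary branching tree with B branching points carries 2B + 1 latent functions (the trunk plus two per branch point), so the four add calls above yield 9 functions; this is consistent with the 9 in the (9, 9, 4) shapes asserted here, and the trailing 4 matches the four branching points.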
Example #4
    def test(self):
        tree = bt.BinaryBranchingTree(
            0, 10, fDebug=False)  # set to true to print debug messages
        tree.add(None, 1, 0.5)  # single branching point
        (fm, fmb) = tree.GetFunctionBranchTensor()
        # print fmb

        tree.printTree()
        print("fm", fm)
        # print fmb
        t = np.linspace(0.01, 1, 10)
        (XForKernel, indicesBranch,
         Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
        # GP flow kernel
        Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
        KbranchParam = bk.BranchKernelParam(
            gpflow.kernels.SquaredExponential(), fm, b=Bvalues)
        KbranchParam.kern.lengthscales.assign(2)
        KbranchParam.kern.variance.assign(1)

        _ = KbranchParam.K(Xtrue, Xtrue)
        assert KbranchParam.Bv == 0.5

        _ = bk.SampleKernel(KbranchParam,
                            XForKernel,
                            D=1,
                            tol=1e-6,
                            retChol=True)
        _ = bk.SampleKernel(KbranchParam,
                            XForKernel,
                            D=1,
                            tol=1e-6,
                            retChol=False)

        # Also try the independent kernel
        indKernel = bk.IndKern(gpflow.kernels.SquaredExponential())
        _ = bk.SampleKernel(indKernel, XForKernel, D=1, tol=1e-6, retChol=True)

        _ = KbranchParam.SampleKernel(XForKernel, b=Bvalues)

        XAssignments = bk.GetFunctionIndexSample(
            t)  # assign to either branch randomly
        XAssignments[XAssignments[:, 0] <= tree.GetBranchValues(), 1] = 1
        _ = KbranchParam.SampleKernelFromTree(XAssignments,
                                              b=tree.GetBranchValues())
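This example repeats the test from Example #1 against the GPflow 2 API: gpflow.kernels.SquaredExponential() replaces gpflow.kernels.RBF(1), hyperparameters are set via .assign() rather than plain assignment, the kernel matrix comes from K() instead of compute_K(), and Bv is compared directly rather than through .value.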
Example #5
    def test(self):
        fDebug = True  # Enable debugging output - tensorflow print ops
        np.set_printoptions(suppress=True, precision=5)
        seed = 43
        np.random.seed(seed=seed)  # easy peasy reproducibeasy
        tf.random.set_seed(seed)
        # Data generation
        N = 20
        t = np.linspace(0, 1, N)
        print(t)
        Y = np.zeros((N, 1))
        idx = np.nonzero(t > 0.5)[0]
        idxA = idx[::2]
        idxB = idx[1::2]
        print(idx)
        print(idxA)
        print(idxB)
        Y[idxA, 0] = 2 * t[idxA]
        Y[idxB, 0] = -2 * t[idxB]
        globalBranchingLabels = np.ones(N)
        globalBranchingLabels[4::2] = 2
        globalBranchingLabels[5::2] = 3

        XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(
            t)
        phiInitial, phiPrior = FitBranchingModel.GetInitialConditionsAndPrior(
            globalBranchingLabels, 0.51, False)
        ptb = np.min([
            np.min(t[globalBranchingLabels == 2]),
            np.min(t[globalBranchingLabels == 3]),
        ])
        tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
        tree.add(None, 1, np.ones((1, 1)) * ptb)  # B can be anything here
        (fm1, _) = tree.GetFunctionBranchTensor()

        # Look at kernels
        fDebug = True
        Kbranch1 = bk.BranchKernelParam(gpflow.kernels.Matern32(),
                                        fm1,
                                        b=np.ones((1, 1)) * ptb,
                                        fDebug=fDebug)
        K1 = Kbranch1.K(XExpanded, XExpanded)

        Kbranch2 = bk.BranchKernelParam(gpflow.kernels.Matern32(),
                                        fm1,
                                        b=np.ones((1, 1)) * 0.20,
                                        fDebug=fDebug)
        _ = Kbranch2.K(XExpanded, XExpanded)

        Kbranch3 = bk.BranchKernelParam(gpflow.kernels.Matern32(),
                                        fm1,
                                        b=np.ones((1, 1)) * 0.22,
                                        fDebug=fDebug)
        _ = Kbranch3.K(XExpanded, XExpanded)

        # Look at model
        kb = (bk.BranchKernelParam(
            gpflow.kernels.Matern32(), fm1, b=np.zeros(
                (1, 1))) + gpflow.kernels.White())
        kb.kernels[1].variance.assign(
            1e-6
        )  # controls the discontinuity magnitude, the gap at the branching point
        set_trainable(kb.kernels[1].variance, False)  # jitter for numerics
        # m = assigngp_dense.AssignGP(
        #     t, XExpanded, Y, kb, indices, np.ones((1, 1)), phiInitial=phiInitial, phiPrior=phiPrior
        # )
        m = assigngp_dense.AssignGP(
            t,
            XExpanded,
            Y,
            kb,
            indices,
            np.ones((1, 1)),
            phiInitial=phiInitial,
            phiPrior=phiPrior,
            KConst=K1,
            fDebug=True,
        )

        m.UpdateBranchingPoint(np.ones((1, 1)) * ptb, phiInitial.copy())
        ptbLL = m.log_posterior_density()
        m.UpdateBranchingPoint(np.ones((1, 1)) * 0.20, phiInitial.copy())
        eLL = m.log_posterior_density()
        m.UpdateBranchingPoint(np.ones((1, 1)) * 0.22, phiInitial.copy())
        lll = m.log_posterior_density()
        print(eLL, ptbLL, lll)
        assert eLL < ptbLL
        assert np.allclose(ptbLL, lll)
Example #6
    def runSparseModel(self, M=None, atolPrediction=1e-3, atolLik=1):
        fDebug = True  # Enable debugging output - tensorflow print ops
        np.set_printoptions(precision=4)  # precision to print numpy array
        seed = 43
        np.random.seed(seed=seed)  # easy peasy reproducibeasy
        tf.set_random_seed(seed)
        # Data generation
        N = 20
        t = np.linspace(0, 1, N)
        print(t)
        trueB = np.ones((1, 1)) * 0.5
        Y = np.zeros((N, 1))
        idx = np.nonzero(t > 0.5)[0]
        idxA = idx[::2]
        idxB = idx[1::2]
        print(idx)
        print(idxA)
        print(idxB)
        Y[idxA, 0] = 2 * t[idxA]
        Y[idxB, 0] = -2 * t[idxB]
        # Create tree structures
        tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
        tree.add(None, 1, trueB)
        (fm, _) = tree.GetFunctionBranchTensor()
        XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(
            t)
        print('XExpanded', XExpanded.shape)
        print('indices', len(indices))
        # Create model
        Kbranch = bk.BranchKernelParam(
            gpflow.kernels.Matern32(1), fm,
            b=trueB.copy()) + gpflow.kernels.White(1)
        Kbranch.branchkernelparam.kern.variance = 1
        Kbranch.white.variance = 1e-6  # controls the discontinuity magnitude, the gap at the branching point
        Kbranch.white.variance.set_trainable(False)  # jitter for numerics
        print('Kbranch matrix', Kbranch.compute_K(XExpanded, XExpanded))
        print('Branching K free parameters', Kbranch.branchkernelparam)
        print('Branching K branching parameter',
              Kbranch.branchkernelparam.Bv.value)
        if M is not None:
            ir = np.random.choice(XExpanded.shape[0], M)
            ZExpanded = XExpanded[ir, :]
        else:
            ZExpanded = XExpanded  # Test on full data

        phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
        mV = assigngp_denseSparse.AssignGPSparse(
            t,
            XExpanded,
            Y,
            Kbranch,
            indices,
            Kbranch.branchkernelparam.Bv.value,
            ZExpanded,
            phiInitial=phiInitial,
            fDebug=fDebug)
        self.InitParams(mV)

        mVFull = assigngp_dense.AssignGP(t,
                                         XExpanded,
                                         Y,
                                         Kbranch,
                                         indices,
                                         Kbranch.branchkernelparam.Bv.value,
                                         fDebug=fDebug,
                                         phiInitial=phiInitial)
        self.InitParams(mVFull)

        lsparse = mV.compute_log_likelihood()
        lfull = mVFull.compute_log_likelihood()
        print('Log likelihoods, sparse=%f, full=%f' % (lsparse, lfull))
        self.assertTrue(
            np.allclose(lsparse, lfull, atol=atolLik),
            'Log likelihoods not close, sparse=%f, full=%f' % (lsparse, lfull))

        # check models identical
        assert np.all(mV.GetPhiExpanded() == mVFull.GetPhiExpanded())
        assert mV.likelihood.variance.value == mVFull.likelihood.variance.value
        assert mV.kern is mVFull.kern

        # Test prediction
        Xtest = np.array([[0.6, 2], [0.6, 3]])
        mu_f, var_f = mVFull.predict_f(Xtest)
        mu_s, var_s = mV.predict_f(Xtest)
        print('Sparse model mu=', mu_s, ' variance=', var_s)
        print('Full model mu=', mu_f, ' variance=', var_f)
        self.assertTrue(
            np.allclose(mu_s, mu_f, atol=atolPrediction),
            'mu not close sparse=%s - full=%s ' % (str(mu_s), str(mu_f)))
        self.assertTrue(
            np.allclose(var_s, var_f, atol=atolPrediction),
            'var not close sparse=%s - full=%s ' % (str(var_s), str(var_f)))
        return lsparse, lfull
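Note that when runSparseModel is called with M=None, ZExpanded falls back to the full XExpanded, so every expanded input acts as an inducing point and the sparse bound should match the full model's likelihood to within atolLik; a finite M subsamples inducing points at random and loosens the match.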
Example #7
 def test(self):
     np.set_printoptions(suppress=True, precision=5)
     seed = 43
     np.random.seed(seed=seed)  # easy peasy reproducibeasy
     tf.set_random_seed(seed)
     # Data generation
     N = 20
     t = np.linspace(0, 1, N)
     print(t)
     trueB = np.ones((1, 1)) * 0.5
     Y = np.zeros((N, 1))
     idx = np.nonzero(t > 0.5)[0]
     idxA = idx[::2]
     idxB = idx[1::2]
     print(idx)
     print(idxA)
     print(idxB)
     Y[idxA, 0] = 2 * t[idxA]
     Y[idxB, 0] = -2 * t[idxB]
     # Create tree structures
     tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
     tree.add(None, 1, trueB)
     assert tree.getRoot().val == trueB
     assert tree.getRoot().idB == 1
     (fm, _) = tree.GetFunctionBranchTensor()
     XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(
         t)
     # Create model
     Kbranch = bk.BranchKernelParam(
         gpflow.kernels.Matern32(1), fm,
         b=trueB.copy()) + gpflow.kernels.White(1)
     Kbranch.kernels[1].variance = 1e-6  # controls the discontinuity magnitude, the gap at the branching point
     Kbranch.kernels[1].variance.set_trainable(False)  # jitter for numerics
     # Create model
     phiPrior = np.ones((N, 2)) * 0.5  # don't know anything
     phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
     phiInitial[:, 0] = np.random.rand(N)
     phiInitial[:, 1] = 1 - phiInitial[:, 0]
     m = assigngp_dense.AssignGP(t,
                                 XExpanded,
                                 Y,
                                 Kbranch,
                                 indices,
                                 Kbranch.kernels[0].Bv.value,
                                 phiPrior=phiPrior,
                                 phiInitial=phiInitial)
     InitKernParams(m)
     m.likelihood.variance.set_trainable(False)
     print('Model before initialisation\n', m,
           '\n===========================')
     gpflow.train.ScipyOptimizer().minimize(m, maxiter=100)
     m.likelihood.variance.set_trainable(True)
     gpflow.train.ScipyOptimizer().minimize(m, maxiter=100)
     print('Model after initialisation\n', m,
           '\n===========================')
     ttestl, mul, varl = VBHelperFunctions.predictBranchingModel(m)
     _, _, covl = VBHelperFunctions.predictBranchingModel(m, full_cov=True)
     for i in range(len(varl)):
         assert np.all(covl[i].diagonal().flatten() == varl[i].flatten())
     assert len(varl) == 3, 'Must have 3 predictions for 3 functions'
     assert np.all(varl[0] > 0), 'neg variances for variance function 0'
     assert np.all(varl[1] > 0), 'neg variances for variance function 1'
     assert np.all(varl[2] > 0), 'neg variances for variance function 2'
     PhiOptimised = m.GetPhi()
     print('phiPrior', phiPrior)
     print('PhiOptimised', PhiOptimised)
     assert np.allclose(
         PhiOptimised[idxA, 2],
         1), 'PhiOptimised idxA=%s' % str(PhiOptimised[idxA, :])
     assert np.allclose(
         PhiOptimised[idxB, 1],
         1), 'PhiOptimised idxB=%s' % str(PhiOptimised[idxB, :])
     # reset model and test informative KL prior
     m.UpdateBranchingPoint(Kbranch.kernels[0].Bv.value,
                            phiInitial)  # reset initial phi
     InitKernParams(m)
     ll_flatprior = m.compute_log_likelihood()
     phiInfPrior = np.ones((N, 2)) * 0.5  # don't know anything
     phiInfPrior[-1, :] = [0.99, 0.01]
     # phiInfPrior[-2, :] = [0.01, 0.99]
     m.UpdateBranchingPoint(Kbranch.kernels[0].Bv.value,
                            phiInitial,
                            prior=phiInfPrior)
     ll_betterprior = m.compute_log_likelihood()
     assert ll_betterprior > ll_flatprior, '%f <> %f' % (ll_betterprior,
                                                         ll_flatprior)
Example #8
    def test(self):
        np.set_printoptions(suppress=True, precision=5)
        seed = 43
        np.random.seed(seed=seed)  # easy peasy reproducibeasy
        tf.random.set_seed(seed)
        # Data generation
        N = 20
        t = np.linspace(0, 1, N)
        print(t)
        trueB = np.ones((1, 1)) * 0.5
        Y = np.zeros((N, 1))
        idx = np.nonzero(t > 0.5)[0]
        idxA = idx[::2]
        idxB = idx[1::2]
        print(idx)
        print(idxA)
        print(idxB)
        Y[idxA, 0] = 2 * t[idxA]
        Y[idxB, 0] = -2 * t[idxB]
        # Create tree structures
        tree = bt.BinaryBranchingTree(0, 1, fDebug=False)
        tree.add(None, 1, trueB)
        assert tree.getRoot().val == trueB
        assert tree.getRoot().idB == 1
        (fm, _) = tree.GetFunctionBranchTensor()
        XExpanded, indices, _ = VBHelperFunctions.GetFunctionIndexListGeneral(
            t)
        # Create model
        Kbranch = (bk.BranchKernelParam(
            gpflow.kernels.Matern32(), fm, b=trueB.copy()) +
                   gpflow.kernels.White())
        Kbranch.kernels[1].variance.assign(
            1e-6
        )  # controls the discontinuity magnitude, the gap at the branching point
        gpflow.set_trainable(Kbranch.kernels[1].variance,
                             False)  # jitter for numerics
        # Create model
        phiPrior = np.ones((N, 2)) * 0.5  # don't know anything
        phiInitial = np.ones((N, 2)) * 0.5  # don't know anything
        phiInitial[:, 0] = np.random.rand(N)
        phiInitial[:, 1] = 1 - phiInitial[:, 0]
        m = assigngp_dense.AssignGP(
            t,
            XExpanded,
            Y,
            Kbranch,
            indices,
            Kbranch.kernels[0].Bv,
            phiPrior=phiPrior,
            phiInitial=phiInitial,
        )
        InitKernParams(m)
        gpflow.set_trainable(m.likelihood.variance, False)
        print("Model before initialisation\n", m,
              "\n===========================")
        opt = gpflow.optimizers.Scipy()
        opt.minimize(
            m.training_loss,
            variables=m.trainable_variables,
            options=dict(disp=True, maxiter=100),
        )
        gpflow.set_trainable(m.likelihood.variance, True)
        opt.minimize(
            m.training_loss,
            variables=m.trainable_variables,
            options=dict(disp=True, maxiter=100),
        )
        print("Model after initialisation\n", m,
              "\n===========================")
        ttestl, mul, varl = VBHelperFunctions.predictBranchingModel(m)
        _, _, covl = VBHelperFunctions.predictBranchingModel(m, full_cov=True)

        assert len(varl) == 3, "Must have 3 predictions for 3 functions"
        assert np.all(varl[0] > 0), "neg variances for variance function 0"
        assert np.all(varl[1] > 0), "neg variances for variance function 1"
        assert np.all(varl[2] > 0), "neg variances for variance function 2"
        PhiOptimised = m.GetPhi()
        print("phiPrior", phiPrior)
        print("PhiOptimised", PhiOptimised)
        assert np.allclose(
            PhiOptimised[idxA, 2],
            1), "PhiOptimised idxA=%s" % str(PhiOptimised[idxA, :])
        assert np.allclose(
            PhiOptimised[idxB, 1],
            1), "PhiOptimised idxB=%s" % str(PhiOptimised[idxB, :])
        # reset model and test informative KL prior
        m.UpdateBranchingPoint(Kbranch.kernels[0].Bv,
                               phiInitial)  # reset initial phi
        InitKernParams(m)
        ll_flatprior = m.log_posterior_density()
        phiInfPrior = np.ones((N, 2)) * 0.5  # don't know anything
        phiInfPrior[-1, :] = [0.99, 0.01]
        # phiInfPrior[-2, :] = [0.01, 0.99]
        m.UpdateBranchingPoint(Kbranch.kernels[0].Bv,
                               phiInitial,
                               prior=phiInfPrior)
        ll_betterprior = m.log_posterior_density()
        assert ll_betterprior > ll_flatprior, "%f <> %f" % (
            ll_betterprior,
            ll_flatprior,
        )
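This test mirrors Example #7, ported from the GPflow 1 API to GPflow 2: trainability is toggled with gpflow.set_trainable instead of the set_trainable method, gpflow.train.ScipyOptimizer().minimize(m, ...) becomes gpflow.optimizers.Scipy().minimize(m.training_loss, ...), compute_log_likelihood becomes log_posterior_density, and the branching point is passed as the Bv parameter itself rather than Bv.value.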
Example #9
    def test(self):
        branchingPoint = 0.5
        tree = bt.BinaryBranchingTree(
            0, 10, fDebug=False)  # set to true to print debug messages
        tree.add(None, 1, branchingPoint)  # single branching point
        (fm, fmb) = tree.GetFunctionBranchTensor()
        # Specify where to evaluate the kernel
        t = np.linspace(0.01, 1, 60)
        (XForKernel, indicesBranch,
         Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)
        # Specify the kernel and its hyperparameters
        # These determine how smooth and variable the branching functions are
        Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
        KbranchParam = bk.BranchKernelParam(
            gpflow.kernels.SquaredExponential(), fm, b=Bvalues)
        KbranchParam.kern.lengthscales.assign(2.0)
        KbranchParam.kern.variance.assign(1.0)
        # Sample the kernel
        samples = bk.SampleKernel(KbranchParam, XForKernel)
        # Plot the sample
        bk.PlotSample(XForKernel, samples, B=Bvalues)
        # Fit model
        BgridSearch = [0.0001, branchingPoint, 1.1]
        globalBranchingLabels = XForKernel[:, 1]  # use correct labels for tests
        # one could deliberately corrupt some labels here to test robustness (see the sketch after Example #2)
        print("Sparse model")
        d = FitBranchingModel.FitModel(
            BgridSearch,
            XForKernel[:, 0],
            samples,
            globalBranchingLabels,
            maxiter=40,
            priorConfidence=0.80,
            M=10,
        )
        bmode = BgridSearch[np.argmax(d["loglik"])]
        print("tensorflow version", tf.__version__, "GPflow version",
              gpflow.__version__)
        print(
            "TestSamplingAndPlotting:: Sparse Log likelihood",
            d["loglik"],
            "BgridSearch",
            BgridSearch,
        )
        assert bmode == branchingPoint, bmode
        # Plot model
        pred = d["prediction"]  # prediction object from GP
        _ = bplot.plotBranchModel(
            bmode,
            XForKernel[:, 0],
            samples,
            pred["xtest"],
            pred["mu"],
            pred["var"],
            d["Phi"],
            fPlotPhi=True,
            fColorBar=True,
            fPlotVar=True,
        )

        _ = bplot.PlotBGPFit(samples, XForKernel[:, 0], BgridSearch, d)
        d = FitBranchingModel.FitModel(
            BgridSearch,
            XForKernel[:, 0],
            samples,
            globalBranchingLabels,
            maxiter=40,
            priorConfidence=0.80,
            M=0,
        )
        bmode = BgridSearch[np.argmax(d["loglik"])]
        print(
            "TestSamplingAndPlotting:: Dense Log likelihood",
            d["loglik"],
            "BgridSearch",
            BgridSearch,
        )
        assert bmode == branchingPoint, bmode
        print("Try sparse model with fixed hyperparameters")
        d = FitBranchingModel.FitModel(
            BgridSearch,
            XForKernel[:, 0],
            samples,
            globalBranchingLabels,
            maxiter=20,
            priorConfidence=0.80,
            M=15,
            likvar=1e-3,
            kerlen=2.0,
            kervar=1.0,
            fixHyperparameters=True,
        )

        # You can rerun the same code as many times as you want and get different sample paths
        # We can also sample independent functions.
        # This is the assumption in the overlapping mixtures of GPs model (OMGP) discussed in the paper.
        indKernel = bk.IndKern(gpflow.kernels.SquaredExponential())
        _ = bk.SampleKernel(indKernel, XForKernel)
Example #10
import numpy as np
from matplotlib import pyplot as plt

from BranchedGP import BranchingTree as bt
from BranchedGP import branch_kernParamGPflow as bk

plt.style.use("ggplot")
# %matplotlib inline

# %% [markdown]
# ### Create the tree
# Specify where the branching point is

# %%
branchingPoint = 0.5
tree = bt.BinaryBranchingTree(
    0, 10, fDebug=False)  # set to true to print debug messages
tree.add(None, 1, branchingPoint)  # single branching point
(fm, fmb) = tree.GetFunctionBranchTensor()

# %% [markdown]
# Specify where to evaluate the kernel

# %%
t = np.linspace(0.01, 1, 10)
(XForKernel, indicesBranch,
 Xtrue) = tree.GetFunctionIndexList(t, fReturnXtrue=True)

# %% [markdown]
# Specify the kernel and its hyperparameters
# These determine how smooth and variable the branching functions are
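The code cell that followed this markdown cell is truncated in this listing; based on the identical steps in Examples #2 and #9, it would plausibly continue along these lines (a sketch assuming the same GPflow 2 API used in Example #9):

# %%
import gpflow

Bvalues = np.expand_dims(np.asarray(tree.GetBranchValues()), 1)
KbranchParam = bk.BranchKernelParam(gpflow.kernels.SquaredExponential(), fm, b=Bvalues)
KbranchParam.kern.lengthscales.assign(2.0)
KbranchParam.kern.variance.assign(1.0)

# %% [markdown]
# Sample the kernel and plot the sample

# %%
samples = bk.SampleKernel(KbranchParam, XForKernel)
bk.PlotSample(XForKernel, samples, B=Bvalues)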