Ejemplo n.º 1
0
 def marginal_plot(self, Ms):
     """Draw each model's marginal predictive distribution on its own axis.

     For every design point in ``self.X`` a grid is built by sweeping the
     first input coordinate over ``self.tau`` (the other two coordinates are
     held fixed), and the first-order Taylor approximation of every model in
     *Ms* is plotted in its paired colour from ``self.cols``.
     """
     axes = self.get_ax()
     self.plot_data(axes)
     for point, axis in zip(self.X, axes):
         # Sweep the first input dimension; keep the remaining two fixed.
         grid = np.array([[t, point[1], point[2]] for t in self.tau])
         for model, colour in zip(Ms, self.cols):
             mean, var = taylor_first_order(model, grid)
             self.plot_prediction(axis, colour, mean, var)
     plt.show()
Ejemplo n.º 2
0
    def design_plot(self, Ms, DCs, designs):
        """Visualise the models' predictions at the next designs suggested by
        each design criterion — one axis per criterion in *DCs*, titled with
        the criterion and marked with a dashed vertical line at the design's
        first coordinate."""
        axes = self.get_ax(len(DCs), height=3)
        for design, criterion, axis in zip(designs, DCs, axes):
            axis.set_title(criterion)
            # Dashed vertical marker at the suggested first input value.
            axis.plot([design[0], design[0]], [0, 1], c='k', linestyle='--')

            # Sweep the first input dimension over tau; hold the rest fixed.
            grid = np.array([[t, design[1], design[2]] for t in self.tau])
            for model, colour in zip(Ms, self.cols):
                mean, var = taylor_first_order(model, grid)
                self.plot_prediction(axis, colour, mean, var)
        plt.show()
Ejemplo n.º 3
0
 def marginal_plot(self, Ms):
     """Plot each model's marginal predictive distribution over ``self.tau``.

     For every design point in ``self.X`` an input grid is built by sweeping
     the first coordinate over ``self.tau`` while holding the other two
     coordinates fixed; the first-order Taylor approximation of each model's
     predictive distribution is then drawn on the matching axis.

     Parameters
     ----------
     Ms : list
         Models accepted by ``taylor_first_order``; paired element-wise with
         ``self.cols`` for colouring.
     """
     axs = self.get_ax()
     self.plot_data(axs)
     for x, ax in zip(self.X, axs):
         # Sweep the first input dimension; keep the remaining two fixed.
         xnew = np.array([[t, x[1], x[2]] for t in self.tau])
         for M, c in zip(Ms, self.cols):
             mu, s2 = taylor_first_order(M, xnew)
             self.plot_prediction(ax, c, mu, s2)
     # NOTE(review): assumes get_ax() returns at least two axes — confirm.
     axs[1].legend(self.legend, loc=3, fontsize=14)
     plt.suptitle('Marginal predictive distributions')
     # Bug fix: `plt.tight_layout(True)` passed True positionally as the
     # `pad` argument (and raises TypeError on matplotlib >= 3.5, where the
     # parameters are keyword-only). The intended call takes no arguments.
     plt.tight_layout()
     plt.subplots_adjust(top=0.85)
     plt.show()
Ejemplo n.º 4
0
    def design_plot(self, Ms, DCs, designs):
        """Plot model predictions at the suggested next designs.

        One axis per design criterion in *DCs*: the axis is titled with the
        criterion, a dashed vertical line marks the design's first coordinate,
        and every model's first-order Taylor predictive distribution is drawn
        over ``self.tau`` with the other two input coordinates held at the
        design's values.

        Parameters
        ----------
        Ms : list
            Models accepted by ``taylor_first_order``; paired with
            ``self.cols`` for colouring.
        DCs : sequence
            Design-criterion labels, one per axis.
        designs : sequence
            Suggested design points, one per criterion.
        """
        axs = self.get_ax(len(DCs), height=3)
        for x, dc, ax in zip(designs, DCs, axs):
            ax.set_title(dc)
            # Dashed vertical marker at the suggested first input value.
            ax.plot([x[0], x[0]], [0, 1], c='k', linestyle='--')

            # Sweep the first input dimension; keep the remaining two fixed.
            xnew = np.array([[t, x[1], x[2]] for t in self.tau])
            for M, c in zip(Ms, self.cols):
                mu, s2 = taylor_first_order(M, xnew)
                self.plot_prediction(ax, c, mu, s2)
        plt.suptitle(
            'Marginal predictive distributions at suggested next designs')
        # Bug fix: `plt.tight_layout(True)` passed True positionally as the
        # `pad` argument (and raises TypeError on matplotlib >= 3.5, where the
        # parameters are keyword-only). The intended call takes no arguments.
        plt.tight_layout()
        plt.subplots_adjust(top=0.85)
        plt.show()
Ejemplo n.º 5
0
	def test_taylor_first_order(self):
		"""Check that taylor_first_order returns a mean of shape (N, E)
		and a covariance of shape (N, E, E) for a SimpleModel on Xs."""
		model = SimpleModel()
		mean, cov = taylor_first_order(model, Xs)
		assert mean.shape == (N, E)
		assert cov.shape == (N, E, E)
Ejemplo n.º 6
0
    # NOTE(review): this fragment appears to be the body of an enclosing
    # model-selection loop (the `break` below exits it); the loop header is
    # outside this excerpt, and the body continues past the last visible line.
    # Laplace approximation of each model's parameter posterior covariance.
    for M in Ms:
        M.Sigma = laplace_approximation(M, X)
    # Compute the marginal predictive distributions of each model.
    graphics.marginal_plot(Ms)
    """
    Model posteriors
    """
    print("== Model posteriors ==")
    # Preallocate per-design, per-model predictive moments.
    mu = np.zeros((len(X), len(Ms), E))  # Means
    s2 = np.zeros((len(X), len(Ms), E, E))  # Covariances
    D = np.array([M.dim_p
                  for M in Ms])  # No. of model parameters for each model

    for i, M in enumerate(Ms):
        # First-order Taylor approximation of marginal predictive distribution
        mu[:, i], s2[:, i] = taylor_first_order(M, X)

    # Compute normalised Akaike weights (one weight per model, summing to 1).
    pis = akaike(Y, mu, s2, D)
    print(" - Normalised Akaike weights")
    for M, p in zip(Ms, pis):
        print("%s: %.5f" % (M.name, p))

    # Check for winner: stop once one model holds essentially all the weight.
    if np.any(pis >= 0.999):
        print("Model %s is the winner!" % Ms[np.argmax(pis)].name)
        break
    """
    Design the next experiment
    """
    Xtest = generate_x_mesh()  # Test points