Example #1
    def optimize(self, num_meta):
        print("Optimizing variational parameters...")
        # Start from the initial variational parameters; each pass of adam
        # below then warm-starts from the result of the previous pass.
        self.update_param = self.init_var_params.copy()
        for i in range(num_meta):
            self.update_param = adam(
                self.gradient, self.update_param, step_size=0.1, num_iters=100, callback=self.callback
            )
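
Both of the methods on this page lean on the same pattern: call autograd's adam (from autograd.misc.optimizers) repeatedly and feed the parameters returned by one call back in as the starting point of the next. A minimal self-contained sketch of that warm-start loop, with a hypothetical quadratic objective standing in for the variational one:

import autograd.numpy as np
from autograd import grad
from autograd.misc.optimizers import adam

# Hypothetical stand-in for the variational objective. autograd's
# optimizers pass the iteration index t as a second argument.
def objective(params, t):
    return np.sum((params - 3.0) ** 2)

gradient = grad(objective)

params = np.zeros(5)  # initial parameters
for i in range(3):    # the "meta" iterations of optimize() above
    # Each call resumes from the parameters the previous call returned.
    params = adam(gradient, params, step_size=0.1, num_iters=100)
print(params)  # approaches 3.0 in every coordinate

Note that only the parameter values carry over between calls; adam's moment estimates are re-initialized on every call, so this warm-starts the iterate, not the optimizer state.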
Example #2
    def optimize_restarts(self, num_restarts=30, robust=False, verbose=True):
        # NOTE: robust and verbose are accepted but not used yet.
        print("Optimizing variational parameters...")

        # TODO: set num_iters = 1 to make debugging easier
        print("shape of X is: " + str(self.X.shape))

        self.update_param = self.init_var_params.copy()

        print("current params are " + str(self.init_var_params))

        # Each "restart" resumes from the parameters found so far rather
        # than re-initializing them.
        for i in range(num_restarts):
            self.update_param = adam(self.gradient, self.update_param,
                                     step_size=0.1, num_iters=100, callback=self.callback)

def plot_isocontours(ax, func, xlimits=[-2, 2], ylimits=[-4, 2], numticks=101):
    x = np.linspace(*xlimits, num=numticks)
    y = np.linspace(*ylimits, num=numticks)
    X, Y = np.meshgrid(x, y)
    zs = func(np.concatenate([np.atleast_2d(X.ravel()), np.atleast_2d(Y.ravel())]).T)
    Z = zs.reshape(X.shape)
    plt.contour(X, Y, Z)
    ax.set_yticks([])
    ax.set_xticks([])

if __name__ == '__main__':

    # Set up figure.
    fig = plt.figure(figsize=(8, 8), facecolor="white")
    ax = fig.add_subplot(111, frameon=False)
    plt.ion()
    plt.show(block=False)

    # Called by adam after every iteration: report the current lower bound,
    # then redraw the target posterior and the variational approximation.
    def callback(params, t, g):
        print("Iteration {} lower bound {}".format(t, -objective(params, t)))

        plt.cla()
        target_distribution = lambda x: np.exp(log_posterior(x, t))
        plot_isocontours(ax, target_distribution)

        mean, log_std = unpack_params(params)
        variational_contour = lambda x: mvn.pdf(x, mean, np.diag(np.exp(2 * log_std)))
        plot_isocontours(ax, variational_contour)
        plt.draw()
        plt.pause(1.0 / 30.0)

    print("Optimizing variational parameters...")
    init_mean = -1 * np.ones(D)
    init_log_std = -5 * np.ones(D)
    init_var_params = np.concatenate([init_mean, init_log_std])
    variational_params = adam(gradient, init_var_params, step_size=0.1, num_iters=2000, callback=callback)
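
Example #2 leaves several names undefined: objective, gradient, unpack_params, log_posterior, D, mvn, and the adam import. They closely follow the black-box variational inference setup in autograd's black_box_svi.py example. A condensed sketch of how they can be wired up; the standard-normal target, D = 2, and num_samples = 100 are assumptions made here for illustration:

import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad
from autograd.misc.optimizers import adam
import autograd.scipy.stats.multivariate_normal as mvn

D = 2  # dimensionality of the latent space (assumed)

def log_posterior(x, t):
    # Hypothetical target: a standard normal log-density over rows of x.
    return -0.5 * np.sum(x ** 2, axis=1)

def unpack_params(params):
    # First D entries are the mean, the remaining D the log std devs.
    return params[:D], params[D:]

rs = npr.RandomState(0)
num_samples = 100  # Monte Carlo samples per ELBO estimate (assumed)

def objective(params, t):
    # Negative ELBO, estimated with the reparameterization trick:
    # sample = mean + std * eps with eps ~ N(0, I).
    mean, log_std = unpack_params(params)
    samples = rs.randn(num_samples, D) * np.exp(log_std) + mean
    gaussian_entropy = 0.5 * D * (1.0 + np.log(2 * np.pi)) + np.sum(log_std)
    return -(gaussian_entropy + np.mean(log_posterior(samples, t)))

gradient = grad(objective)

With these definitions in scope, the module-level code above runs as written, and the callback's -objective(params, t) is a stochastic estimate of the evidence lower bound.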