Example no. 1
def plot_gaussian_mixture(Mu, Sigma, weights=None, x=None, y=None):
    if x is None:
        x = np.arange(0, 1, 0.01)
    if y is None:
        y = np.arange(-0.5, 1.2, 0.01)
    if weights is None:
        weights = np.ones(len(Mu)) / len(Mu)

    if not (len(Mu) == len(Sigma) == len(weights)):
        raise ValueError("Mu, Sigma and weights must have the same length")

    X, Y = np.meshgrid(x, y)
    Pos = np.dstack((X, Y))
    Z = 0

    for i in range(len(Mu)):
        Z = Z + weights[i] * multivariate_normal(Mu[i].ravel(),
                                                 Sigma[i]).pdf(Pos)

    fig = plt.figure(figsize=(12, 7))
    ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in Matplotlib 3.6
    ax.plot_surface(X, Y, Z, cmap="copper", lw=0.5, rstride=1, cstride=1)
    ax.set_xlabel('X axis')
    ax.set_ylabel('Y axis')
    ax.set_zlabel('Z axis')
    plt.tight_layout()
    pml.savefig('mixgaussSurface.pdf')
    plt.show()
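A minimal usage sketch for the function above, assuming the imports the snippet relies on (numpy, matplotlib, scipy.stats.multivariate_normal, and pml for saving figures); the two components below are made up for illustration:

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import multivariate_normal
import probml_utils as pml  # or pyprobml_utils, depending on the repo version

Mu = [np.array([0.3, 0.0]), np.array([0.7, 0.6])]  # hypothetical component means
Sigma = [0.01 * np.eye(2), 0.02 * np.eye(2)]       # hypothetical covariances
plot_gaussian_mixture(Mu, Sigma, weights=[0.4, 0.6])

Example no. 2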
    def plot(self, n_row, n_col, file_name):
        '''
        Plots the mean of each Bernoulli distribution as an image.

        Parameters
        ----------
        n_row : int
            The number of rows of the figure
        n_col : int
            The number of columns of the figure
        file_name : str
            The path where the figure will be stored
        '''
        if n_row * n_col != len(self.mixing_coeffs):
            raise ValueError('The number of rows times columns must match the number of component distributions.')
        fig, axes = plt.subplots(n_row, n_col)

        for (coeff, mean), ax in zip(zip(self.mixing_coeffs, self.probs), axes.flatten()):
            ax.imshow(mean.reshape(28, 28), cmap=plt.cm.gray)
            ax.set_title("%1.2f" % coeff)
            ax.axis("off")

        fig.tight_layout(pad=1.0)
        pml.savefig(f"{file_name}.pdf")
        plt.show()
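A hypothetical call, assuming model is a fitted instance of the surrounding Bernoulli-mixture class with 10 components:

model.plot(n_row=2, n_col=5, file_name="mix_bernoulli_means")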
Example no. 3
def make_convergence_plots():
    X = np.random.randn(2, 1000)
    X = X - np.mean(X, axis=1).reshape(2, 1)

    theta_init = np.array([[1], [-1]])
    sf = 3

    #theta_trajectory_steepest = theta_init.dot(np.ones((1, 10000)))
    theta_trajectory_steepest = theta_init.dot(np.ones((1, 1000)))
    theta_trajectory_natural = theta_trajectory_steepest.copy()
    L_trajectory_steepest = np.zeros(
        (1, theta_trajectory_steepest.shape[1] - 1))
    L_trajectory_natural = np.zeros_like(L_trajectory_steepest)

    eps_steep = 1 / (sf**2) / 5
    eps_nat = eps_steep * sf**2

    for i in range(1, theta_trajectory_steepest.shape[1]):
        L, dL, G = L_dL_G(theta_trajectory_steepest[:, i - 1], X, sf)
        L_trajectory_steepest[:, i - 1] = L
        theta_trajectory_steepest[:, i] = (
            theta_trajectory_steepest[:, i - 1] - eps_steep * dL)
        L, dL, G = L_dL_G(theta_trajectory_natural[:, i - 1], X, sf)
        L_trajectory_natural[:, i - 1] = L
        theta_trajectory_natural[:, i] = (
            theta_trajectory_natural[:, i - 1]
            - eps_nat * np.linalg.lstsq(G, dL, rcond=None)[0])

    plt.plot(theta_trajectory_steepest[0, :].T,
             theta_trajectory_steepest[1, :].T,
             '+r',
             label="Steepest descent")
    plt.plot(theta_trajectory_natural[0, :].T,
             theta_trajectory_natural[1, :].T,
             'xb',
             label="Natural gradient descent")
    plt.xlabel(r"$\theta_1$")
    plt.ylabel(r"$\theta_2$")
    plt.title("Parameter trajectories")
    plt.legend()
    #pml.savefig("DescentPathsSteepestNGDescent.pdf")
    pml.savefig("natgrad_descent_params.pdf")
    plt.show()

    plt.loglog(L_trajectory_steepest.flatten(), '+r', label="Steepest descent")
    plt.loglog(L_trajectory_natural.flatten(),
               'xb',
               label="Natural gradient descent")
    plt.xlabel("Number of update steps")
    plt.ylabel("KL divergence")
    plt.title("KL divergence vs. update step")
    plt.legend()
    #pml.savefig("KLDivergenceSteepestNGDescent.pdf")
    pml.savefig("natgrad_descent_kl.pdf")
    plt.show()
Example no. 4
def plot_pca_vectors(a, b, vectors, w):
    mu_a, mu_b = a.mean(axis=0).reshape(-1, 1), b.mean(axis=0).reshape(-1, 1)
    mid_point = (mu_a + mu_b) / 2

    vector = vectors[:, 0]
    slope_pca = vector[1] / vector[0]
    c_pca = mid_point[1] - slope_pca * mid_point[0]

    slope = w[1] / w[0]
    c = mid_point[1] - slope * mid_point[0]

    x = np.linspace(xmin + 1, xmax + 1, 100)
    z = np.linspace(xmin + 1, xmax + 1, 100)

    plt.figure()
    plt.xlim(xmin, xmax)
    plt.ylim(ymin, ymax)
    plt.plot(a[:, 0], a[:, 1], 'b.', b[:, 0], b[:, 1], 'r+')
    #plt.plot(x, slope*x + c)
    plt.plot(z, slope_pca * z + c_pca)
    #plt.plot(mu_a, mu_b, 'black')
    plt.legend(['Male', 'Female', 'PCA vector'])
    #plt.legend(['Male', 'Female', 'FisherLDA vector', 'PCA vector', 'Means'])
    pml.savefig("fisher_lda_lines_pca.pdf")
    plt.show()
Example no. 5
def callback(X_next, Y_next, i):
    global X_sample, Y_sample
    # Plot samples, surrogate function, noise-free objective and next sampling location
    #plt.subplot(n_iter, 2, 2 * i + 1)
    plt.figure()
    plot_approximation(gpr,
                       X,
                       Y,
                       X_sample,
                       Y_sample,
                       X_next,
                       show_legend=i == 0)
    plt.title(f'Iteration {i+1}')
    if save_figures: pml.savefig(f'bayes-opt-surrogate-{i + 1}.pdf')
    plt.show()

    plt.figure()
    #plt.subplot(n_iter, 2, 2 * i + 2)
    plot_acquisition(X,
                     expected_improvement(X, X_sample, Y_sample, gpr),
                     X_next,
                     show_legend=i == 0)
    if save_figures: pml.savefig(f'bayes-opt-acquisition-{i + 1}.pdf')
    plt.show()

    # Add sample to previous samples
    X_sample = np.append(X_sample, np.atleast_2d(X_next), axis=0)
    Y_sample = np.append(Y_sample, np.atleast_2d(Y_next), axis=0)
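Example no. 6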
def MakePlot(ypreds, SaveN, Title, lowerb=None, upperb=None):
    # Function for creating and saving plots; relies on globals xtrain, ytrain, xtest and vis
    fig, ax = plt.subplots()
    ax.scatter(xtrain,
               ytrain,
               s=140,
               facecolors='none',
               edgecolors='r',
               label='training data')
    #plt.ylim([-10,80])
    #plt.xlim([-8,8])
    Errlogi = lowerb is not None and upperb is not None  # whether error bars will be plotted as well
    if Errlogi:
        errspacing = [
            int(round(s)) for s in np.linspace(0, xtest.shape[0] - 1, 30)
        ]
        ax.errorbar(xtest[errspacing],
                    ypreds[errspacing, 0],
                    yerr=[lowerb[errspacing], upperb[errspacing]])
    for j in range(ypreds.shape[1]):
        ax.plot(xtest,
                ypreds[:, j],
                color='k',
                linewidth=2.0,
                label='prediction' if j == 0 else None,  # avoid duplicate legend entries
                alpha=vis)
    if Errlogi:
        plt.legend(loc=2)
    plt.title(Title)
    pml.savefig(SaveN + '.pdf')
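Example no. 7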
def MakeG(Probs, SaveN):
    fig, ax = plt.subplots()
    ax.bar(X, Probs, align='center')
    plt.xlim([min(X) - .5, max(X) + .5])
    plt.xticks(X)
    plt.yticks(np.linspace(0, 1, 5))
    pml.savefig(SaveN)
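A minimal driver sketch for MakeG, assuming the numpy/matplotlib/pml imports used throughout and that the global X holds the bar positions; names and values are made up:

X = np.arange(1, 6)                        # assumed global: support of the distribution
MakeG(np.ones(5) / 5, "uniform_bars.pdf")  # hypothetical file name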
Example no. 8
def main():
    cmap = create_colormap()
    X = np.loadtxt(rawdata)
    # Normalise data
    X = (X - X.mean(axis=0)) / (X.std(axis=0))
    mu1 = np.array([-1.5, 1.5])
    mu2 = np.array([1.5, -1.5])

    # Initial configuration
    Sigma1 = np.identity(2) * 0.1
    Sigma2 = np.identity(2) * 0.1
    pi = [0.5, 0.5]
    mu = [mu1, mu2]
    Sigma = [Sigma1, Sigma2]

    res = gmm_lib.apply_em(X, pi, mu, Sigma)

    # Create grid-plot
    hist_index = [0, 10, 25, 30, 35, 40]
    fig, ax = plt.subplots(2, 3)
    ax = ax.ravel()
    for ix, axi in zip(hist_index, ax):
        pi, mu, Sigma = res["coeffs"][ix]
        r = res["rvals"][ix]
        if ix == 0:
            r = np.ones_like(r)

        colors = cmap if ix > 0 else "Dark2"
        gmm_lib.plot_mixtures(X, mu, pi, Sigma, r, cmap=colors, ax=axi)
        axi.set_title(f"Iteration {ix}")

    plt.tight_layout()
    pml.savefig('gmm_faithful.pdf', dpi=300)
    plt.show()
Example no. 9
def plot_data(a, b):
    plt.figure()
    plt.plot(a[:, 0], a[:, 1], 'b.', b[:, 0], b[:, 1], 'r+')
    #plt.plot(mu_a, mu_b, 'black')
    plt.legend(['Male', 'Female'])
    pml.savefig("fisher_lda_data.pdf")
    plt.show()
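A usage sketch with synthetic clusters (assuming the usual numpy and pml imports) standing in for the height/weight data the demo presumably loads:

rng = np.random.default_rng(0)
a = rng.normal([180, 80], 5, size=(50, 2))  # synthetic 'male' points
b = rng.normal([165, 60], 5, size=(50, 2))  # synthetic 'female' points
plot_data(a, b)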
Example no. 10
def plot_sigma_vector(Mu, Sigma):
    n = len(Mu)
    plt.figure(figsize=(12, 7))
    for i in range(n):
        plot_sigma_levels(Mu[i], Sigma[i])
    plt.tight_layout()
    pml.savefig('mixgaussSurface.pdf')
    plt.show()
Example no. 11
def plot_signals(signals, suptitle, file_name):
  plt.figure(figsize=(8, 4))
  for i, signal in enumerate(signals, 1):
    plt.subplot(n_signals, 1, i)
    plt.plot(signal)
    plt.xlim([0, N])
    plt.tight_layout()
  plt.suptitle(suptitle)
  plt.subplots_adjust(top=0.85)
  pml.savefig(f'{file_name}.pdf')
  plt.show()
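plot_signals reads the globals n_signals and N; a sketch with assumed values:

N = 500                   # assumed global: samples per signal
t = np.linspace(0, 1, N)
signals = [np.sin(2 * np.pi * 3 * t), np.sign(np.sin(2 * np.pi * 5 * t))]
n_signals = len(signals)  # assumed global: subplot count
plot_signals(signals, "Source signals", "source_signals")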
Example no. 12
def plot_samples(S, title, file_name):
    min_x, max_x = -4, 4
    min_y, max_y = -3, 3
    plt.scatter(S[:, 0], S[:, 1], marker='o', s=16)
    plt.hlines(0, min_x, max_x, linewidth=2)
    plt.vlines(0, min_y, max_y, linewidth=2)
    plt.xlim(min_x, max_x)
    plt.ylim(min_y, max_y)
    plt.title(title)
    pml.savefig(f'{file_name}.pdf')
    plt.show()
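A quick call on standard-normal draws, assuming the same imports as above:

rng = np.random.default_rng(0)
S = rng.standard_normal((500, 2))
plot_samples(S, "Standard normal samples", "samples_demo")  # hypothetical file name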
Example no. 13
def plot_intermediate_steps_single(method, fwd_func, intermediate_steps, xtest, mu_hist, Sigma_hist):
    """
    Plot the intermediate steps of the training process, each one in a different plot.
    """
    for step in intermediate_steps:
        W_step, SW_step = mu_hist[step], Sigma_hist[step]
        x_step, y_step = x[:step], y[:step]  # x, y and key are globals in the source script
        _, axi = plt.subplots()
        plot_mlp_prediction(key, x_step, y_step, xtest, fwd_func, W_step, SW_step, axi)
        axi.set_title(f"step={step}")
        plt.tight_layout()
        pml.savefig(f'{method}-mlp-step-{step}.pdf')
Example no. 14
def MakeGraph(Data, SaveName):
    prior = MakeBeta(Data['prior'])(x)
    likelihood = MakeBeta(Data['lik'])(x)
    posterior = MakeBeta(Data['post'])(x)

    fig, ax = plt.subplots()
    ax.plot(x, prior, 'r', label=MakeLabel(Data, "prior"), linewidth=2.0)
    ax.plot(x, likelihood, 'k--', label=MakeLabel(Data, "lik"), linewidth=2.0)
    ax.plot(x, posterior, 'b--', label=MakeLabel(Data, "post"), linewidth=2.0)
    ax.legend(loc='upper left', shadow=True)
    pml.savefig(SaveName)
    plt.show()
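Example no. 15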
def demo(priorVar, plot_num):
    np.random.seed(1)
    colors = dict(mcolors.BASE_COLORS, **mcolors.CSS4_COLORS)
    N = 10  # number of interior observed points
    D = 150  # number of points we evaluate function at
    xs = np.linspace(0, 1, D)
    allNdx = np.arange(0, D, 1)
    perm = np.random.permutation(D)
    obsNdx = perm[:N]
    obsNdx = np.concatenate((np.array([0]), obsNdx, np.array([D - 1])))
    Nobs = len(obsNdx)
    hidNdx = np.setdiff1d(allNdx, obsNdx)
    Nhid = len(hidNdx)
    xobs = np.random.randn(Nobs)
    obsNoiseVar = 1
    y = xobs + np.sqrt(obsNoiseVar) * np.random.randn(Nobs)
    L = (0.5 * scipy.sparse.diags([-1, 2, -1], [0, 1, 2],
                                  (D - 2, D))).toarray()

    Lambda = 1 / priorVar
    L = L * Lambda
    L1 = L[:, hidNdx]
    L2 = L[:, obsNdx]

    B11 = np.dot(np.transpose(L1), L1)
    B12 = np.dot(np.transpose(L1), L2)
    B21 = np.transpose(B12)

    mu = np.zeros(D)
    mu[hidNdx] = -np.dot(np.dot(np.linalg.inv(B11), B12), xobs)
    mu[obsNdx] = xobs
    inverseB11 = np.linalg.inv(B11)

    Sigma = np.zeros((D, D))
    # https://stackoverflow.com/questions/22927181/selecting-specific-rows-and-columns-from-numpy-array/22927889#22927889
    Sigma[hidNdx[:, None], hidNdx] = inverseB11

    plt.figure()
    plt.plot(obsNdx, xobs, 'bo', markersize=10)
    plt.plot(allNdx, mu, 'r-')

    S2 = np.diag(Sigma)
    upper = (mu + 2 * np.sqrt(S2))
    lower = (mu - 2 * np.sqrt(S2))
    plt.fill_between(allNdx, lower, upper, alpha=0.2)

    for i in range(0, 3):
        fs = np.random.multivariate_normal(mu, Sigma)
        plt.plot(allNdx, fs, 'k-', alpha=0.7)

    plt.title(f'prior variance {priorVar:0.2f}')
    pml.savefig(f'gaussian_interpolation_1d_{plot_num}.pdf')
Example no. 16
def sample_plot_mh(x0, τ, π, μ, σ, n_iterations, xmin, xmax):
    x_hist = metropolis_sample(x0, τ, π, μ, σ, n_iterations)

    fig = plt.figure()
    axs = plt.axes(projection="3d")
    plot_gmm_3d_trace(x_hist, π, μ, σ, f"MH with $N(0,{τ}^2)$ proposal", xmin, xmax, axs)
    style3d(axs, 1.5, 1, 0.8)
    plt.subplots_adjust(left=0.001, bottom=0.208)
    pml.savefig(f"mh_trace_{τ}tau.pdf", pad_inches=0, bbox_inches="tight")

    fig, axs = plt.subplots()
    sm.graphics.tsa.plot_acf(x_hist, lags=45, alpha=None, title=f"MH with $N(0,{τ}^2)$ proposal", ax=axs)
    pml.savefig(f"mh_autocorrelation_{τ}tau.pdf")
def main():
    cmap = create_colormap()
    colors = ["tab:red", "tab:blue"]

    observations = np.loadtxt("../data/faithful.txt")
    # Normalize data
    observations = (observations -
                    observations.mean(axis=0)) / (observations.std(axis=0))
    # Initial configuration

    mixing_coeffs = jnp.array([0.5, 0.5])

    means = jnp.vstack([jnp.array([-1.5, 1.5]), jnp.array([1.5, -1.5])])

    covariances = jnp.array([jnp.eye(2) * 0.1, jnp.eye(2) * 0.1])

    gmm = GMM(mixing_coeffs, means, covariances)

    num_epochs = 2000
    history = gmm.fit_sgd(jnp.array(observations),
                          batch_size=observations.shape[0],
                          num_epochs=num_epochs)
    ll_hist, mix_dist_probs_hist, comp_dist_loc_hist, comp_dist_cov_hist, responsibility_hist = history

    # Create grid-plot
    hist_index = [0, 10, 125, 320, 1450, 1999]
    fig, ax = plt.subplots(2, 3)
    ax = ax.ravel()

    for idx, axi in zip(hist_index, ax):
        means = comp_dist_loc_hist[idx]
        covariances = comp_dist_cov_hist[idx]
        responsibility = responsibility_hist[idx]

        if idx == 0:
            responsibility = jnp.ones_like(responsibility)

        color_map = cmap if idx > 0 else "Dark2"
        gmm.plot(observations,
                 means,
                 covariances,
                 responsibility[:, 0],
                 cmap=color_map,
                 colors=colors,
                 ax=axi)
        axi.set_title(f"Iteration {idx}")

    plt.tight_layout()
    pml.savefig('../figures/gmm_faithful.pdf')
    plt.show()
Example no. 18
def plt_patterns(patterns, ndisplay=None, figsize=30, name=None):

    if ndisplay is None:
        ndisplay = patterns.nimages
    assert patterns.nimages >= ndisplay, ("number of images in the dataset "
                                          "cannot be less than the number of images to be displayed")
    fig, axs = plt.subplots(1, ndisplay, figsize=(figsize, figsize * ndisplay))
    fig.suptitle(f'{name}', fontsize=16, y=0.55)
    for i in range(ndisplay):
        axs[i].imshow(patterns[:, i * patterns.width:(i + 1) * patterns.width],
                      cmap="Greys")
    pml.savefig(f'{name}.pdf')
    plt.show()
Example no. 19
def plot_var_em_bound(x, true_log_ll, lower_bound, title):

    plt.plot(x, true_log_ll, '-b', linewidth=3)
    plt.text(2.5, true_log_ll[-1] + 0.02, 'true log-likelihood', fontweight='bold')

    plt.plot(x, lower_bound, ':r', linewidth=3)
    plt.text(2.8, 0.9, 'lower bound', fontweight='bold')

    plt.xlim([0, 4])
    plt.ylim([0.5, np.max(true_log_ll) + 0.05])
    plt.xlabel('training time', fontweight='bold')
    plt.xticks([])
    plt.yticks([])
    pml.savefig(f'{title}.pdf', dpi=300)
    plt.show()
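A toy invocation of plot_var_em_bound with made-up curves:

x = np.linspace(0, 4, 100)
true_ll = 1 - 0.4 * np.exp(-x)      # stand-in for the true log-likelihood
bound = true_ll - 0.3 * np.exp(-x)  # a lower bound that tightens during training
plot_var_em_bound(x, true_ll, bound, "var_em_bound")

Example no. 20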
def MakeDirSampleFig(alpha):
    AlphaVec = np.repeat(alpha, NSamples)
    samps = np.random.dirichlet(AlphaVec, NSamples)
    fig, ax = plt.subplots(NSamples)
    fig.suptitle('Samples from Dir (alpha=' + str(alpha) + ')', y=1)
    fig.tight_layout()

    for i in range(NSamples):
        ax[i].bar(X, samps[i, :], align='center')
        ax[i].set_ylim([0, 1])
        ax[i].yaxis.set_ticks([0, .5, 1])
        ax[i].set_xlim([min(X) - .5, max(X) + .5])

    plt.draw()
    SaveN = "dirSample" + str(int(np.round(10 * alpha))) + ".pdf"
    pml.savefig(SaveN)
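MakeDirSampleFig reads the globals NSamples and X; as written, NSamples doubles as both the number of draws and the length of the alpha vector, so a consistent sketch is:

NSamples = 5
X = np.arange(1, NSamples + 1)  # assumed global: bar positions for the categories
MakeDirSampleFig(0.1)

Example no. 21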
def main():
    cmap = create_colormap()
    colors = ["tab:red", "tab:blue"]

    url = 'https://raw.githubusercontent.com/probml/probml-data/main/data/faithful.txt'
    response = requests.get(url)
    rawdata = BytesIO(response.content)
    observations = np.loadtxt(rawdata)
    # Normalize data
    observations = (observations - observations.mean(axis=0)) / (observations.std(axis=0))
    # Initial configuration

    mixing_coeffs = jnp.array([0.5, 0.5])

    means = jnp.vstack([jnp.array([-1.5, 1.5]),
                       jnp.array([1.5, -1.5])])

    covariances = jnp.array([jnp.eye(2) * 0.1,
                            jnp.eye(2) * 0.1])

    gmm = GMM(mixing_coeffs, means, covariances)
    num_of_iters = 50
    history = gmm.fit_em(observations, num_of_iters=num_of_iters)
    ll_hist, mix_dist_probs_hist, comp_dist_loc_hist, comp_dist_cov_hist, responsibility_hist = history

    # Create grid-plot
    hist_index = [0, 10, 25, 30, 35, 40]
    fig, ax = plt.subplots(2, 3)
    ax = ax.ravel()

    for idx, axi in zip(hist_index, ax):
        means = comp_dist_loc_hist[idx]
        covariances = comp_dist_cov_hist[idx]
        responsibility = responsibility_hist[idx]

        if idx == 0:
            responsibility = np.ones_like(responsibility)

        color_map = cmap if idx > 0 else "Dark2"
        gmm.plot(observations, means, covariances, responsibility, cmap=color_map, colors=colors, ax=axi)
        axi.set_title(f"Iteration {idx}")

    plt.tight_layout()
    pml.savefig('gmm_faithful.pdf')
    plt.show()
Example no. 22
def plot_proj(name, argument):
    plt.figure()
    if name == 'pca':
        Xproj_pca_male = argument[:nMale]
        Xproj_pca_female = argument[nMale:nMale + nFemale]  # females occupy the rows after the males
        plt.hist(Xproj_pca_male, color='red', ec='black')
        plt.hist(Xproj_pca_female, color='blue', ec='black')
        plt.title('Projection of points onto PCA vector')
        pml.savefig("fisher_lda_pca.pdf")
        plt.show()
    else:
        Xproj_fish_male = argument[:nMale]
        Xproj_fish_female = argument[nMale:nMale + nFemale]
        plt.hist(Xproj_fish_male, color='red', ec='black')
        plt.hist(Xproj_fish_female, color='blue', ec='black')
        plt.title('Projection of points onto Fisher vector')
        pml.savefig("fisher_lda_flda.pdf")
        plt.show()
Example no. 23
def plot_data(data, assignments, title, data_type):
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(data[assignments == 0, 0],
            data[assignments == 0, 1],
            'o',
            color='r')
    ax.plot(data[assignments == 1, 0],
            data[assignments == 1, 1],
            'o',
            color='b')
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.axis('square')
    ax.grid(True)
    ax.set_title(title)
    plt.tight_layout()
    pml.savefig(f"{data_type}_{title.replace(' ', '_')}.pdf")
Example no. 24
def plot_surface(clf, X, y, filename, xnames, ynames):
    n_classes = 3
    plot_step = 0.02
    markers = ['o', 's', '^']

    plt.figure()
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),
                         np.arange(y_min, y_max, plot_step))
    plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)

    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)

    #cmap=plt.cm.jet
    #cmap=plt.cm.RdYlBu
    #cmap = ListedColormap(['orange', 'green', 'purple'])
    cmap = ListedColormap(['blue', 'orange', 'green'])

    cs = plt.contourf(xx, yy, Z, cmap=cmap, alpha=0.5)

    #plot_colors = "ryb"
    #plot_colors = "byg"
    plot_colors = [cmap(i) for i in range(3)]

    plt.xlabel(xnames[0])
    plt.ylabel(xnames[1])

    # Plot the training points
    for i, color, marker in zip(range(n_classes), plot_colors, markers):
        idx = np.where(y == i)
        plt.scatter(X[idx, 0],
                    X[idx, 1],
                    label=ynames[i],
                    edgecolor='black',
                    color=color,
                    s=50,
                    marker=marker)
    plt.legend()
    pml.savefig(filename)
    plt.show()
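Example no. 25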
def main():
    np.random.seed(12)
    data_dim = 8
    n_data = 10
    threshold_missing = 0.5
    mu = np.random.randn(data_dim, 1)
    sigma = make_spd_matrix(
        n_dim=data_dim)  # Generate a random symmetric positive-definite matrix
    # test if the matrix is positive definite
    # print(is_pos_def(sigma))
    x_full = gauss.gauss_sample(mu, sigma, n_data)
    missing = np.random.rand(n_data, data_dim) < threshold_missing
    x_miss = np.copy(x_full)
    x_miss[missing] = np.nan
    x_imputed = gauss.gauss_impute(mu, sigma, x_miss)
    # Create a matrix from x_miss by replacing the NaNs with 0s to display the hinton_diagram
    xmiss0 = np.nan_to_num(x_miss, nan=0.0)

    plot_1 = plt.figure(1)
    pml.hinton_diagram(xmiss0, ax=plot_1.gca())
    plot_1.suptitle('Observed')
    pml.savefig("gauss_impute_observed.pdf", dpi=300)

    plot_2 = plt.figure(2)
    pml.hinton_diagram(x_full, ax=plot_2.gca())
    plot_2.suptitle('Hidden truth')
    pml.savefig("gauss_impute_truth.pdf", dpi=300)

    plot_3 = plt.figure(3)
    pml.hinton_diagram(x_imputed, ax=plot_3.gca())
    plot_3.suptitle('Imputation with true params')
    pml.savefig("gauss_impute_pred.pdf", dpi=300)
    plt.show()
Example no. 26
def sample_plot_gibbs(x0, z0, kv, π, μ, σ, n_iterations, xmin, xmax):
    x_hist, z_hist = gibbs_sample(x0, z0, kv, π, μ, σ, n_iterations)
    colors = ["tab:blue" if z else "tab:red" for z in z_hist]

    fig, axs = plt.subplots()
    axs.scatter(np.arange(n_iterations),
                x_hist,
                s=20,
                facecolors="none",
                edgecolors=colors)
    pml.savefig("gibbs_scatter.pdf")

    fig = plt.figure()
    axs = plt.axes(projection="3d")
    plot_gmm_3d_trace(x_hist, π, μ, σ, "Gibbs sampling", xmin, xmax, axs)
    pml.style3d(axs, 1.5, 1, 0.8)
    plt.subplots_adjust(left=0.001, bottom=0.208, right=0.7)
    pml.savefig("gibbs_trace.pdf", pad_inches=0, bbox_inches="tight")

    fig, axs = plt.subplots()
    sm.graphics.tsa.plot_acf(x_hist,
                             lags=45,
                             alpha=None,
                             title="Gibbs",
                             ax=axs)
    pml.savefig("gibbs_autocorrelation.pdf")
Example no. 27
def make_graph(data, save_name):
    prior = beta.pdf(x, a=data["prior"]["a"], b=data["prior"]["b"])
    n_0 = data["likelihood"]["n_0"]
    n_1 = data["likelihood"]["n_1"]
    samples = jnp.concatenate([jnp.zeros(n_0), jnp.ones(n_1)])
    likelihood_function = jnp.vectorize(
        lambda p: jnp.exp(bernoulli.logpmf(samples, p).sum()))
    likelihood = likelihood_function(x)
    posterior = beta.pdf(x, a=data["posterior"]["a"], b=data["posterior"]["b"])

    fig, ax = plt.subplots()
    axt = ax.twinx()
    fig1 = ax.plot(
        x,
        prior,
        "k",
        label=f"prior Beta({data['prior']['a']}, {data['prior']['b']})",
        linewidth=2.0,
    )
    fig2 = axt.plot(x,
                    likelihood,
                    "r:",
                    label=f"likelihood Bernoulli",
                    linewidth=2.0)
    fig3 = ax.plot(
        x,
        posterior,
        "b-.",
        label=
        f"posterior Beta({data['posterior']['a']}, {data['posterior']['b']})",
        linewidth=2.0,
    )
    fig_list = fig1 + fig2 + fig3
    labels = [fig.get_label() for fig in fig_list]
    ax.legend(fig_list, labels, loc="upper left", shadow=True)
    axt.set_ylabel("Likelihood")
    ax.set_ylabel("Prior/Posterior")
    ax.set_title(f"$N_0$:{n_0}, $N_1$:{n_1}")
    pml.savefig(save_name)
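A hedged usage sketch for make_graph: it presumably relies on a global grid x over (0, 1) and on beta/bernoulli from jax.scipy.stats; the counts are made up, with the posterior given by the conjugate update Beta(a + N_1, b + N_0):

import jax.numpy as jnp

x = jnp.linspace(0.001, 0.999, 200)  # assumed global
data = dict(prior=dict(a=1, b=1),
            likelihood=dict(n_0=3, n_1=7),
            posterior=dict(a=1 + 7, b=1 + 3))
make_graph(data, "beta_binom_demo.pdf")  # hypothetical file name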
Example no. 28
def make_vector_field_plots():
    # initialize the theta domain
    theta1, theta2 = np.meshgrid(np.linspace(-1, 1, 9), np.linspace(-1, 1, 9))
    theta = np.array([theta1.T.flatten(), theta2.T.flatten()])
    sf = 3

    # get random values and subtract their mean
    X = np.random.randn(2, 10000)
    X = X - np.mean(X, axis=1).reshape(2, 1)
    dL = np.zeros_like(theta)
    for i in range(0, theta.shape[1]):
        _, dL[:, i], G = L_dL_G(theta[:, i], X, sf)

    # change derivative to get steepest descent
    dL = -dL
    plt.quiver(theta[0, :], theta[1, :], dL[0, :], dL[1, :])
    plt.xlabel(r"$\theta_1$")
    plt.ylabel(r"$\theta_2$")
    plt.title("Steepest descent vectors in original parameter space")
    #pml.savefig("SDOriginalParam.pdf")
    pml.savefig("natgrad_descent_vectors_orig.pdf")
    plt.show()

    phi = theta.copy()
    theta = np.linalg.inv(sqrtm(G)).dot(phi)
    dL = np.zeros_like(theta)
    for i in range(0, theta.shape[1]):
        _, dL[:, i], G = L_dL_G(theta[:, i], X, sf)
    dL = -dL

    dLphi = sqrtm(np.linalg.inv(G)).dot(dL)
    plt.quiver(phi[0, :], phi[1, :], dLphi[0, :], dLphi[1, :])
    plt.xlabel(r"$\phi_1$")
    plt.ylabel(r"$\phi_2$")
    plt.title("Steepest descent vectors in natural parameter space")
    #pml.savefig("SDNaturalParam.pdf")
    pml.savefig("natgrad_descent_vectors_natural.pdf")
    plt.show()
Example no. 29
def main():
    base = plt.gca().transData
    rot = tr.Affine2D().rotate_deg(30)
    plot_ellipses(0, 0, 9, 81, 3, c="tab:red", linewidth=3.0, transform=rot + base)
    plt.arrow(-1.2, 0, 2.4, 0, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05, color='blue',
              transform=rot + base)
    plt.arrow(0, -0.5, 0, 1, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05, color='blue',
              transform=rot + base)
    plt.arrow(-1.2, 0, 0, 0.5, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05,
              color='black', transform=rot + base)
    plt.arrow(0, -0.5, 1.2, 0, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05,
              color='black', transform=rot + base)
    plt.arrow(-1.2, 0.5, 0, -0.5, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05,
              color='black', transform=rot + base)
    plt.arrow(1.2, -0.5, -1.2, 0, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05,
              color='black', transform=rot + base)
    plt.arrow(1, 0.3, 0, 0.3, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05, color='black',
              transform=rot + base)
    plt.text(0.658, 1, r"$u_2$", fontsize=14)
    plt.text(1, 0.94, r"$u_1$", fontsize=14)
    plt.text(-1.4, -0.48, r"$λ_2^{1/2}$", fontsize=14)
    plt.text(0.85, -0.3, r"$λ_1^{1/2}$", fontsize=14)
    plt.arrow(1, 0.3, 0.3, 0, length_includes_head=True, width=0.015, head_width=0.05, head_length=0.05, color='black',
              transform=rot + base)
    plt.xticks([])
    plt.yticks([])
    plt.xlabel("$x_1$", fontsize=14)
    plt.ylabel("$x_2$", fontsize=14)
    xmin, xmax = plt.xlim()
    ymin, ymax = plt.ylim()
    scale_factor = 1.2
    plt.xlim(xmin * scale_factor, xmax * scale_factor)
    plt.ylim(ymin * scale_factor, ymax * scale_factor)
    plt.tight_layout()
    pml.savefig("gaussEvec.pdf", dpi=300)

    plt.show()
Example no. 30
def sensor_fusion():

    sigmas = [0.01 * np.eye(2), 0.01 * np.eye(2)]
    helper(sigmas)
    pml.savefig("demoGaussBayes2dEqualSpherical.pdf")
    plt.show()

    sigmas = [0.05 * np.eye(2), 0.01 * np.eye(2)]
    helper(sigmas)
    pml.savefig("demoGaussBayes2dUnequalSpherical.pdf")
    plt.show()

    sigmas = [
        0.01 * np.array([[10, 1], [1, 1]]), 0.01 * np.array([[1, 1], [1, 10]])
    ]
    helper(sigmas)
    pml.savefig("demoGaussBayes2dUnequal.pdf")
    plt.show()