Example #1
0
def compare_to_matlab(
        measurement='Near',
        output_file='/Users/nick/Downloads/VBIS GM Fusion/nick_output.csv',
        n_runs=30):
    """Record repeated VB updates to CSV for comparison against MATLAB.

    Runs ``n_runs`` independent variational Bayes updates of a fixed
    five-component Gaussian-mixture prior against the range-model
    likelihood for *measurement*, and appends each run's flattened
    ``(beta_hat, mu_hat, var_hat)`` estimates as one CSV row.

    Parameters
    ----------
    measurement : str, optional
        Measurement label passed to the VB update (default ``'Near'``).
    output_file : str, optional
        Path of the CSV file the flattened estimates are written to.
    n_runs : int, optional
        Number of independent VB updates to record (default 30).
    """
    # Symmetric five-component prior: means fan out along a shallow "V"
    # shape, with diagonal covariances growing toward the center component.
    prior = GaussianMixture(
        weights=[1, 1, 1, 1, 1],
        means=[
            [-2, -4],  # GM1 mean
            [-1, -2],  # GM2 mean
            [0, 0],  # GM3 mean
            [1, -2],  # GM4 mean
            [2, -4],  # GM5 mean
        ],
        covariances=[
            [
                [0.1, 0],  # GM1 covariance
                [0, 0.1]
            ],
            [
                [0.2, 0],  # GM2 covariance
                [0, 0.2]
            ],
            [
                [0.3, 0],  # GM3 covariance
                [0, 0.3]
            ],
            [
                [0.2, 0],  # GM4 covariance
                [0, 0.2]
            ],
            [
                [0.1, 0],  # GM5 covariance
                [0, 0.1]
            ],
        ])

    # Define sensor likelihood
    brm = range_model()

    # `with` guarantees the file is closed even if an update raises.
    with open(output_file, 'w') as file_:
        for _ in range(n_runs):
            # Do a VBIS update; a fresh solver each run avoids any
            # state carrying over between runs.
            logging.info('Starting VB update...')
            vb = VariationalBayes()
            mu_hat, var_hat, beta_hat = vb.update(measurement, brm, prior)

            # Flatten weights, means and covariances into a single row
            flat = np.hstack((beta_hat, mu_hat.flatten(), var_hat.flatten()))

            # Append the flattened values as one CSV row
            np.savetxt(file_, np.atleast_2d(flat), delimiter=',')
Example #2
0
def gmm_sm_test(measurement='Outside'):
    """Visualize a VBIS update of a Gaussian-mixture prior.

    Performs one LWIS-enabled variational Bayes update of a fixed
    five-component prior against the range-model likelihood for
    *measurement*, then plots the prior, the likelihood, and the VBIS
    posterior on a gridded 2-D space, logging the mixture parameters
    before showing the figure.

    Parameters
    ----------
    measurement : str, optional
        Measurement label used for the VB update and for the likelihood
        plot title (default ``'Outside'``).
    """
    # Symmetric five-component prior: means fan out along a shallow "V"
    # shape, with diagonal covariances growing toward the center component.
    prior = GaussianMixture(
        weights=[1, 1, 1, 1, 1],
        means=[
            [-2, -4],  # GM1 mean
            [-1, -2],  # GM2 mean
            [0, 0],  # GM3 mean
            [1, -2],  # GM4 mean
            [2, -4],  # GM5 mean
        ],
        covariances=[
            [
                [0.1, 0],  # GM1 covariance
                [0, 0.1]
            ],
            [
                [0.2, 0],  # GM2 covariance
                [0, 0.2]
            ],
            [
                [0.3, 0],  # GM3 covariance
                [0, 0.3]
            ],
            [
                [0.2, 0],  # GM4 covariance
                [0, 0.2]
            ],
            [
                [0.1, 0],  # GM5 covariance
                [0, 0.1]
            ],
        ])

    # Define sensor likelihood
    brm = range_model()

    # Do a VBIS update
    logging.info('Starting VB update...')
    vb = VariationalBayes()
    mu_hat, var_hat, beta_hat = vb.update(measurement,
                                          brm,
                                          prior,
                                          use_LWIS=True)
    vbis_posterior = GaussianMixture(weights=beta_hat,
                                     means=mu_hat,
                                     covariances=var_hat)

    # Define gridded space for graphing: res points per unit on each axis,
    # stacked into an (nx, ny, 2) array of (x1, x2) coordinates.
    min_x, max_x = -5, 5
    min_y, max_y = -5, 5
    res = 100
    x_space, y_space = np.mgrid[min_x:max_x:1 / res, min_y:max_y:1 / res]
    pos = np.empty(x_space.shape + (2, ))
    pos[:, :, 0] = x_space
    pos[:, :, 1] = y_space

    # Contour levels scaled to each distribution's own maximum
    levels_res = 50
    max_prior = np.max(prior.pdf(pos))
    prior_levels = np.linspace(0, max_prior, levels_res)

    brm.probability()
    max_lh = np.max(brm.probs)
    lh_levels = np.linspace(0, max_lh, levels_res)
    max_post = np.max(vbis_posterior.pdf(pos))
    post_levels = np.linspace(0, max_post, levels_res)

    # Plot results: prior (top-left), likelihood (top-right),
    # posterior (bottom); narrow columns hold the colorbars.
    plt.figure()
    likelihood_label = 'Likelihood of \'{}\''.format(measurement)

    prior_ax = plt.subplot2grid((2, 32), (0, 0), colspan=14)
    prior_cax = plt.subplot2grid((2, 32), (0, 14), colspan=1)
    prior_c = prior_ax.contourf(x_space,
                                y_space,
                                prior.pdf(pos),
                                levels=prior_levels)
    plt.colorbar(prior_c, cax=prior_cax)
    prior_ax.set_xlabel('x1')
    prior_ax.set_ylabel('x2')
    prior_ax.set_title('Prior Distribution')

    lh_ax = plt.subplot2grid((2, 32), (0, 17), colspan=14)
    lh_cax = plt.subplot2grid((2, 32), (0, 31), colspan=1)
    brm.classes[measurement].plot(ax=lh_ax,
                                  label=likelihood_label,
                                  ls='--',
                                  levels=lh_levels,
                                  show_plot=False,
                                  plot_3D=False)
    # NOTE(review): likelihood colorbar axis (lh_cax) is created but left
    # unpopulated, matching the original behavior.
    lh_ax.set_title(likelihood_label)

    posterior_ax = plt.subplot2grid((2, 32), (1, 0), colspan=31)
    posterior_cax = plt.subplot2grid((2, 32), (1, 31), colspan=1)
    posterior_c = posterior_ax.contourf(x_space,
                                        y_space,
                                        vbis_posterior.pdf(pos),
                                        levels=post_levels)
    plt.colorbar(posterior_c, cax=posterior_cax)
    posterior_ax.set_xlabel('x1')
    posterior_ax.set_ylabel('x2')
    posterior_ax.set_title('VBIS Posterior Distribution')

    logging.info(
        'Prior Weights: \n {} \n Means: \n {} \n Variances: \n {} \n'.format(
            prior.weights, prior.means, prior.covariances))
    logging.info(
        'Posterior Weights: \n {} \n Means: \n {} \n Variances: \n {} \n'.
        format(vbis_posterior.weights, vbis_posterior.means,
               vbis_posterior.covariances))

    plt.show()