Example #1
0
    def lwis_update(self, prior):
        """Estimate posterior moments via likelihood-weighted importance sampling.

        Uses the first component of ``prior`` as the importance
        distribution, draws samples from it, reweights each sample by
        ``prior.pdf * likelihood / q.pdf``, and returns the weighted
        sample mean and covariance.

        NOTE(review): several names used below (``num_samples``,
        ``likelihood``, ``measurement``, ``mu_VB``, ``var_VB``,
        ``log_c_hat``) are not defined in this method's scope -- they
        presumably come from the enclosing class or module in the
        original file; verify before use.

        clustering:
            pairwise greedy merging - compare means, weights & variances
            salmond's method and runnals' method (better)

        """
        # Moments of the prior's first (assumed only) component.
        prior_mean = np.asarray(prior.means[0])
        prior_var = np.asarray(prior.covariances[0])

        # Importance distribution
        q = GaussianMixture(1, prior_mean, prior_var)

        # Importance sampling correction
        w = np.zeros(num_samples)  # Importance weights
        x = q.rvs(size=num_samples)  # Sampled points
        x = np.asarray(x)
        # Some softmax models expose finer-grained 'subclasses'; fall
        # back to the coarse 'classes' mapping otherwise.
        if hasattr(likelihood, 'subclasses'):
            measurement_class = likelihood.subclasses[measurement]
        else:
            measurement_class = likelihood.classes[measurement]

        # Standard IS weight: w_i = p(x_i) * p(z|x_i) / q(x_i)
        for i in range(num_samples):
            w[i] = prior.pdf(x[i]) \
                * measurement_class.probability(state=x[i])\
                / q.pdf(x[i])
        w /= np.sum(w)  # Normalize weights

        # Weighted sample mean (shape taken from mu_VB -- see NOTE above).
        mu_hat = np.zeros_like(np.asarray(mu_VB))
        for i in range(num_samples):
            x_i = np.asarray(x[i])
            mu_hat = mu_hat + x_i.dot(w[i])

        # Weighted second moment; subtracting outer(mean, mean) below
        # yields the weighted sample covariance.
        var_hat = np.zeros_like(np.asarray(var_VB))
        for i in range(num_samples):
            x_i = np.asarray(x[i])
            var_hat = var_hat + w[i] * np.outer(x_i, x_i)
        var_hat -= np.outer(mu_hat, mu_hat)

        # Collapse 1-element arrays to scalars for 1-D problems.
        if mu_hat.size == 1 and mu_hat.ndim > 0:
            mu_lwis = mu_hat[0]
        else:
            mu_lwis = mu_hat
        if var_hat.size == 1:
            var_lwis = var_hat[0][0]
        else:
            var_lwis = var_hat

        logging.debug(
            'LWIS update found mean of {} and variance of {}.'.format(
                mu_lwis, var_lwis))

        return mu_lwis, var_lwis, log_c_hat
Example #2
0
    class camera_tester(object):
        """Interactive test harness for camera-based VBIS updates.

        Moves a camera detection model along a cyclic trajectory and, at
        each step, fuses a 'No Detection' measurement into the Gaussian
        mixture belief via a VBIS (LWIS) update, then redraws the plot.
        """
        def __init__(self,
                     prior,
                     detection_model,
                     trajectory,
                     num_std=1,
                     bounds=None):
            """
            Parameters
            ----------
            prior : GaussianMixture
                Initial belief over the target state.
            detection_model :
                Camera-style softmax model; must support ``move(pose)``
                and expose a ``poly`` attribute.
            trajectory : iterable
                Sequence of camera poses; cycled over indefinitely.
            num_std : int, optional
                Number of standard deviations passed to the VB update.
            bounds : list, optional
                Plot bounds as [min_x, min_y, max_x, max_y]; defaults
                to [-5, -5, 5, 5].
            """
            self.fig = plt.figure(figsize=(16, 8))
            self.gm = prior
            self.detection_model = detection_model
            self.trajectory = itertools.cycle(trajectory)
            self.vb = VariationalBayes()
            self.num_std = num_std
            if bounds is None:
                self.bounds = [-5, -5, 5, 5]
            else:
                self.bounds = bounds

        def update(self, i=0):
            """Advance the camera one pose and fuse a 'No Detection'."""
            self.camera_pose = next(self.trajectory)
            logging.info('Moving to pose {}.'.format(self.camera_pose))
            self.detection_model.move(self.camera_pose)

            # Do a VBIS update.
            # BUGFIX: the original referenced the undefined free name
            # `detection_model` here; use the instance attribute.
            mu, sigma, beta = self.vb.update(measurement='No Detection',
                                             likelihood=self.detection_model,
                                             prior=self.gm,
                                             use_LWIS=True,
                                             poly=self.detection_model.poly,
                                             num_std=self.num_std)
            self.gm = GaussianMixture(weights=beta,
                                      means=mu,
                                      covariances=sigma)
            # Log what's going on
            logging.info(self.gm)
            logging.info('Weight sum: {}'.format(beta.sum()))

            self.remove()
            self.plot()

        def plot(self):
            """Draw the GM pdf contours, camera polygon and ellipses.

            NOTE(review): relies on ``self.ax``, ``self.xx``, ``self.yy``
            and ``self.pos`` created by ``plot_setup`` -- call that first.
            """
            levels_res = 50
            self.levels = np.linspace(0, np.max(self.gm.pdf(self.pos)),
                                      levels_res)
            self.contourf = self.ax.contourf(self.xx,
                                             self.yy,
                                             self.gm.pdf(self.pos),
                                             levels=self.levels,
                                             cmap=plt.get_cmap('jet'))
            # Plot camera
            self.cam_patch = PolygonPatch(self.detection_model.poly,
                                          facecolor='none',
                                          linewidth=2,
                                          edgecolor='white')
            self.ax.add_patch(self.cam_patch)

            # Plot ellipses
            self.ellipse_patches = self.gm.plot_ellipses(
                poly=self.detection_model.poly)

        def plot_setup(self):
            """Create the gridded evaluation space, axis and colorbar."""
            # Define gridded space for graphing
            min_x, max_x = self.bounds[0], self.bounds[2]
            min_y, max_y = self.bounds[1], self.bounds[3]
            res = 30
            self.xx, self.yy = np.mgrid[min_x:max_x:1 / res,
                                        min_y:max_y:1 / res]
            pos = np.empty(self.xx.shape + (2, ))
            pos[:, :, 0] = self.xx
            pos[:, :, 1] = self.yy
            self.pos = pos

            # Plot setup
            self.ax = self.fig.add_subplot(111)

            self.ax.set_title('VBIS with camera detection test')
            plt.axis('scaled')
            self.ax.set_xlim([min_x, max_x])
            self.ax.set_ylim([min_y, max_y])

            levels_res = 50
            self.levels = np.linspace(0, np.max(self.gm.pdf(self.pos)),
                                      levels_res)
            cax = self.contourf = self.ax.contourf(self.xx,
                                                   self.yy,
                                                   self.gm.pdf(self.pos),
                                                   levels=self.levels,
                                                   cmap=plt.get_cmap('jet'))
            self.fig.colorbar(cax)

        def remove(self):
            """Remove previously drawn artists so update() can redraw."""
            if hasattr(self, 'cam_patch'):
                self.cam_patch.remove()
                del self.cam_patch

            if hasattr(self, 'ellipse_patches'):
                for patch in self.ellipse_patches:
                    patch.remove()
                del self.ellipse_patches

            if hasattr(self, 'contourf'):
                for collection in self.contourf.collections:
                    collection.remove()
                del self.contourf
Example #3
0
def gmm_sm_test(measurement='Outside'):
    """Run a VBIS (LWIS) update of a five-component GM prior against a
    range-model softmax likelihood and plot prior, likelihood and
    posterior side by side.

    Parameters
    ----------
    measurement : str, optional
        Label of the observed softmax class (default 'Outside').
    """
    # Define prior: five unit-weight, axis-aligned components arranged
    # in a shallow 'V' shape.
    prior = GaussianMixture(
        weights=[1, 1, 1, 1, 1],
        means=[
            [-2, -4],  # GM1 mean
            [-1, -2],  # GM2 mean
            [0, 0],  # GM3 mean
            [1, -2],  # GM4 mean
            [2, -4],  # GM5 mean
        ],
        covariances=[
            [
                [0.1, 0],  # GM1 mean
                [0, 0.1]
            ],
            [
                [0.2, 0],  # GM2 mean
                [0, 0.2]
            ],
            [
                [0.3, 0],  # GM3 mean
                [0, 0.3]
            ],
            [
                [0.2, 0],  # GM4 mean
                [0, 0.2]
            ],
            [
                [0.1, 0],  # GM5 mean
                [0, 0.1]
            ],
        ])

    # Define sensor likelihood
    brm = range_model()

    # Do a VBIS update
    logging.info('Starting VB update...')
    vb = VariationalBayes()
    mu_hat, var_hat, beta_hat = vb.update(measurement,
                                          brm,
                                          prior,
                                          use_LWIS=True)
    vbis_posterior = GaussianMixture(weights=beta_hat,
                                     means=mu_hat,
                                     covariances=var_hat)

    # Define gridded space for graphing
    min_x, max_x = -5, 5
    min_y, max_y = -5, 5
    res = 100
    x_space, y_space = np.mgrid[min_x:max_x:1 / res, min_y:max_y:1 / res]
    pos = np.empty(x_space.shape + (2, ))
    pos[:, :, 0] = x_space
    pos[:, :, 1] = y_space

    # Shared contour levels, scaled to each distribution's max.
    levels_res = 50
    max_prior = np.max(prior.pdf(pos))
    prior_levels = np.linspace(0, max_prior, levels_res)

    # Populate brm.probs so max_lh can be computed.
    brm.probability()
    max_lh = np.max(brm.probs)
    lh_levels = np.linspace(0, max_lh, levels_res)
    max_post = np.max(vbis_posterior.pdf(pos))
    post_levels = np.linspace(0, max_post, levels_res)

    # Plot results (new figure; subplot2grid draws on the current one)
    plt.figure()
    likelihood_label = 'Likelihood of \'{}\''.format(measurement)

    prior_ax = plt.subplot2grid((2, 32), (0, 0), colspan=14)
    prior_cax = plt.subplot2grid((2, 32), (0, 14), colspan=1)
    prior_c = prior_ax.contourf(x_space,
                                y_space,
                                prior.pdf(pos),
                                levels=prior_levels)
    plt.colorbar(prior_c, cax=prior_cax)
    prior_ax.set_xlabel('x1')
    prior_ax.set_ylabel('x2')
    prior_ax.set_title('Prior Distribution')

    lh_ax = plt.subplot2grid((2, 32), (0, 17), colspan=14)
    lh_cax = plt.subplot2grid((2, 32), (0, 31), colspan=1)
    brm.classes[measurement].plot(ax=lh_ax,
                                  label=likelihood_label,
                                  ls='--',
                                  levels=lh_levels,
                                  show_plot=False,
                                  plot_3D=False)
    # plt.colorbar(sm.probs, cax=lh_cax)
    lh_ax.set_title(likelihood_label)

    posterior_ax = plt.subplot2grid((2, 32), (1, 0), colspan=31)
    posterior_cax = plt.subplot2grid((2, 32), (1, 31), colspan=1)
    posterior_c = posterior_ax.contourf(x_space,
                                        y_space,
                                        vbis_posterior.pdf(pos),
                                        levels=post_levels)
    plt.colorbar(posterior_c, cax=posterior_cax)
    posterior_ax.set_xlabel('x1')
    posterior_ax.set_ylabel('x2')
    posterior_ax.set_title('VBIS Posterior Distribution')

    logging.info(
        'Prior Weights: \n {} \n Means: \n {} \n Variances: \n {} \n'.format(
            prior.weights, prior.means, prior.covariances))
    logging.info(
        'Posterior Weights: \n {} \n Means: \n {} \n Variances: \n {} \n'.
        format(vbis_posterior.weights, vbis_posterior.means,
               vbis_posterior.covariances))

    plt.show()
Example #4
0
def comparison_2d():
    """Compare VB and VBIS updates on a 2-D Gaussian prior against
    Nisar's reference results, then plot prior, likelihood and the
    VBIS posterior.
    """
    # Define prior
    prior_mean = np.array([2.3, 1.2])
    prior_var = np.array([[2, 0.6], [0.6, 2]])
    prior = GaussianMixture(1, prior_mean, prior_var)

    # Define sensor likelihood
    sm = intrinsic_space_model()
    measurement = 'Front'

    # Do a VB update
    init_mean = np.zeros((1, 2))
    init_var = np.eye(2)
    init_alpha = 0.5
    init_xi = np.ones(5)

    vb = VariationalBayes()
    vb_mean, vb_var, _ = vb.vb_update(measurement, sm, prior, init_mean,
                                      init_var, init_alpha, init_xi)

    # Reference values from Nisar's implementation; log the deviation.
    nisar_vb_mean = np.array([1.795546121012238, 2.512627005425541])
    nisar_vb_var = np.array([[0.755723395661314, 0.091742424424428],
                             [0.091742424424428, 0.747611340151417]])
    diff_vb_mean = vb_mean - nisar_vb_mean
    diff_vb_var = vb_var - nisar_vb_var
    logging.info(
        'Nisar\'s VB update had mean difference: \n {}\n and var difference: \n {}\n'
        .format(diff_vb_mean, diff_vb_var))

    # Do a VBIS update and wrap the result in a single-component GM.
    vb_mean, vb_var, _ = vb.vbis_update(measurement, sm, prior, init_mean,
                                        init_var, init_alpha, init_xi)
    vb_posterior = GaussianMixture(1, vb_mean, vb_var)

    # Define gridded space for graphing
    min_x, max_x = -5, 5
    min_y, max_y = -5, 5
    res = 200
    x_space, y_space = np.mgrid[min_x:max_x:1 / res, min_y:max_y:1 / res]
    pos = np.empty(x_space.shape + (2, ))
    pos[:, :, 0] = x_space
    pos[:, :, 1] = y_space

    # Contour levels scaled to each distribution's max.
    levels_res = 30
    max_prior = np.max(prior.pdf(pos))
    prior_levels = np.linspace(0, max_prior, levels_res)

    # Populate sm.probs so max_lh can be computed.
    sm.probability()
    max_lh = np.max(sm.probs)
    lh_levels = np.linspace(0, max_lh, levels_res)

    max_post = np.max(vb_posterior.pdf(pos))
    post_levels = np.linspace(0, max_post, levels_res)

    # Plot results (new figure; subplot2grid draws on the current one)
    plt.figure()
    likelihood_label = 'Likelihood of \'{}\''.format(measurement)

    prior_ax = plt.subplot2grid((2, 32), (0, 0), colspan=14)
    prior_cax = plt.subplot2grid((2, 32), (0, 14), colspan=1)
    prior_c = prior_ax.contourf(x_space,
                                y_space,
                                prior.pdf(pos),
                                levels=prior_levels)
    plt.colorbar(prior_c, cax=prior_cax)
    prior_ax.set_xlabel('x1')
    prior_ax.set_ylabel('x2')
    prior_ax.set_title('Prior Distribution')

    lh_ax = plt.subplot2grid((2, 32), (0, 17), colspan=14)
    lh_cax = plt.subplot2grid((2, 32), (0, 31), colspan=1)
    sm.classes[measurement].plot(ax=lh_ax,
                                 label=likelihood_label,
                                 plot_3D=False,
                                 levels=lh_levels)
    # plt.colorbar(sm.probs, cax=lh_cax)
    lh_ax.set_title(likelihood_label)

    posterior_ax = plt.subplot2grid((2, 32), (1, 0), colspan=31)
    posterior_cax = plt.subplot2grid((2, 32), (1, 31), colspan=1)
    posterior_c = posterior_ax.contourf(x_space,
                                        y_space,
                                        vb_posterior.pdf(pos),
                                        levels=post_levels)
    plt.colorbar(posterior_c, cax=posterior_cax)
    posterior_ax.set_xlabel('x1')
    posterior_ax.set_ylabel('x2')
    posterior_ax.set_title('VB Posterior Distribution')

    plt.show()
Example #5
0
def comparison_1d():
    """Compare VB and VBIS updates on a 1-D Gaussian prior against
    Nisar's reference results, then plot both posteriors with the
    prior and likelihood.
    """
    # Define prior
    prior_mean, prior_var = 0.3, 0.01
    min_x, max_x = -5, 5
    res = 10000

    prior = GaussianMixture(1, prior_mean, prior_var)
    x_space = np.linspace(min_x, max_x, res)

    # Define sensor likelihood
    sm = speed_model()
    measurement = 'Slow'

    # Do a VB update
    init_mean, init_var = 0, 1
    init_alpha, init_xi = 0.5, np.ones(4)

    vb = VariationalBayes()
    vb_mean, vb_var, _ = vb.vb_update(measurement, sm, prior, init_mean,
                                      init_var, init_alpha, init_xi)
    vb_posterior = GaussianMixture(1, vb_mean, vb_var)

    # Reference values from Nisar's implementation; log the deviation.
    nisar_vb_mean = 0.131005297841171
    nisar_vb_var = 6.43335516254277e-05
    diff_vb_mean = vb_mean - nisar_vb_mean
    diff_vb_var = vb_var - nisar_vb_var
    logging.info(
        'Nisar\'s VB update had mean difference {} and var difference {}\n'.
        format(diff_vb_mean, diff_vb_var))

    # Do a VBIS update
    vbis_mean, vbis_var, _ = vb.vbis_update(measurement, sm, prior, init_mean,
                                            init_var, init_alpha, init_xi)
    vbis_posterior = GaussianMixture(1, vbis_mean, vbis_var)

    nisar_vbis_mean = 0.154223416817080
    nisar_vbis_var = 0.00346064073274943
    diff_vbis_mean = vbis_mean - nisar_vbis_mean
    diff_vbis_var = vbis_var - nisar_vbis_var
    logging.info(
        'Nisar\'s VBIS update had mean difference {} and var difference {}\n'.
        format(diff_vbis_mean, diff_vbis_var))

    # Plot results
    likelihood_label = 'Likelihood of \'{}\''.format(measurement)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    sm.classes[measurement].plot(ax=ax,
                                 fill_between=False,
                                 label=likelihood_label,
                                 ls='--')
    ax.plot(x_space,
            prior.pdf(x_space),
            lw=1,
            label='prior pdf',
            c='grey',
            ls='--')

    ax.plot(x_space,
            vb_posterior.pdf(x_space),
            lw=2,
            label='VB posterior',
            c='r')
    ax.fill_between(x_space,
                    0,
                    vb_posterior.pdf(x_space),
                    alpha=0.2,
                    facecolor='r')
    ax.plot(x_space,
            vbis_posterior.pdf(x_space),
            lw=2,
            label='VBIS Posterior',
            c='g')
    ax.fill_between(x_space,
                    0,
                    vbis_posterior.pdf(x_space),
                    alpha=0.2,
                    facecolor='g')

    ax.set_title('VBIS Update')
    ax.legend()
    # Zoom in on the region where the posteriors have mass.
    ax.set_xlim([0, 0.4])
    ax.set_ylim([0, 7])
    plt.show()
Example #6
0
    def vbis_update(self,
                    measurement,
                    likelihood,
                    prior,
                    init_mean=0,
                    init_var=1,
                    init_alpha=0.5,
                    init_xi=1,
                    num_samples=None,
                    use_LWIS=False):
        """VB update with importance sampling for Gaussian and Softmax.

        Draws samples from an importance distribution (either the prior
        itself when ``use_LWIS`` is set, or a Gaussian centered on the
        VB-updated mean), reweights them by
        ``prior.pdf * likelihood / q.pdf`` and returns the weighted
        sample mean and covariance.

        Parameters
        ----------
        measurement : str
            Observed softmax class label.
        likelihood :
            Softmax model exposing ``classes`` (and optionally
            ``subclasses``).
        prior : GaussianMixture
            Prior belief; only its first component's moments are used.
        init_mean, init_var, init_alpha, init_xi :
            Initial values forwarded to ``vb_update`` (ignored when
            ``use_LWIS`` is True).
        num_samples : int, optional
            Number of importance samples; defaults to
            ``self.num_importance_samples``.
        use_LWIS : bool, optional
            If True, skip the VB step and sample from the prior.

        Returns
        -------
        (mean, variance, log_c_hat) -- scalars in 1-D problems, arrays
        otherwise; ``log_c_hat`` is NaN on the LWIS path.
        """
        if num_samples is None:
            num_samples = self.num_importance_samples

        if use_LWIS:
            # LWIS: importance distribution is the prior itself.
            q_mu = np.asarray(prior.means[0])
            log_c_hat = np.nan  # not computed on this path
        else:
            # Use VB update to center the importance distribution.
            q_mu, _var_VB, log_c_hat = self.vb_update(measurement, likelihood,
                                                      prior, init_mean,
                                                      init_var, init_alpha,
                                                      init_xi)

        q_var = np.asarray(prior.covariances[0])

        # Importance distribution
        q = GaussianMixture(1, q_mu, q_var)

        # Importance sampling correction. (The original pre-allocated a
        # zero weight vector here that was immediately overwritten.)
        x = np.asarray(q.rvs(size=num_samples))  # Sampled points
        if hasattr(likelihood, 'subclasses'):
            measurement_class = likelihood.subclasses[measurement]
        else:
            measurement_class = likelihood.classes[measurement]

        # Compute importance weights: w_i = p(x_i) * p(z|x_i) / q(x_i)
        w = prior.pdf(x) * measurement_class.probability(state=x) / q.pdf(x)
        w /= np.sum(w)  # Normalize weights

        mu_hat = np.sum(x.T * w, axis=-1)

        # Weighted second moment, vectorized (resolves the former
        # per-sample np.outer loop marked '<>TODO: optimize this'):
        # var_hat = sum_i w_i * outer(x_i, x_i) - outer(mu_hat, mu_hat)
        x_2d = x.reshape(num_samples, -1)  # 1-D samples become (n, 1)
        var_hat = np.einsum('i,ij,ik->jk', w, x_2d, x_2d)
        var_hat -= np.outer(mu_hat, mu_hat)

        # Ensure properly formatted output: collapse 1-element arrays
        # to scalars for 1-D problems.
        if mu_hat.size == 1 and mu_hat.ndim > 0:
            mu_post_vbis = mu_hat[0]
        else:
            mu_post_vbis = mu_hat
        if var_hat.size == 1:
            var_post_vbis = var_hat[0][0]
        else:
            var_post_vbis = var_hat

        logging.debug(
            'VBIS update found mean of {} and variance of {}.'.format(
                mu_post_vbis, var_post_vbis))

        return mu_post_vbis, var_post_vbis, log_c_hat