Пример #1
0
    def _twofilter_smoothing_ON(self, t, ti, info, phi, lwinfo, return_ess,
                                modif_forward, modif_info):
        """O(N) version of two-filter smoothing.

        This method should not be called directly, see twofilter_smoothing.

        Parameters
        ----------
        t : int
            time index in the forward filter
        ti : int
            corresponding time index in the information filter's history
        info : object
            information filter; its stored particles ``info.hist.X[ti]`` are read
        phi : callable
            test function ``phi(x_t, x_{t+1})`` whose smoothed expectation is estimated
        lwinfo : ndarray
            log-weights of the information filter at time ``ti``
            (mutated in place when ``modif_info`` is given — see below)
        return_ess : bool
            whether to also return the effective sample size of the O(N) weights
        modif_forward : ndarray or None
            optional modifier added to the forward log-weights before resampling;
            subtracted back from ``log_omega`` so the estimate stays unbiased
        modif_info : ndarray or None
            same as ``modif_forward`` but for the information filter's log-weights

        Returns
        -------
        est, or the tuple (est, ess) when ``return_ess`` is True
        """
        if modif_info is not None:
            lwinfo += modif_info  # NOTE: in-place update of the caller's array
        Winfo = rs.exp_and_normalise(lwinfo)
        I = rs.multinomial(Winfo)  # indices drawn from the information filter
        if modif_forward is not None:
            lw = self.wgts[t].lw + modif_forward
            W = rs.exp_and_normalise(lw)
        else:
            W = self.wgts[t].W
        J = rs.multinomial(W)  # indices drawn from the forward filter
        # importance weight of each sampled pair (X_t^J, X_{t+1}^I)
        log_omega = self.fk.logpt(t + 1, self.X[t][J], info.hist.X[ti][I])
        if modif_forward is not None:
            log_omega -= modif_forward[J]  # undo the proposal modification
        if modif_info is not None:
            log_omega -= modif_info[I]  # undo the proposal modification
        Om = rs.exp_and_normalise(log_omega)
        est = np.average(phi(self.X[t][J], info.hist.X[ti][I]),
                         axis=0,
                         weights=Om)
        if return_ess:
            # ESS of the normalised weights: 1 / sum(Om^2)
            return (est, 1. / np.sum(Om**2))
        else:
            return est
Пример #2
0
    def backward(self):
        """Backward recursion.

        Upon completion, ``self.smth`` is a list of length T containing the
        marginal smoothing probabilities.

        Note
        ----
        Performs the forward step in case it has not been performed before.
        """
        if not self.filt:
            self.forward()
        self.smth = [self.filt[-1]]
        dim = self.hmm.dim
        log_trans = np.log(self.hmm.trans_mat)
        # cost to go: log-likelihood of y_{t+1:T} given x_t = k
        cost_to_go = np.zeros(dim)
        backward_pairs = reversed(list(zip(self.filt[:-1], self.logft[1:])))
        for filt_t, logft_next in backward_pairs:
            cost_to_go = np.array(
                [rs.log_sum_exp(log_trans[k, :] + logft_next + cost_to_go)
                 for k in range(dim)])
            self.smth.append(rs.exp_and_normalise(np.log(filt_t) + cost_to_go))
        self.smth.reverse()
Пример #3
0
    def backward_sampling_qmc(self, M):
        """QMC version of backward sampling.

        Parameters
        ----------
        M : int
            number of trajectories

        Returns
        -------
        paths : list of length T
            paths[t] contains the M sampled states at time t

        Note
        ----
        Use this only on the history of a SQMC algorithm.
        """
        self._check_h_orders()
        # one (quasi-random) uniform per trajectory and per time step
        u = qmc.sobol(M, self.T)
        # the final particles have not been sorted
        hT = hilbert.hilbert_sort(self.X[-1])
        # searchsorted to avoid having to sort in place u according to u[:,T-1]
        idx = np.searchsorted(np.cumsum(self.wgts[-1].W[hT]), u[:, -1])
        paths = [
            self.X[-1][hT][idx],
        ]
        for t in reversed(range(self.T - 1)):
            idx = np.empty(M, 'int')
            for m, xn in enumerate(paths[-1]):
                # backward weights: W_t^i * p_{t+1}(xn | x_t^i)
                lwm = self.wgts[t].lw + self.fk.logpt(t + 1, self.X[t], xn)
                # use ordered version here
                cw = np.cumsum(rs.exp_and_normalise(lwm[self.h_orders[t]]))
                # inverse-CDF sampling driven by the QMC point u[m, t]
                idx[m] = np.searchsorted(cw, u[m, t])
            paths.append(self.X[t][self.h_orders[t]][idx])
        paths.reverse()
        return paths
Пример #4
0
 def update(self, smc):
     """On-line smoothing update at time ``smc.t``.

     For each new particle ``smc.X[n]``, re-weights the previous particle
     cloud by the transition density, then stores in ``self.Phi[n]`` the
     weighted average of the accumulated additive functional plus its
     increment at this time step (O(N^2) overall).
     """
     phi_prev = self.Phi.copy()
     for n in range(smc.N):
         logw_n = (self.prev_logw
                   + smc.fk.logpt(smc.t, self.prev_X, smc.X[n]))
         weights_n = rs.exp_and_normalise(logw_n)
         increment = smc.fk.add_func(smc.t, self.prev_X, smc.X[n])
         self.Phi[n] = np.average(phi_prev + increment,
                                  axis=0, weights=weights_n)
Пример #5
0
 def update_path_sampling_est(self, x, delta):
     """Advance the path-sampling (thermodynamic integration) estimate.

     Integrates the weighted mean of ``x.llik`` over a tempering increment
     of size ``delta`` using the trapezoidal rule on a 10-point grid, and
     appends the updated estimate to ``x.path_sampling``.
     """
     npts = 10
     step = delta / (npts - 1)
     estimate = x.path_sampling[-1]
     for i, eps in enumerate(np.linspace(0., delta, npts)):
         # trapezoidal rule: end points get half weight
         trap_w = 1. if 0 < i < npts - 1 else 0.5
         mean_llik = np.average(x.llik,
                                weights=rs.exp_and_normalise(eps * x.llik))
         estimate += trap_w * step * mean_llik
     x.path_sampling.append(estimate)
Пример #6
0
    def _backward_sampling_ON2(self, M, idx):
        """O(N^2) version of backward sampling.

        not meant to be called directly, see backward_sampling

        Parameters
        ----------
        M : int
            number of trajectories
        idx : (T, M) int ndarray
            ancestor indices; row T-1 must already be filled by the caller,
            and rows 0..T-2 are overwritten in place by this method
        """
        for m in range(M):  # one trajectory at a time
            for t in reversed(range(self.T - 1)):
                # backward weights: W_t^i * p_{t+1}(x_{t+1}^{idx} | x_t^i)
                lwm = (self.wgts[t].lw + self.fk.logpt(
                    t + 1, self.X[t], self.X[t + 1][idx[t + 1, m]]))
                idx[t, m] = rs.multinomial_once(rs.exp_and_normalise(lwm))
Пример #7
0
def chi_squared_distance(old_lw: np.ndarray, new_lw: np.ndarray) -> float:
    """Estimates the chi-squared distance between two distributions from their discrete representations on the same support.
    """
    assert old_lw.shape == new_lw.shape
    weights_old = rs.exp_and_normalise(old_lw)
    with np.errstate(invalid='ignore'):
        # log of the importance ratio; inf - inf produces NaN, zeroed below
        log_ratio = new_lw - old_lw
        log_ratio[np.isnan(log_ratio)] = 0
    max_log = np.max(log_ratio)
    if max_log == -np.inf:
        # new distribution has no mass anywhere the old one does
        return np.inf
    ratio = np.exp(log_ratio - max_log)  # rescaled for numerical stability
    numerator = np.sum(weights_old * ratio ** 2)
    denominator = np.sum(weights_old * ratio) ** 2
    return numerator / denominator - 1
Пример #8
0
    def sample(self, N=1):
        """Sample N trajectories from the posterior.

        Parameters
        ----------
        N : int
            number of trajectories to sample (default: 1)

        Returns
        -------
        paths : (T, N) int ndarray
            paths[t, n] is the state of trajectory n at time t

        Note
        ----
        Performs the forward step in case it has not been performed.

        """
        if not self.filt:
            self.forward()
        # use builtin int: the np.int alias was deprecated in NumPy 1.20
        # and removed in NumPy 1.24
        paths = np.empty((len(self.filt), N), int)
        paths[-1, :] = rs.multinomial(self.filt[-1], M=N)
        log_trans = np.log(self.hmm.trans_mat)
        for t, f in reversed(list(enumerate(self.filt[:-1]))):
            log_f = np.log(f)  # hoisted: does not depend on n
            for n in range(N):
                probs = rs.exp_and_normalise(log_trans[:, paths[t + 1, n]]
                                             + log_f)
                paths[t, n] = rs.multinomial_once(probs)
        return paths
Пример #9
0
def tv_distance(x, y):
    """TV distance between two discrete distributions.

    Parameters
    ----------
    x, y : array-like
        the weights of the two distributions (same length)

    Returns
    -------
    float
        0.5 * sum_i |x_i - y_i|
    """
    # np.asarray generalises to plain sequences; np.sum/np.abs iterate at
    # C speed instead of the builtin sum's Python-level loop
    return 0.5 * np.sum(np.abs(np.asarray(x) - np.asarray(y)))


# Monte-Carlo experiment: for each trial and each value of tau, weight N
# standard-normal samples by a Gaussian likelihood centred at `bias` with
# precision tau, resample with every scheme in rs_schemes, and record the
# TV distance between the weights and the empirical frequencies of the
# resampled indices.
# NOTE(review): ntrials, taus, N, bias, rs_schemes, stats, plt and sb are
# assumed to be defined earlier in the file — confirm against full source.
results = {key: np.zeros((ntrials, len(taus))) for key in rs_schemes}

for i in range(ntrials):
    x = stats.norm.rvs(size=N)
    for j, tau in enumerate(taus):
        lw = -.5 * tau * (bias - x)**2
        W = rs.exp_and_normalise(lw)
        for scheme in rs_schemes:
            A = rs.resampling(scheme, W)
            counts = np.bincount(A, minlength=N)
            # counts start at 0
            results[scheme][i, j] = tv_distance(W, counts / N)

# PLOTS
# =====
savefigs = True
plt.style.use('ggplot')
sb.set_palette(sb.dark_palette("lightgray", n_colors=4, reverse=True))

# Actual figure
plt.figure()
for k, scheme in enumerate(rs_schemes):