Example #1
    def backward_sampling_qmc(self, M):
        """QMC version of backward sampling.

        Parameters
        ----------
        M : int
            number of trajectories

        Note
        ----
        Use this only on the history of an SQMC algorithm.
        """
        self._check_h_orders()
        u = qmc.sobol(M, self.T)
        # the final particles have not been sorted
        hT = hilbert.hilbert_sort(self.X[-1])
        # searchsorted to avoid having to sort in place u according to u[:,T-1]
        idx = np.searchsorted(np.cumsum(self.wgts[-1].W[hT]), u[:, -1])
        paths = [
            self.X[-1][hT][idx],
        ]
        for t in reversed(range(self.T - 1)):
            idx = np.empty(M, 'int')
            for m, xn in enumerate(paths[-1]):
                lwm = self.wgts[t].lw + self.fk.logpt(t + 1, self.X[t], xn)
                # use ordered version here
                cw = np.cumsum(rs.exp_and_normalise(lwm[self.h_orders[t]]))
                idx[m] = np.searchsorted(cw, u[m, t])
            paths.append(self.X[t][self.h_orders[t]][idx])
        paths.reverse()
        return paths
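
This method lives on the particle-history object of the particles library. Below is a minimal usage sketch, assuming the usual particles.SMC interface and a Feynman-Kac object with a well-defined logpt (e.g. built via ssm.Bootstrap); MyModel and data are placeholders, not part of the library:

import particles
from particles import state_space_models as ssm

fk = ssm.Bootstrap(ssm=MyModel(), data=data)   # MyModel, data: user-supplied placeholders
alg = particles.SMC(fk=fk, N=500, qmc=True, store_history=True)  # qmc=True -> SQMC
alg.run()
paths = alg.hist.backward_sampling_qmc(M=50)   # 50 smoothing trajectories, as defined above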
Example #2
    def generate_particles(self):
        if self.qmc:
            u = qmc.sobol(self.N, self.fk.du).squeeze()
            # squeeze: must be (N,) if du=1
            self.X = self.fk.Gamma0(u)
        else:
            self.X = self.fk.M0(self.N)
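
The squeeze() matters because qmc.sobol(N, d) returns an (N, d) array of points in the unit hypercube, while Gamma0 typically expects a flat (N,) vector when du=1. A quick shape check (a sketch, assuming particles.qmc is the module imported as qmc above):

from particles import qmc

u2 = qmc.sobol(8, 2)             # shape (8, 2): 8 quasi-random points in [0, 1)^2
u1 = qmc.sobol(8, 1).squeeze()   # shape (8,): squeeze drops the trailing axis when du=1
print(u2.shape, u1.shape)        # -> (8, 2) (8,)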
Example #3
    def resample_move_qmc(self):
        self.rs_flag = True  # we *always* resample in SQMC
        u = qmc.sobol(self.N, self.fk.du + 1)
        tau = np.argsort(u[:, 0])
        self.h_order = hilbert.hilbert_sort(self.X)
        self.A = self.h_order[rs.inverse_cdf(u[tau, 0],
                                             self.aux.W[self.h_order])]
        self.Xp = self.X[self.A]
        v = u[tau, 1:].squeeze()
        # v is (N,) if du=1, (N, du) otherwise
        self.X = self.fk.Gamma(self.t, self.Xp, v)
        self.reset_weights()
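
The SQMC-specific part of this method is the resampling step: particles are ordered along the Hilbert curve and ancestors are chosen by inverting the CDF of the reordered weights against the first (sorted) Sobol coordinate. A standalone sketch of that step, with made-up particles and uniform weights:

import numpy as np
from particles import hilbert, qmc
from particles import resampling as rs

N, d = 100, 2
X = np.random.randn(N, d)                  # toy particle cloud
W = np.full(N, 1.0 / N)                    # normalised weights (uniform here)

u = qmc.sobol(N, d + 1)                    # first column drives the resampling
tau = np.argsort(u[:, 0])                  # inverse_cdf expects ordered uniforms
h_order = hilbert.hilbert_sort(X)          # Hilbert ordering of the particles
A = h_order[rs.inverse_cdf(u[tau, 0], W[h_order])]   # ancestor indices
Xp = X[A]                                  # resampled particles, input to the Gamma move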
Example #4
    def rsample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)

        n_samples = int(torch.prod(torch.tensor(shape)))
        if n_samples == 1:
            print("Warning: RQMC sample size should be greater than 1.")
        # scrambled=1 -> randomised (scrambled) Sobol points; scrambled=0 would give the plain sequence
        rand = qmc.sobol(N=n_samples, dim=self.dim, scrambled=1)
        #rand = rqmc_py.random_sequence_rqmc(size_mv=self.dim, i=0, n=n_samples)
        # rand = qmc_py.sobol_sequence(N=n_samples, DIMEN=self.dim, IFLAG=1,
        #                              iSEED=np.random.randint(10**5))  # .transpose()
        # reshape to the requested sample shape; multivariate draws get an extra
        # trailing dimension of size self.dim (FloatTensor: presumably torch.FloatTensor)
        if self.dim == 1:
            rand = FloatTensor(rand).reshape(shape)
        else:
            rand = FloatTensor(rand).reshape(shape + torch.Size([self.dim]))

        return self.low + rand * (self.high - self.low)
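
The return line is the usual inverse-CDF map for a uniform box: low + u * (high - low). A stripped-down sketch of the same transformation outside the class, assuming the same qmc.sobol helper as in the examples above and plain PyTorch tensors for the bounds:

import torch
from particles import qmc   # assumption: same sobol helper as in the examples above

low = torch.tensor([0.0, -1.0])
high = torch.tensor([1.0, 1.0])
n_samples, dim = 64, 2

u = torch.as_tensor(qmc.sobol(n_samples, dim), dtype=torch.float32)  # (64, 2) points in [0, 1)^2
samples = low + u * (high - low)   # RQMC draws on the box [0, 1] x [-1, 1]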