Example no. 1
    def __call__(self, model, particle_weights, particle_locations):
        """
        Resample the particles cluster by cluster, passing each cluster found
        by ``clustering.particle_clusters`` to the secondary resampler.
        """

        # Allocate new arrays to hold the weights and locations.
        new_weights = np.empty(particle_weights.shape)
        new_locs = np.empty(particle_locations.shape)

        # Loop over clusters, calling the secondary resampler for each.
        # The loop should include -1 if noise was found.
        for cluster_label, cluster_particles in clustering.particle_clusters(
                particle_locations,
                particle_weights,
                eps=self.eps,
                min_particles=self.min_particles,
                metric=self.metric,
                weighted=self.weighted,
                w_pow=self.w_pow,
                quiet=self.quiet):

            # If we are resampling the NOISE label, we must use the global moments.
            if cluster_label == clustering.NOISE:
                extra_args = {
                    "precomputed_mean":
                    particle_meanfn(particle_weights, particle_locations,
                                    lambda x: x),
                    "precomputed_cov":
                    particle_covariance_mtx(particle_weights,
                                            particle_locations)
                }
            else:
                extra_args = {}

            # Pass the particles in that cluster to the secondary resampler
            # and record the new weights and locations.
            cluster_ws, cluster_locs = self.secondary_resampler(
                model, particle_weights[cluster_particles],
                particle_locations[cluster_particles], **extra_args)

            # Renormalize the weights of each resampled particle by the total
            # weight of the cluster to which it belongs.
            cluster_ws /= np.sum(particle_weights[cluster_particles])

            # Store the updated cluster.
            new_weights[cluster_particles] = cluster_ws
            new_locs[cluster_particles] = cluster_locs

        # Assert that we have not introduced any NaNs or Infs by resampling.
        assert np.all(
            np.logical_not(
                np.logical_or(np.isnan(new_locs), np.isinf(new_locs))))

        return new_weights, new_locs
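
When the NOISE label is resampled above, the global moments are precomputed
and passed through extra_args. As a point of reference, here is a minimal
sketch (not the library implementations) of the weighted mean and covariance
that particle_meanfn(w, l, lambda x: x) and particle_covariance_mtx(w, l) are
expected to produce for a normalized weight vector:

    import numpy as np

    def weighted_mean(w, locs):
        # sum_i w_i x_i for weights w that sum to one.
        return np.sum(w[:, None] * locs, axis=0)

    def weighted_cov(w, locs):
        # E[(x - mu)(x - mu)^T] under the particle distribution.
        xc = locs - weighted_mean(w, locs)
        return (w[:, None] * xc).T @ xc

    w = np.array([0.25, 0.25, 0.5])
    locs = np.array([[0.0, 1.0], [2.0, 1.0], [1.0, 3.0]])
    print(weighted_mean(w, locs))
    print(weighted_cov(w, locs))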
Example no. 2
    def __call__(self, model, particle_weights, particle_locations):
        """
        Resample the particles cluster by cluster, passing each cluster found
        by ``clustering.particle_clusters`` to the secondary resampler.
        """
        
        # Allocate new arrays to hold the weights and locations.        
        new_weights = np.empty(particle_weights.shape)
        new_locs    = np.empty(particle_locations.shape)
        
        # Loop over clusters, calling the secondary resampler for each.
        # The loop should include -1 if noise was found.
        for cluster_label, cluster_particles in clustering.particle_clusters(
                particle_locations, particle_weights,
                eps=self.eps, min_particles=self.min_particles, metric=self.metric,
                weighted=self.weighted, w_pow=self.w_pow,
                quiet=self.quiet
        ):
        
            # If we are resampling the NOISE label, we must use the global moments.
            if cluster_label == clustering.NOISE:
                extra_args = {
                    "precomputed_mean": particle_meanfn(particle_weights, particle_locations, lambda x: x),
                    "precomputed_cov":  particle_covariance_mtx(particle_weights, particle_locations)
                }
            else:
                extra_args = {}
            
            # Pass the particles in that cluster to the secondary resampler
            # and record the new weights and locations.
            cluster_ws, cluster_locs = self.secondary_resampler(model,
                particle_weights[cluster_particles],
                particle_locations[cluster_particles],
                **extra_args
            )
            
            # Renormalize the weights of each resampled particle by the total
            # weight of the cluster to which it belongs.
            cluster_ws /= np.sum(particle_weights[cluster_particles])
            
            # Store the updated cluster.
            new_weights[cluster_particles] = cluster_ws
            new_locs[cluster_particles]    = cluster_locs

        # Assert that we have not introduced any NaNs or Infs by resampling.
        assert np.all(np.logical_not(np.logical_or(
                np.isnan(new_locs), np.isinf(new_locs)
            )))
            
        return new_weights, new_locs
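
The loop in both listings relies on clustering.particle_clusters to yield
(label, index_mask) pairs, with clustering.NOISE marking unclustered
particles. A hypothetical sketch of such a generator, built on scikit-learn's
DBSCAN rather than the actual qinfer.clustering code (the keyword names below
mirror the call site; weighted, w_pow, and quiet are omitted):

    import numpy as np
    from sklearn.cluster import DBSCAN

    def particle_clusters(locs, weights, eps=0.5, min_particles=5,
                          metric='euclidean'):
        # DBSCAN labels noise points -1, matching the "-1 if noise was
        # found" comment above; sample_weight makes the density weighted.
        labels = DBSCAN(eps=eps, min_samples=min_particles, metric=metric).fit(
            locs, sample_weight=weights
        ).labels_
        for label in np.unique(labels):
            yield label, labels == label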
Example no. 3
    def __call__(self, model, particle_weights, particle_locations, precomputed_mean=None, precomputed_cov=None):
        """
        Resample the particles according to the algorithm given in
        [LW01]_.
        """
        
        # Give shorter names to weights and locations.
        w, l = particle_weights, particle_locations
        
        # Possibly recompute moments, if not provided.
        if precomputed_mean is None:
            mean = particle_meanfn(w, l, lambda x: x)
        else:
            mean = precomputed_mean
        if precomputed_cov is None:
            cov = particle_covariance_mtx(w, l)
        else:
            cov = precomputed_cov
        
        # Parameters in the Liu and West algorithm.
        a, h = self._a, self._h
        if la.norm(cov, 'fro') == 0:
            # The norm of the covariance is identically zero, so the error
            # estimated in the next step would be meaningless. We fix that by
            # replacing the covariance with a small multiple of the identity.
            warnings.warn(
                "Covariance has zero norm; adding in small covariance in "
                "resampler. Consider increasing n_particles to improve covariance "
                "estimates.",
                ResamplerWarning
            )
            cov = self._zero_cov_comp * np.eye(cov.shape[0])
        S, S_err = la.sqrtm(cov, disp=False)
        if not np.isfinite(S_err):
            raise ResamplerError(
                "Infinite error in computing the square root of the "
                "covariance matrix. Check that n_ess is not too small.")
        S = np.real(h * S)
        n_ms, n_mp = l.shape
        
        new_locs = np.empty(l.shape)        
        cumsum_weights = np.cumsum(w)
        
        idxs_to_resample = np.arange(n_ms)
        
        # Preallocate js and mus so that we don't have rapid allocation and
        # deallocation.
        js = np.empty(idxs_to_resample.shape, dtype=int)
        mus = np.empty(l.shape, dtype=l.dtype)
        
        # Loop as long as there are any particles left to resample.
        n_iters = 0
            
        # Draw j with probability particle_weights[j].
        # We do this by drawing random variates uniformly on the interval
        # [0, 1], then seeing where they fall in the CDF.
        js[:] = cumsum_weights.searchsorted(
            np.random.random((idxs_to_resample.size,)),
            side='right'
        )
        
        while idxs_to_resample.size and n_iters < self._maxiter:
            # Keep track of how many iterations we used.
            n_iters += 1
            
            # Set mu_i to a x_j + (1 - a) mu.
            mus[...] = a * l[js,:] + (1 - a) * mean
            
            # Draw x_i from N(mu_i, S).
            new_locs[idxs_to_resample, :] = mus + np.dot(S, np.random.randn(n_mp, mus.shape[0])).T
            
            # Now remove any valid models from the list of particles still to
            # be resampled. We write this out in a longer form than strictly
            # necessary so that we can check assertions as we go, which helps
            # catch models that do not satisfy the expected postconditions.
            resample_locs = new_locs[idxs_to_resample, :]
            if self._postselect:
                valid_mask = model.are_models_valid(resample_locs)
            else:
                valid_mask = np.ones((resample_locs.shape[0],), dtype=bool)
            
            assert valid_mask.ndim == 1, "are_models_valid returned tensor, expected vector."
            
            n_invalid = np.sum(np.logical_not(valid_mask))
            
            if self._debug and n_invalid > 0:
                logger.debug(
                    "LW resampler found {} invalid particles; repeating.".format(
                        n_invalid
                    )
                )
            
            assert (
                (
                    len(valid_mask.shape) == 1
                    or len(valid_mask.shape) == 2 and valid_mask.shape[-1] == 1
                ) and valid_mask.shape[0] == resample_locs.shape[0]
            ), (
                "are_models_valid returned wrong shape {} "
                "for input of shape {}."
            ).format(valid_mask.shape, resample_locs.shape)
            
            idxs_to_resample = idxs_to_resample[np.nonzero(np.logical_not(
                valid_mask
            ))[0]]

            # This may look a little weird, but it should delete the unused
            # elements of js, so that we don't need to reallocate.
            js = js[np.logical_not(valid_mask)]
            mus = mus[:idxs_to_resample.size, :]
            
        if idxs_to_resample.size:
            # We failed to force all models to be valid within maxiter attempts.
            # This means that we could be propagating out invalid models, and
            # so we should warn about that.
            warnings.warn((
                "Liu-West resampling failed to find valid models for {} "
                "particles within {} iterations."
            ).format(idxs_to_resample.size, self._maxiter), ResamplerWarning)
            
        if self._debug:
            logger.debug("LW resampling completed in {} iterations.".format(n_iters))

        # Now we reset the weights to be uniform, letting the density of
        # particles represent the information that used to be stored in the
        # weights. This is done by SMCUpdater, and so we simply need to return
        # the new locations here.
        return np.ones((w.shape[0],)) / w.shape[0], new_locs
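
The cumsum-plus-searchsorted pattern above is inverse-CDF sampling: a uniform
variate u lands in the CDF bin of index j with probability w[j], so j is
drawn according to the particle weights. A standalone check:

    import numpy as np

    w = np.array([0.1, 0.2, 0.3, 0.4])
    cumsum_weights = np.cumsum(w)                  # [0.1, 0.3, 0.6, 1.0]
    u = np.random.random((100000,))
    js = cumsum_weights.searchsorted(u, side='right')

    # Empirical draw frequencies should approximate w.
    print(np.bincount(js, minlength=w.size) / u.size)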
Example no. 4
    def __call__(self, model, particle_weights, particle_locations, precomputed_mean=None, precomputed_cov=None):
        """
        Resample the particles according to the algorithm given in
        [LW01]_.
        """
        
        # Give shorter names to weights and locations.
        w, l = particle_weights, particle_locations
        
        # Possibly recompute moments, if not provided.
        if precomputed_mean is None:
            mean = particle_meanfn(w, l, lambda x: x)
        else:
            mean = precomputed_mean
        if precomputed_cov is None:
            cov = particle_covariance_mtx(w, l)
        else:
            cov = precomputed_cov
        
        # Parameters in the Liu and West algorithm.
        a, h = self._a, self._h
        S, S_err = la.sqrtm(cov, disp=False)
        S = np.real(h * S)
        n_ms, n_mp = l.shape
        
        new_locs = np.empty(l.shape)        
        cumsum_weights = np.cumsum(w)
        
        idxs_to_resample = np.arange(n_ms)
        
        # Preallocate js and mus so that we don't have rapid allocation and
        # deallocation.
        js = np.empty(idxs_to_resample.shape, dtype=int)
        mus = np.empty(l.shape, dtype=l.dtype)
        
        # Loop as long as there are any particles left to resample.
        n_iters = 0
        while idxs_to_resample.size and n_iters < self._maxiter:
            # Keep track of how many iterations we used.
            n_iters += 1
            
            # Draw j with probability particle_weights[j].
            # We do this by drawing random variates uniformly on the interval
            # [0, 1], then seeing where they fall in the CDF.
            js[:] = cumsum_weights.searchsorted(
                np.random.random((idxs_to_resample.size,)),
                side='right'
            )
            
            # Set mu_i to a x_j + (1 - a) mu.
            mus[...] = a * l[js,:] + (1 - a) * mean
            
            # Draw x_i from N(mu_i, S).
            new_locs[idxs_to_resample, :] = mus + np.dot(S, np.random.randn(n_mp, mus.shape[0])).T
            
            # Now remove any valid models from the list of particles still to
            # be resampled.
            idxs_to_resample = idxs_to_resample[np.nonzero(np.logical_not(
                model.are_models_valid(new_locs[idxs_to_resample, :])
            ))[0]]

            # This may look a little weird, but it should delete the unused
            # elements of js, so that we don't need to reallocate.
            js = js[:idxs_to_resample.size]
            mus = mus[:idxs_to_resample.size, :]
            
        if idxs_to_resample.size:
            # We failed to force all models to be valid within maxiter attempts.
            # This means that we could be propagating out invalid models, and
            # so we should warn about that.
            warnings.warn((
                "Liu-West resampling failed to find valid models for {} "
                "particles within {} iterations."
            ).format(idxs_to_resample.size, self._maxiter), ResamplerWarning)

        # Now we reset the weights to be uniform, letting the density of
        # particles represent the information that used to be stored in the
        # weights. This is done by SMCUpdater, and so we simply need to return
        # the new locations here.
        return np.ones((w.shape[0],)) / w.shape[0], new_locs
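
The "may look a little weird" truncations in this listing lean on the fact
that basic slicing of a NumPy array returns a view, so js and mus shrink on
each pass without any fresh allocation:

    import numpy as np

    js = np.arange(10)
    js = js[:4]                 # a view into the original buffer, not a copy
    print(js.base is not None)  # True: still backed by the first allocation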
Example no. 5
    def __call__(self,
                 model,
                 particle_weights,
                 particle_locations,
                 precomputed_mean=None,
                 precomputed_cov=None):
        """
        Resample the particles according to the algorithm given in
        [LW01]_.
        """

        # Give shorter names to weights and locations.
        w, l = particle_weights, particle_locations

        # Possibly recompute moments, if not provided.
        if precomputed_mean is None:
            mean = particle_meanfn(w, l, lambda x: x)
        else:
            mean = precomputed_mean
        if precomputed_cov is None:
            cov = particle_covariance_mtx(w, l)
        else:
            cov = precomputed_cov

        # Parameters in the Liu and West algorithm.
        a, h = self._a, self._h
        if la.norm(cov, 'fro') == 0:
            # The norm of the covariance is identically zero, so the error
            # estimated in the next step would be meaningless. We fix that by
            # replacing the covariance with a small multiple of the identity.
            warnings.warn(
                "Covariance has zero norm; adding in small covariance in "
                "resampler. Consider increasing n_particles to improve covariance "
                "estimates.", ResamplerWarning)
            cov = self._zero_cov_comp * np.eye(cov.shape[0])
        S, S_err = la.sqrtm(cov, disp=False)
        if not np.isfinite(S_err):
            raise ResamplerError(
                "Infinite error in computing the square root of the "
                "covariance matrix. Check that n_ess is not too small.")
        S = np.real(h * S)
        n_ms, n_mp = l.shape

        new_locs = np.empty(l.shape)
        cumsum_weights = np.cumsum(w)

        idxs_to_resample = np.arange(n_ms)

        # Preallocate js and mus so that we don't have rapid allocation and
        # deallocation.
        js = np.empty(idxs_to_resample.shape, dtype=int)
        mus = np.empty(l.shape, dtype=l.dtype)

        # Loop as long as there are any particles left to resample.
        n_iters = 0

        # Draw j with probability particle_weights[j].
        # We do this by drawing random variates uniformly on the interval
        # [0, 1], then seeing where they fall in the CDF.
        js[:] = cumsum_weights.searchsorted(np.random.random(
            (idxs_to_resample.size, )),
                                            side='right')

        while idxs_to_resample.size and n_iters < self._maxiter:
            # Keep track of how many iterations we used.
            n_iters += 1

            # Set mu_i to a x_j + (1 - a) mu.
            mus[...] = a * l[js, :] + (1 - a) * mean

            # Draw x_i from N(mu_i, S).
            new_locs[idxs_to_resample, :] = mus + np.dot(
                S, self._kernel(n_mp, mus.shape[0])).T

            # Now remove any valid models from the list of particles still to
            # be resampled. We write this out in a longer form than strictly
            # necessary so that we can check assertions as we go, which helps
            # catch models that do not satisfy the expected postconditions.
            resample_locs = new_locs[idxs_to_resample, :]
            if self._postselect:
                valid_mask = model.are_models_valid(resample_locs)
            else:
                valid_mask = np.ones((resample_locs.shape[0], ), dtype=bool)

            assert valid_mask.ndim == 1, "are_models_valid returned tensor, expected vector."

            n_invalid = np.sum(np.logical_not(valid_mask))

            if self._debug and n_invalid > 0:
                logger.debug(
                    "LW resampler found {} invalid particles; repeating.".
                    format(n_invalid))

            assert ((len(valid_mask.shape) == 1 or len(valid_mask.shape) == 2
                     and valid_mask.shape[-1] == 1)
                    and valid_mask.shape[0] == resample_locs.shape[0]), (
                        "are_models_valid returned wrong shape {} "
                        "for input of shape {}.").format(
                            valid_mask.shape, resample_locs.shape)

            idxs_to_resample = idxs_to_resample[np.nonzero(
                np.logical_not(valid_mask))[0]]

            # This may look a little weird, but it should delete the unused
            # elements of js, so that we don't need to reallocate.
            js = js[np.logical_not(valid_mask)]
            mus = mus[:idxs_to_resample.size, :]

        if idxs_to_resample.size:
            # We failed to force all models to be valid within maxiter attempts.
            # This means that we could be propagating out invalid models, and
            # so we should warn about that.
            warnings.warn(
                ("Liu-West resampling failed to find valid models for {} "
                 "particles within {} iterations.").format(
                     idxs_to_resample.size, self._maxiter), ResamplerWarning)

        if self._debug:
            logger.debug(
                "LW resampling completed in {} iterations.".format(n_iters))

        # Now we reset the weights to be uniform, letting the density of
        # particles represent the information that used to be stored in the
        # weights. This is done by SMCUpdater, and so we simply need to return
        # the new locations here.
        return np.ones((w.shape[0], )) / w.shape[0], new_locs
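
This variant draws its noise through self._kernel, which presumably defaults
to np.random.randn as in the other listings (an assumption, not confirmed
here). Whatever the kernel, the Liu-West update preserves the first two
moments whenever a**2 + h**2 == 1, which a quick one-dimensional check
illustrates:

    import numpy as np

    a = 0.9
    h = np.sqrt(1 - a**2)                     # enforce a**2 + h**2 == 1

    x = 2.0 * np.random.randn(200000) + 3.0   # particles with mean 3, var 4
    mu, var = x.mean(), x.var()

    # x_new = a x_j + (1 - a) mu + h sigma z has mean mu and variance
    # (a**2 + h**2) var = var.
    draws = (a * np.random.choice(x, x.size) + (1 - a) * mu
             + h * np.sqrt(var) * np.random.randn(x.size))
    print(mu, var)
    print(draws.mean(), draws.var())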
Example no. 6
    def __call__(self,
                 model,
                 particle_weights,
                 particle_locations,
                 precomputed_mean=None,
                 precomputed_cov=None):
        """
        Resample the particles according to the algorithm given in
        [LW01]_.
        """

        # Give shorter names to weights and locations.
        w, l = particle_weights, particle_locations

        # Possibly recompute moments, if not provided.
        if precomputed_mean is None:
            mean = particle_meanfn(w, l, lambda x: x)
        else:
            mean = precomputed_mean
        if precomputed_cov is None:
            cov = particle_covariance_mtx(w, l)
        else:
            cov = precomputed_cov

        # Parameters in the Liu and West algorithm.
        a, h = self._a, self._h
        S, S_err = la.sqrtm(cov, disp=False)
        S = np.real(h * S)
        n_ms, n_mp = l.shape

        new_locs = np.empty(l.shape)
        cumsum_weights = np.cumsum(w)

        idxs_to_resample = np.arange(n_ms)

        # Preallocate js and mus so that we don't have rapid allocation and
        # deallocation.
        js = np.empty(idxs_to_resample.shape, dtype=int)
        mus = np.empty(l.shape, dtype=l.dtype)

        # Loop as long as there are any particles left to resample.
        n_iters = 0
        while idxs_to_resample.size and n_iters < self._maxiter:
            # Keep track of how many iterations we used.
            n_iters += 1

            # Draw j with probability particle_weights[j].
            # We do this by drawing random variates uniformly on the interval
            # [0, 1], then seeing where they fall in the CDF.
            js[:] = cumsum_weights.searchsorted(np.random.random(
                (idxs_to_resample.size, )),
                                                side='right')

            # Set mu_i to a x_j + (1 - a) mu.
            mus[...] = a * l[js, :] + (1 - a) * mean

            # Draw x_i from N(mu_i, S).
            new_locs[idxs_to_resample, :] = mus + np.dot(
                S, np.random.randn(n_mp, mus.shape[0])).T

            # Now remove any valid models from the list of particles still to
            # be resampled.
            idxs_to_resample = idxs_to_resample[np.nonzero(
                np.logical_not(
                    model.are_models_valid(new_locs[idxs_to_resample, :])))[0]]

            # This may look a little weird, but it should delete the unused
            # elements of js, so that we don't need to reallocate.
            js = js[:idxs_to_resample.size]
            mus = mus[:idxs_to_resample.size, :]

        if idxs_to_resample.size:
            # We failed to force all models to be valid within maxiter attempts.
            # This means that we could be propagating out invalid models, and
            # so we should warn about that.
            warnings.warn(
                ("Liu-West resampling failed to find valid models for {} "
                 "particles within {} iterations.").format(
                     idxs_to_resample.size, self._maxiter), ResamplerWarning)

        # Now we reset the weights to be uniform, letting the density of
        # particles represent the information that used to be stored in the
        # weights. This is done by SMCUpdater, and so we simply need to return
        # the new locations here.
        return np.ones((w.shape[0], )) / w.shape[0], new_locs
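
Putting the pieces of the loop together, a single multivariate Liu-West draw
looks like the following standalone sketch, with NumPy/SciPy moments standing
in for particle_meanfn and particle_covariance_mtx:

    import numpy as np
    from scipy import linalg as la

    a = 0.98
    h = np.sqrt(1 - a**2)

    w = np.array([0.2, 0.3, 0.5])
    locs = np.array([[0.0, 1.0], [2.0, 0.0], [1.0, 2.0]])

    mean = np.average(locs, weights=w, axis=0)
    cov = np.cov(locs.T, aweights=w, ddof=0)
    S = np.real(h * la.sqrtm(cov))

    j = np.random.choice(locs.shape[0], p=w)  # ancestor drawn by weight
    mu = a * locs[j] + (1 - a) * mean         # shrink toward the global mean
    x_new = mu + S @ np.random.randn(locs.shape[1])  # add N(0, h^2 cov) noise
    print(x_new)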