# %%
# Example 1
# =========
# The :class:`~.ParticlePredictor` and :class:`~.ParticleUpdater` take responsibility for the
# predict and update steps respectively. These require a :class:`~.TransitionModel` and
# :class:`~.MeasurementModel` as before.
# To cope with sample sparsity we also include a resampler, in this instance a
# :class:`~.SystematicResampler`, which is passed to the updater. It should be noted that there
# are many resampling schemes, and almost as many choices as to when to undertake resampling. The
# systematic resampler is described in [#]_, and in what follows below resampling is undertaken
# at each time-step.
from stonesoup.predictor.particle import ParticlePredictor
predictor = ParticlePredictor(transition_model)

from stonesoup.resampler.particle import SystematicResampler
resampler = SystematicResampler()

from stonesoup.updater.particle import ParticleUpdater
updater = ParticleUpdater(measurement_model, resampler)

# %%
# Initialise a prior
# ^^^^^^^^^^^^^^^^^^
# To start we create a prior estimate. This is a set of :class:`~.Particle` objects sampled from
# a Gaussian distribution (using the same parameters we had in the previous examples).

import numpy as np
from scipy.stats import multivariate_normal

from stonesoup.types.particle import Particle
from stonesoup.types.numeric import Probability  # Similar to a float type
from stonesoup.types.state import ParticleState
from stonesoup.types.array import StateVectors

number_particles = 1000
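
# %%
# A minimal sketch of constructing the prior follows. The prior mean and covariance are assumed
# values, and ``start_time`` is assumed to have been defined earlier in the example.
samples = multivariate_normal.rvs(np.array([0, 1, 0, 1]),
                                  np.diag([1.5, 0.5, 1.5, 0.5]),
                                  size=number_particles)
prior = ParticleState([Particle(sample.reshape(-1, 1),
                                weight=Probability(1 / number_particles))
                       for sample in samples],
                      timestamp=start_time)

# %%
# The recursive predict-update loop is sketched below, assuming a sequence of ``measurements``
# with timestamps from earlier in the example; the updater resamples at each step via the
# :class:`~.SystematicResampler` given above.
from stonesoup.types.hypothesis import SingleHypothesis
from stonesoup.types.track import Track

track = Track([prior])
for measurement in measurements:
    prediction = predictor.predict(track.state, timestamp=measurement.timestamp)
    hypothesis = SingleHypothesis(prediction, measurement)
    track.append(updater.update(hypothesis))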

# %%
# Example 2
# =========
# We will use a Particle Filter as this enables us to handle the non-linear nature of the imaging
# sensor. In this example we will use an inflated constant noise model to account for target
# motion uncertainty.
#
# Note that we don't add a measurement model to the updater. This is because each sensor adds its
# measurement model to each detection it generates. The tracker handles this internally by
# checking for a measurement model with each detection it receives and applying only the relevant
# measurement model.

from stonesoup.models.transition.linear import (
    CombinedLinearGaussianTransitionModel, ConstantVelocity)

target_transition_model = CombinedLinearGaussianTransitionModel(
    [ConstantVelocity(5), ConstantVelocity(5), ConstantVelocity(1)])

# First add a Particle Predictor
predictor = ParticlePredictor(target_transition_model)

# Now create a resampler and particle updater
resampler = SystematicResampler()
updater = ParticleUpdater(measurement_model=None,
                          resampler=resampler)

# Create a particle initiator
from stonesoup.initiator.simple import GaussianParticleInitiator, SinglePointInitiator
from stonesoup.types.state import GaussianState

single_point_initiator = SinglePointInitiator(
    GaussianState([[0], [-40], [2000], [0], [8000], [0]],
                  np.diag([10000, 1000, 10000, 1000, 10000, 1000])),
    None)

initiator = GaussianParticleInitiator(number_particles=500,
                                      initiator=single_point_initiator)

from stonesoup.hypothesiser.distance import DistanceHypothesiser
from stonesoup.measures import Mahalanobis
from stonesoup.dataassociator.neighbour import GNNWith2DAssignment

hypothesiser = DistanceHypothesiser(predictor, updater, measure=Mahalanobis(),
                                    missed_distance=np.inf)
data_associator = GNNWith2DAssignment(hypothesiser)

from stonesoup.deleter.time import UpdateTimeStepsDeleter
deleter = UpdateTimeStepsDeleter(time_steps_since_update=10)
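
# %%
# These components could then be assembled into a multi-target tracker. The sketch below assumes
# a ``detector`` (the detection source built earlier in the example, whose detections each carry
# the generating sensor's measurement model) is available.
from stonesoup.tracker.simple import MultiTargetTracker

tracker = MultiTargetTracker(
    initiator=initiator,
    deleter=deleter,
    detector=detector,  # assumed detection source from earlier in the example
    data_associator=data_associator,
    updater=updater)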

# %%
# Next we draw samples from a Gaussian state prediction so that the 'true' distribution of the
# predicted measurement can be visualised. Here ``prediction`` and ``start_time`` are assumed to
# have been produced earlier in the example, and ``measurement_model`` is the bearing-range model
# defined further below.
samples = multivariate_normal.rvs(prediction.state_vector.ravel(),
                                  prediction.covar,
                                  size=number_particles)
particles = [
    Particle(sample.reshape(-1, 1), weight=Probability(1 / number_particles))
    for sample in samples
]
# Create a particle state from the prediction samples.
pred_samples = ParticleState(particles, timestamp=start_time)

pupdater = ParticleUpdater(measurement_model, resampler)

predict_meas_samples = pupdater.predict_measurement(pred_samples)

# %%
# Don't worry what all this means for the moment. It's a convenient way of showing the 'true'
# distribution of the predicted measurement - which is rendered as a blue cloud. Note that
# no noise is added by the :meth:`~.UnscentedKalmanUpdater.predict_measurement` method so we add
# some noise below. This is additive Gaussian in the sensor coordinates.
from matplotlib import pyplot as plt

fig = plt.figure(figsize=(10, 6), tight_layout=True)
ax = fig.add_subplot(1, 1, 1, polar=True)
ax.set_ylim(0, 30)
ax.set_xlim(0, np.radians(180))
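
# %%
# A minimal sketch of rendering the sampled predicted measurements as the blue cloud described
# above, assuming the measurement prediction exposes its samples via a ``particles`` attribute
# (bearing on the angular axis, range on the radial axis):
ax.plot([sample.state_vector[0, 0] for sample in predict_meas_samples.particles],
        [sample.state_vector[1, 0] for sample in predict_meas_samples.particles],
        'b.', markersize=1, alpha=0.4, label='Predicted measurement samples')
ax.legend()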

# The bearing-range measurement model used with the updater above (``ndim_state`` is assumed to
# match the four-dimensional state used below), and a noisy detection of the ``truth`` path.
from stonesoup.models.measurement.nonlinear import CartesianToBearingRange
from stonesoup.types.detection import Detection
measurement_model = CartesianToBearingRange(ndim_state=4,
                                            mapping=(0, 2),
                                            noise_covar=np.diag(
                                                [np.radians(0.5), 1]))
measurement = Detection(measurement_model.function(truth.state, noise=True),
                        timestamp=truth.state.timestamp,
                        measurement_model=measurement_model)

# %%
# Here we set up three filters to compare: a standard particle filter, one using a Gromov
# particle flow update, and a Gromov flow variant run in parallel with a Kalman filter.
from stonesoup.predictor.particle import ParticleFlowKalmanPredictor
from stonesoup.updater.particle import GromovFlowParticleUpdater, GromovFlowKalmanParticleUpdater

transition_model = CombinedLinearGaussianTransitionModel(
    [ConstantVelocity(0.05), ConstantVelocity(0.05)])

p_predictor = ParticlePredictor(transition_model)
pfk_predictor = ParticleFlowKalmanPredictor(
    transition_model)  # By default, parallels EKF
# The plain particle predictor is used for both the particle filter and the Gromov flow filter.
predictors = [p_predictor, p_predictor, pfk_predictor]

p_updater = ParticleUpdater(measurement_model)
f_updater = GromovFlowParticleUpdater(measurement_model)
pfk_updater = GromovFlowKalmanParticleUpdater(
    measurement_model)  # By default, parallels EKF
updaters = [p_updater, f_updater, pfk_updater]

number_particles = 1000
samples = multivariate_normal.rvs(np.array([0, 1, 0, 1]),
                                  np.diag([1.5, 0.5, 1.5, 0.5]),
                                  size=number_particles)
# Note that weights are not used in particle flow, so their value won't affect it.
weight = Probability(1 / number_particles)
particles = [
    Particle(sample.reshape(-1, 1), weight=weight) for sample in samples
]
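
# %%
# A minimal sketch of running the three filters side by side, assuming ``start_time`` and a
# common sequence of ``measurements`` (with timestamps) are available from earlier in the
# example. Each filter gets its own copy of the initial particle cloud.
from stonesoup.types.hypothesis import SingleHypothesis
from stonesoup.types.track import Track

tracks = [Track([ParticleState([Particle(sample.reshape(-1, 1), weight=weight)
                                for sample in samples],
                               timestamp=start_time)])
          for _ in range(len(predictors))]

for measurement in measurements:
    for pred, upd, track in zip(predictors, updaters, tracks):
        prediction = pred.predict(track.state, timestamp=measurement.timestamp)
        hypothesis = SingleHypothesis(prediction, measurement)
        track.append(upd.update(hypothesis))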