Example #1
def test_mmparticles_trivial(self):
    init_array = np.zeros((3, 6))
    init_array += np.arange(3).reshape(3, 1)
    mmp = MMParticles(init_array)
    weights = np.array([0, 1, 0])
    mmp_resampled = resampling.multinomial(mmp, weights)
    for i in range(3):
        npt.assert_array_equal(mmp_resampled[i],
                               np.array([[0, 1, 1, 1, 1, 1, 1, 0]]))
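For reference, multinomial resampling boils down to drawing particle indices with replacement according to the weights. A minimal standalone sketch (plain numpy, independent of bmm; the helper name is hypothetical):

import numpy as np

def multinomial_indices(n: int, weights: np.ndarray) -> np.ndarray:
    # Draw n particle indices with replacement, proportional to the weights
    return np.random.choice(len(weights), size=n, replace=True, p=weights)

# With weights [0, 1, 0] every draw returns index 1, matching the test above
assert (multinomial_indices(3, np.array([0., 1., 0.])) == 1).all()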
Example #2
def test_mmparticles_repeated(self):
    init_array = np.zeros((10, 6))
    init_array += np.arange(10).reshape(10, 1)
    mmp = MMParticles(init_array)
    weights = np.arange(1, 11)
    weights = weights / weights.sum()
    repeated_resample = [
        resampling.multinomial(mmp, weights) for _ in range(10000)
    ]
    repeated_resample_arr = np.array(
        [p.particles for p in repeated_resample])[:, :, 0, 1]
    empirical_weights = np.array([(repeated_resample_arr == i).mean()
                                  for i in np.arange(10)])
    npt.assert_array_almost_equal(weights, empirical_weights, decimal=2)
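The same frequency check can be reproduced with plain numpy by drawing indices directly; a standalone sketch of the Monte Carlo argument behind the test (not part of the test suite):

import numpy as np

rng = np.random.default_rng(0)
weights = np.arange(1, 11) / np.arange(1, 11).sum()
# 10000 resampling rounds of 10 indices each, as in the test above
draws = rng.choice(10, size=(10000, 10), replace=True, p=weights)
empirical_weights = np.array([(draws == i).mean() for i in range(10)])
assert np.allclose(weights, empirical_weights, atol=0.01)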
Example #3
def initiate_particles(
        graph: MultiDiGraph,
        first_observation: np.ndarray,
        n_samps: int,
        mm_model: MapMatchingModel = ExponentialMapMatchingModel(),
        d_refine: float = 1,
        d_truncate: float = None,
        ess_all: bool = True,
        filter_store: bool = True) -> MMParticles:
    """
    Initiate the start of a trajectory by sampling points around the first observation.
    Note that the coordinate system of the inputs must be the same, typically a UTM projection (not longitude-latitude!).
    :param graph: encodes road network, simplified and projected to UTM
    :param first_observation: cartesian coordinate in UTM
    :param n_samps: number of samples to generate
    :param mm_model: MapMatchingModel
    :param d_refine: metres, resolution of distance discretisation
    :param d_truncate: metres, distance beyond which to assume zero likelihood,
        defaults to 5 * mm_model.gps_sd
    :param ess_all: if True, initiate effective sample size for each particle for each observation,
        otherwise initiate effective sample size only for each observation
    :param filter_store: whether to initiate storage of filter particles and weights
    :return: MMParticles object
    """
    gps_sd = mm_model.gps_sd

    if d_truncate is None:
        d_truncate = gps_sd * 5

    start = tm()

    # Discretize edges within truncation
    dis_points, dists_to_first_obs = edges.get_truncated_discrete_edges(
        graph, first_observation, d_refine, d_truncate, True)

    if dis_points.size == 0:
        raise ValueError(
            "No edges found near initial observation: try increasing the initial_truncation"
        )

    # Likelihood weights
    weights = np.exp(-0.5 / gps_sd**2 * dists_to_first_obs**2)
    weights /= np.sum(weights)

    # Sample indices according to weights
    sampled_indices = np.random.choice(len(weights),
                                       n_samps,
                                       replace=True,
                                       p=weights)

    # Output
    out_particles = MMParticles(dis_points[sampled_indices])

    # Initiate ESS
    if ess_all:
        out_particles.ess_stitch = np.ones(
            (1, out_particles.n)) * out_particles.n
    out_particles.ess_pf = np.array([out_particles.n])

    if filter_store:
        out_particles.filter_particles = [out_particles.copy()]
        out_particles.filter_weights = np.ones((1, n_samps)) / n_samps

    end = tm()
    out_particles.time += end - start

    return out_particles
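The weighting and sampling step at the core of initiate_particles can be reproduced in isolation: Gaussian likelihood weights in the distance to the observation, normalised, then indices drawn proportionally. A standalone sketch with hypothetical numbers (gps_sd and the distances are placeholders, not model defaults):

import numpy as np

gps_sd = 5.0                                        # metres, placeholder value
dists_to_first_obs = np.array([1., 4., 10., 20.])   # metres, hypothetical discretised points
weights = np.exp(-0.5 / gps_sd ** 2 * dists_to_first_obs ** 2)
weights /= np.sum(weights)
sampled_indices = np.random.choice(len(weights), 100, replace=True, p=weights)
print(weights.round(3))                             # closer points get larger weights
print(np.bincount(sampled_indices, minlength=4))    # and are sampled more often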
Example #4
def propose_particles(proposal_func: Callable,
                      resample_weights: Union[None, np.ndarray],
                      graph: MultiDiGraph,
                      particles: MMParticles,
                      new_observation: np.ndarray,
                      time_interval: float,
                      mm_model: MapMatchingModel,
                      full_smoothing: bool = True,
                      store_norm_quants: bool = False,
                      **kwargs) -> Tuple[MMParticles, np.ndarray, np.ndarray]:
    """
    Resamples (optionally) then propagates and weights all particles in light of a new observation,
    using repeated draws from the supplied (distance discretised) proposal.
    :param proposal_func: function to propose a single particle
    :param resample_weights: weights for resampling, None for no resampling
    :param graph: encodes road network, simplified and projected to UTM
    :param particles: all particles at last observation time
    :param new_observation: cartesian coordinate in UTM
    :param time_interval: time between last observation and newly received observation
    :param mm_model: MapMatchingModel
    :param full_smoothing: if True returns full trajectory
        otherwise returns only x_t-1 to x_t
    :param store_norm_quants: whether to additionally return quantities needed for gradient EM step
        assuming deviation prior is used
    :return: proposed particles, unnormalised weights, prior_norm(_quants)
    """
    n_samps = particles.n
    out_particles = particles.copy()

    if resample_weights is not None:
        resample_inds = np.random.choice(n_samps,
                                         n_samps,
                                         replace=True,
                                         p=resample_weights)
        not_prop_inds = np.arange(
            n_samps)[~np.isin(np.arange(n_samps), resample_inds)]
    else:
        resample_inds = np.arange(n_samps)
        not_prop_inds = []

    weights = np.zeros(n_samps)
    prior_norms = np.zeros((n_samps, len(mm_model.distance_params) +
                            2)) if store_norm_quants else np.zeros(n_samps)
    for j in range(n_samps):
        in_particle = particles[resample_inds[j]]
        in_particle = in_particle.copy() if in_particle is not None else None
        out_particles[j], weights[j], prior_norms[
            resample_inds[j]] = proposal_func(
                graph,
                in_particle,
                new_observation,
                time_interval,
                mm_model,
                full_smoothing=full_smoothing,
                store_norm_quants=store_norm_quants,
                **kwargs)
    for k in not_prop_inds:
        if particles[k] is not None:
            prior_norms[k] = proposal_func(graph,
                                           particles[k],
                                           None,
                                           time_interval,
                                           mm_model,
                                           full_smoothing=False,
                                           store_norm_quants=store_norm_quants,
                                           only_norm_const=True,
                                           **kwargs)
        else:
            prior_norms[k] = np.zeros(len(mm_model.distance_params) +
                                      2) if store_norm_quants else 0

    return out_particles, weights, prior_norms
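The index bookkeeping above is worth seeing in isolation: resampled indices can miss some original particles entirely, and those missed indices (not_prop_inds) are the ones that only need their prior normalising constant computed. A standalone sketch with toy weights:

import numpy as np

n_samps = 5
rng = np.random.default_rng(1)
resample_weights = np.array([0.5, 0.5, 0., 0., 0.])
resample_inds = rng.choice(n_samps, n_samps, replace=True, p=resample_weights)
not_prop_inds = np.arange(n_samps)[~np.isin(np.arange(n_samps), resample_inds)]
# Indices 2, 3 and 4 have zero weight, so they are guaranteed to appear in not_prop_inds
print(resample_inds, not_prop_inds)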
Example #5
def update_particles_flbs(graph: MultiDiGraph,
                          particles: MMParticles,
                          new_observation: np.ndarray,
                          time_interval: float,
                          mm_model: MapMatchingModel,
                          proposal_func: Callable,
                          lag: int = 3,
                          max_rejections: int = 20,
                          ess_threshold: float = 1.,
                          **kwargs) -> MMParticles:
    """
    Joint fixed-lag update in light of a newly received observation, using partial backward simulation runs for stitching.
    Propose and reweight, then backward simulation and fixed-lag stitching.
    :param graph: encodes road network, simplified and projected to UTM
    :param particles: unweighted particle approximation up to the previous observation time
    :param new_observation: cartesian coordinate in UTM
    :param time_interval: time between last observation and newly received observation
    :param mm_model: MapMatchingModel
    :param proposal_func: function to propagate and weight particles
    :param lag: fixed lag for resampling/stitching
    :param max_rejections: number of rejections to attempt before doing full fixed-lag stitching
        0 will do full fixed-lag stitching and track ess_stitch
    :param ess_threshold: in [0,1], particle filter resamples if ess < ess_threshold * n_samps
    :param kwargs:
        any additional arguments to be passed to proposal
        i.e. d_refine or d_max for optimal proposal
    :return: MMParticles object
    """
    start = tm()

    filter_particles = particles.filter_particles

    # Extract basic quantities
    n = particles.n
    observation_times = np.append(
        particles.observation_times,
        particles.observation_times[-1] + time_interval)
    m = len(observation_times) - 1
    stitching_required = m > lag

    # Initiate particle output
    out_particles = particles.copy()

    # Which particles to propose from (out_particles have been resampled, filter_particles haven't)
    previous_resample = particles.ess_pf[-1] < ess_threshold * n
    base_particles = out_particles if previous_resample else particles.filter_particles[
        -1].copy()

    latest_filter_particles, weights, temp_prior_norm = propose_particles(
        proposal_func,
        None,
        graph,
        base_particles,
        new_observation,
        time_interval,
        mm_model,
        full_smoothing=False,
        store_norm_quants=False,
        **kwargs)

    filter_particles[-1].prior_norm = temp_prior_norm

    # Update weights if not resampled
    if not previous_resample:
        weights *= particles.filter_weights[-1]

    # Normalise weights
    weights /= sum(weights)

    # Append new filter particles and weights, discard old ones
    start_point = 1 if stitching_required else 0
    filter_particles = particles.filter_particles[start_point:] + [
        latest_filter_particles
    ]
    out_particles.filter_weights = np.append(
        out_particles.filter_weights[start_point:],
        weights[np.newaxis],
        axis=0)

    # Store ESS
    out_particles.ess_pf = np.append(out_particles.ess_pf,
                                     1 / np.sum(weights**2))

    # Update time intervals
    out_particles.time_intervals = np.append(out_particles.time_intervals,
                                             time_interval)

    # Run backward simulation
    backward_particles = backward_simulate(
        graph,
        filter_particles,
        out_particles.filter_weights,
        out_particles.time_intervals[-lag:] if lag != 0 else [],
        mm_model,
        max_rejections,
        store_ess_back=False,
        store_norm_quants=True)
    backward_particles.prior_norm = backward_particles.dev_norm_quants[0]
    del backward_particles.dev_norm_quants

    if stitching_required:
        # Largest time not to be resampled
        max_fixed_time = observation_times[m - lag - 1]

        # Extract fixed particles
        fixed_particles = out_particles.copy()
        for j in range(n):
            if out_particles[j] is None:
                continue
            max_fixed_time_index = np.where(
                out_particles[j][:, 0] == max_fixed_time)[0][0]
            fixed_particles[j] = out_particles[j][:(max_fixed_time_index + 1)]

        # Stitch
        out_particles = fixed_lag_stitch_post_split(graph, fixed_particles,
                                                    backward_particles,
                                                    np.ones(n) / n, mm_model,
                                                    max_rejections)

    else:
        out_particles.particles = backward_particles.particles

    out_particles.filter_particles = filter_particles

    end = tm()
    out_particles.time += end - start

    return out_particles
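The condition m > lag simply counts completed observation intervals; a toy illustration of when fixed-lag stitching kicks in (times are hypothetical):

import numpy as np

lag = 3
observation_times = np.array([0., 15., 30., 45.])   # times seen before the new observation
time_interval = 15.                                  # gap to the newly received observation
observation_times = np.append(observation_times, observation_times[-1] + time_interval)
m = len(observation_times) - 1                       # number of observation intervals so far
print(m, m > lag)                                    # 4 True -> stitching_required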
Example #6
def fixed_lag_stitching(graph: MultiDiGraph, mm_model: MapMatchingModel,
                        particles: MMParticles, weights: np.ndarray, lag: int,
                        max_rejections: int) -> MMParticles:
    """
    Split particles and resample (with stitching) coordinates after a certain time - defined by the lag parameter.
    :param graph: encodes road network, simplified and projected to UTM
    :param mm_model: MapMatchingModel
    :param particles: MMParticles object
    :param weights: shape = (n,) weights at latest observation time
    :param lag: fixed lag for resampling/stitching
        None indicates full multinomial resampling
    :param max_rejections: number of rejections to attempt before doing full fixed-lag stitching
        0 will do full fixed-lag stitching and track ess_stitch
    :return: MMParticles object
    """
    # Bool whether to store ESS stitch quantities
    full_fixed_lag_resample = max_rejections == 0

    # Check weights are normalised
    weights_sum = np.sum(weights)
    if weights_sum != 1:
        weights /= weights_sum

    # Extract basic quantities
    observation_times = particles.observation_times
    m = len(observation_times) - 1
    n = particles.n
    ess_pf = 1 / np.sum(weights**2)

    # Initiate output
    out_particles = particles.copy()

    # If not reached lag yet do standard resampling
    if lag is None or m <= lag:
        if full_fixed_lag_resample:
            out_particles.ess_stitch = np.append(particles.ess_stitch,
                                                 np.ones((1, n)) * ess_pf,
                                                 axis=0)
        return multinomial(out_particles, weights)

    # Largest time not to be resampled
    max_fixed_time = observation_times[m - lag - 1]

    # Pre-process a bit
    fixed_particles = out_particles.copy()
    new_particles = out_particles.copy()
    max_fixed_time_indices = [0] * n

    for j in range(n):
        if out_particles[j] is None:
            continue

        max_fixed_time_indices[j] = np.where(
            out_particles[j][:, 0] == max_fixed_time)[0][0]
        fixed_particles[j] = out_particles[j][:(max_fixed_time_indices[j] + 1)]
        new_particles[j] = out_particles[j][max_fixed_time_indices[j]:]

    new_particles.prior_norm = out_particles.prior_norm[m - lag - 1]

    # Stitch
    out_particles = fixed_lag_stitch_post_split(graph, fixed_particles,
                                                new_particles, weights,
                                                mm_model, max_rejections)

    return out_particles
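The split around max_fixed_time can be seen on a single toy particle array, where column 0 holds the observation time (the remaining columns are placeholders): rows up to and including max_fixed_time stay fixed, rows from max_fixed_time onwards go to the stitching step, with a one-row overlap.

import numpy as np

particle = np.array([[0.,  1., 2., 0., 0.5, 0., 0., 0.],
                     [15., 2., 3., 0., 0.2, 0., 0., 0.],
                     [30., 3., 4., 0., 0.8, 0., 0., 0.]])
max_fixed_time = 15.
idx = np.where(particle[:, 0] == max_fixed_time)[0][0]
fixed_part = particle[:(idx + 1)]         # rows up to and including max_fixed_time
new_part = particle[idx:]                 # rows from max_fixed_time onwards (one-row overlap)
print(fixed_part.shape, new_part.shape)   # (2, 8) (2, 8)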
Example #7
def fixed_lag_stitch_post_split(graph: MultiDiGraph,
                                fixed_particles: MMParticles,
                                new_particles: MMParticles,
                                new_weights: np.ndarray,
                                mm_model: MapMatchingModel,
                                max_rejections: int) -> MMParticles:
    """
    Stitch together fixed_particles with samples from new_particles according to joint fixed-lag posterior
    :param graph: encodes road network, simplified and projected to UTM
    :param fixed_particles: trajectories before stitching time (won't be changed)
    :param new_particles: trajectories after stitching time (to be resampled)
        overlaps fixed_particles by one observation time
    :param new_weights: weights applied to new_particles
    :param mm_model: MapMatchingModel
    :param max_rejections: number of rejections to attempt before doing full fixed-lag stitching
        0 will do full fixed-lag stitching and track ess_stitch
    :return: MMParticles object
    """

    n = len(fixed_particles)
    full_fixed_lag_resample = max_rejections == 0

    min_resample_time = new_particles.observation_times[1]
    min_resample_time_indices = [
        np.where(particle[:, 0] == min_resample_time)[0][0]
        if particle is not None else 0 for particle in new_particles
    ]
    original_stitching_distances = np.array([
        new_particles[j][min_resample_time_indices[j],
                         -1] if new_particles[j] is not None else 0
        for j in range(n)
    ])

    max_fixed_time = fixed_particles._first_non_none_particle[-1, 0]

    stitch_time_interval = min_resample_time - max_fixed_time

    distance_prior_evals = mm_model.distance_prior_evaluate(
        original_stitching_distances, stitch_time_interval)

    fixed_last_coords = np.array([
        part[0, 5:7] if part is not None else [0, 0] for part in new_particles
    ])
    new_coords = np.array([
        new_particles[j][min_resample_time_indices[j],
                         5:7] if new_particles[j] is not None else [0, 0]
        for j in range(n)
    ])
    deviation_prior_evals = mm_model.deviation_prior_evaluate(
        fixed_last_coords, new_coords, original_stitching_distances)

    original_prior_evals = np.zeros(n)
    pos_inds = new_particles.prior_norm > 1e-5
    original_prior_evals[pos_inds] = distance_prior_evals[pos_inds] \
                                     * deviation_prior_evals[pos_inds] \
                                     * new_particles.prior_norm[pos_inds]

    out_particles = fixed_particles

    # Initiate some required quantities depending on whether to do rejection sampling or not
    if full_fixed_lag_resample:
        ess_stitch_track = np.zeros(n)

        # distance_prior_bound = None
        # adjusted_weights = None
    else:
        ess_stitch_track = None

        pos_prior_bound = mm_model.pos_distance_prior_bound(
            stitch_time_interval)
        prior_bound = mm_model.distance_prior_bound(stitch_time_interval)
        store_out_parts = fixed_particles.copy()

    adjusted_weights = new_weights.copy()
    adjusted_weights[original_prior_evals > 1e-5] /= original_prior_evals[
        original_prior_evals > 1e-5]
    adjusted_weights[original_prior_evals < 1e-5] = 0
    adjusted_weights /= np.sum(adjusted_weights)

    resort_to_full = False

    # Iterate through particles
    for j in range(n):
        fixed_particle = fixed_particles[j]

        # Check if particle is None
        # i.e. fixed lag approx has failed
        if fixed_particle is None:
            out_particles[j] = None
            if full_fixed_lag_resample:
                ess_stitch_track[j] = 0
            continue

        last_edge_fixed = fixed_particle[-1]
        last_edge_fixed_geom = get_geometry(graph, last_edge_fixed[1:4])
        last_edge_fixed_length = last_edge_fixed_geom.length

        if full_fixed_lag_resample:
            # Full resampling
            out_particles[j], ess_stitch_track[j] = full_fixed_lag_stitch(
                fixed_particle, last_edge_fixed, last_edge_fixed_length,
                new_particles, adjusted_weights, stitch_time_interval,
                min_resample_time_indices, mm_model, True)

        else:
            # Rejection sampling
            out_particles[j] = rejection_fixed_lag_stitch(
                fixed_particle,
                last_edge_fixed,
                last_edge_fixed_length,
                new_particles,
                adjusted_weights,
                stitch_time_interval,
                min_resample_time_indices,
                pos_prior_bound,
                mm_model,
                max_rejections,
                break_on_zero=True)
            if out_particles[j] is None:
                # Rejection sampling reached max_rejections -> try full resampling
                out_particles[j] = full_fixed_lag_stitch(
                    fixed_particle, last_edge_fixed, last_edge_fixed_length,
                    new_particles, adjusted_weights, stitch_time_interval,
                    min_resample_time_indices, mm_model, False)

            if isinstance(out_particles[j], int) and out_particles[j] == 0:
                resort_to_full = True
                break

    if resort_to_full:
        for j in range(n):
            fixed_particle = store_out_parts[j]

            # Check if particle is None
            # i.e. fixed lag approx has failed
            if fixed_particle is None:
                out_particles[j] = None
                if full_fixed_lag_resample:
                    ess_stitch_track[j] = 0
                continue

            last_edge_fixed = fixed_particle[-1]
            last_edge_fixed_geom = get_geometry(graph, last_edge_fixed[1:4])
            last_edge_fixed_length = last_edge_fixed_geom.length

            # Rejection sampling with full bound
            out_particles[j] = rejection_fixed_lag_stitch(
                fixed_particle, last_edge_fixed, last_edge_fixed_length,
                new_particles, adjusted_weights, stitch_time_interval,
                min_resample_time_indices, prior_bound, mm_model,
                max_rejections)
            if out_particles[j] is None:
                # Rejection sampling reached max_rejections -> try full resampling
                out_particles[j] = full_fixed_lag_stitch(
                    fixed_particle, last_edge_fixed, last_edge_fixed_length,
                    new_particles, adjusted_weights, stitch_time_interval,
                    min_resample_time_indices, mm_model, False)

    if full_fixed_lag_resample:
        out_particles.ess_stitch = np.append(out_particles.ess_stitch,
                                             np.atleast_2d(ess_stitch_track),
                                             axis=0)

    # Do full resampling where fixed lag approx broke
    none_inds = np.array([p is None for p in out_particles])
    good_inds = ~none_inds
    n_good = good_inds.sum()

    if n_good == 0:
        raise ValueError(
            "Map-matching failed: all stitching probabilities zero,"
            "try increasing the lag or number of particles")

    if n_good < n:
        none_inds_res_indices = np.random.choice(n,
                                                 n - n_good,
                                                 p=good_inds / n_good)
        for i, j in enumerate(np.where(none_inds)[0]):
            out_particles[j] = out_particles[none_inds_res_indices[i]]
        if full_fixed_lag_resample:
            out_particles.ess_stitch[-1,
                                     none_inds] = 1 / (new_weights**2).sum()

    return out_particles
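The weight adjustment near the top of the function divides out each particle's prior contribution and zeroes weights whose prior evaluation is numerically zero; in isolation with toy numbers:

import numpy as np

new_weights = np.array([0.25, 0.25, 0.25, 0.25])
original_prior_evals = np.array([0.5, 2.0, 0.0, 1.0])

adjusted_weights = new_weights.copy()
pos = original_prior_evals > 1e-5
adjusted_weights[pos] /= original_prior_evals[pos]   # divide out the prior where it is positive
adjusted_weights[~pos] = 0.0                         # zero out numerically-zero prior evaluations
adjusted_weights /= adjusted_weights.sum()
print(adjusted_weights)                              # [0.571... 0.142... 0. 0.285...]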