Example 1
def get_all_possible_routes_overshoot(graph: MultiDiGraph,
                                      in_edge: np.ndarray,
                                      d_max: float,
                                      num_inter_cut_off: int = np.inf) -> list:
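    """
    Variant of get_possible_routes that reuses a cached per-edge route search, so returned
    routes may overshoot d_max by up to the distance remaining on the in-edge.

    :param graph: encodes road network, simplified and projected to UTM
    :param in_edge: route array, shape and columns as in get_possible_routes
    :param d_max: metres, maximum distance to travel (before overshoot)
    :param num_inter_cut_off: maximum number of intersections to cross in the time interval
    :return: list of route arrays
    """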
    in_edge_geom = get_geometry(graph, in_edge[-1, 1:4])
    in_edge_length = in_edge_geom.length
    extra_dist = (1 - in_edge[-1, 4]) * in_edge_length

    if extra_dist > d_max:
        return get_possible_routes(graph, in_edge, d_max, all_routes=True, num_inter_cut_off=num_inter_cut_off)

    all_possible_routes_overshoot = get_possible_routes_all_cached(graph, *in_edge[-1, 1:4],
                                                                   d_max, num_inter_cut_off)

    out_routes = []
    for i in range(len(all_possible_routes_overshoot)):
        temp_route = all_possible_routes_overshoot[i].copy()
        temp_route[:, -1] += extra_dist
        out_routes.append(temp_route)
    return out_routes
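A hypothetical usage sketch (not from the source): it assumes graph is a simplified, UTM-projected MultiDiGraph and that (u, v, k) is one of its edge identifiers; the single-row in_edge array follows the route column layout documented in Example 2.

# Usage sketch -- graph and the edge identifiers u, v, k are assumed to exist
import numpy as np

in_edge = np.array([[0., u, v, k, 0.5, 0., 0., 0., 0.]])  # start halfway along edge (u, v, k)
routes = get_all_possible_routes_overshoot(graph, in_edge, d_max=200., num_inter_cut_off=10)
for route in routes:
    print(route[-1, 1:4], route[-1, -1])  # final edge and cumulative distance of each route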
Example 2
def get_possible_routes(graph: MultiDiGraph,
                        in_route: np.ndarray,
                        dist: float,
                        all_routes: bool = False,
                        num_inter_cut_off: int = np.inf) -> list:
    """
    Given a route so far and maximum distance to travel, calculate and return all possible routes on graph.

    :param graph: encodes road network, simplified and projected to UTM
    :param in_route: shape = (_, 9)
        columns: t, u, v, k, alpha, x, y, n_inter, d
        t: float, time
        u: int, edge start node
        v: int, edge end node
        k: int, edge key
        alpha: in [0,1], position along edge
        x: float, metres, cartesian x coordinate
        y: float, metres, cartesian y coordinate
        n_inter: int, number of options if intersection
        d: metres, distance travelled
    :param dist: metres, maximum possible distance to travel
    :param all_routes: if True, return all possible routes of length <= dist,
        otherwise return only routes of length exactly dist
    :param num_inter_cut_off: maximum number of intersections to cross in the time interval
    :return: list of arrays
        each array with shape = (_, 9) as in_route
        each array describes a possible route
    """
    # Extract final position from inputted route
    start_edge_and_position = in_route[-1]

    # Extract edge geometry
    start_edge_geom = get_geometry(graph, start_edge_and_position[1:4])
    start_edge_geom_length = start_edge_geom.length

    # Distance left on edge before intersection
    # Use NetworkX length rather than OSM length
    distance_left_on_edge = (1 - start_edge_and_position[4]) * start_edge_geom_length

    if distance_left_on_edge > dist:
        # Remain on edge
        # Propagate and return
        start_edge_and_position[4] += dist / start_edge_geom_length
        start_edge_and_position[-1] += dist
        return [in_route]

    # Reach intersection at end of edge
    # Propagate to intersection and recurse
    dist -= distance_left_on_edge
    start_edge_and_position[4] = 1.
    start_edge_and_position[-1] += distance_left_on_edge

    intersection_edges = get_out_edges(graph, start_edge_and_position[2]).copy()

    if intersection_edges.shape[1] == 0 or len(in_route) >= num_inter_cut_off:
        # Dead-end and one-way or exceeded max intersections
        if all_routes:
            return [in_route]
        else:
            return [None]

    if len(intersection_edges) == 1 and intersection_edges[0][1] == start_edge_and_position[1] \
            and intersection_edges[0][2] == start_edge_and_position[3]:
        # Dead-end and two-way -> Only option is u-turn
        if all_routes:
            return [in_route]
        else:
            return [None]
    else:
        new_routes = []
        for new_edge in intersection_edges:
            # If not u-turn or loop continue route search on new edge
            if (not (new_edge[1] == start_edge_and_position[1] and new_edge[2] == start_edge_and_position[3])) \
                    and not (new_edge == in_route[:, 1:4]).all(1).any():
                add_edge = np.array([[0, *new_edge, 0, 0, 0, start_edge_and_position[-1]]])
                new_route = np.append(in_route,
                                      add_edge,
                                      axis=0)

                new_routes += get_possible_routes(graph, new_route, dist, all_routes, num_inter_cut_off)
        if all_routes:
            return [in_route] + new_routes
        else:
            return new_routes
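A hypothetical usage sketch (not from the source); graph and an edge (u, v, k) of it are assumed. Note that get_possible_routes modifies the last row of in_route in place, so a fresh copy is passed to each call.

# Usage sketch -- graph and the edge identifiers u, v, k are assumed to exist
import numpy as np

start_route = np.array([[0., u, v, k, 0.2, 0., 0., 0., 0.]])  # 20% of the way along (u, v, k)

# Routes of length exactly 150 metres (None entries mark branches that hit a dead end first)
routes_exact = get_possible_routes(graph, start_route.copy(), dist=150.)

# All routes of length <= 150 metres, crossing at most 5 intersections
routes_all = get_possible_routes(graph, start_route.copy(), dist=150.,
                                 all_routes=True, num_inter_cut_off=5)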
Example 3
def backward_simulate(graph: MultiDiGraph,
                      filter_particles: MMParticles,
                      filter_weights: np.ndarray,
                      time_interval_arr: np.ndarray,
                      mm_model: MapMatchingModel,
                      max_rejections: int,
                      verbose: bool = False,
                      store_ess_back: bool = None,
                      store_norm_quants: bool = False) -> MMParticles:
    """
    Given particle filter output, run backwards simulation to output smoothed trajectories
    :param graph: encodes road network, simplified and projected to UTM
    :param filter_particles: marginal outputs from particle filter
    :param filter_weights: weights from particle filter, shape (num_obs, n_samps)
    :param time_interval_arr: times between observations, must be length one less than filter_particles
    :param mm_model: MapMatchingModel
    :param max_rejections: number of rejections to attempt before doing full fixed-lag stitching
        0 will do full backward simulation and track ess_back
    :param verbose: print ess_pf or ess_back
    :param store_ess_back: whether to store ess_back (if possible) in MMParticles object
    :param store_norm_quants: if True normalisation quantities returned in out_particles
    :return: MMParticles object
    """
    n_samps = filter_particles[-1].n
    num_obs = len(filter_particles)

    if len(time_interval_arr) + 1 != num_obs:
        raise ValueError(
            "time_interval_arr must be length one less than that of filter_particles"
        )

    full_sampling = max_rejections == 0
    if store_ess_back is None:
        store_ess_back = full_sampling

    # Multinomial resample end particles if weighted
    if np.all(filter_weights[-1] == filter_weights[-1][0]):
        out_particles = filter_particles[-1].copy()
    else:
        out_particles = multinomial(filter_particles[-1], filter_weights[-1])
    if full_sampling:
        ess_back = np.zeros((num_obs, n_samps))
        ess_back[0] = 1 / (filter_weights[-1]**2).sum()
    else:
        ess_back = None

    if num_obs < 2:
        return out_particles

    if store_norm_quants:
        norm_quants = np.zeros(
            (num_obs - 1, *filter_particles[0].prior_norm.shape))

    for i in range(num_obs - 2, -1, -1):
        next_time = filter_particles[i + 1].latest_observation_time

        if not full_sampling:
            pos_prior_bound = mm_model.pos_distance_prior_bound(
                time_interval_arr[i])
            prior_bound = mm_model.distance_prior_bound(time_interval_arr[i])
            store_out_parts = out_particles.copy()

        if filter_particles[i].prior_norm.ndim == 2:
            prior_norm = filter_particles[i].prior_norm[:, 0]
        else:
            prior_norm = filter_particles[i].prior_norm
        adjusted_weights = filter_weights[i].copy()
        good_inds = np.logical_and(adjusted_weights != 0, prior_norm != 0)
        adjusted_weights[good_inds] /= prior_norm[good_inds]
        adjusted_weights[~good_inds] = 0
        adjusted_weights /= adjusted_weights.sum()

        if store_norm_quants:
            sampled_inds = np.zeros(n_samps, dtype=int)

        resort_to_full = False
        for j in range(n_samps):
            fixed_particle = out_particles[j].copy()
            first_edge_fixed = fixed_particle[0]
            first_edge_fixed_geom = get_geometry(graph, first_edge_fixed[1:4])
            first_edge_fixed_length = first_edge_fixed_geom.length
            fixed_next_time_index = np.where(
                fixed_particle[:, 0] == next_time)[0][0]

            if full_sampling:
                back_output = full_backward_sample(
                    fixed_particle,
                    first_edge_fixed,
                    first_edge_fixed_length,
                    filter_particles[i],
                    adjusted_weights,
                    time_interval_arr[i],
                    fixed_next_time_index,
                    mm_model,
                    return_ess_back=True,
                    return_sampled_index=store_norm_quants)

                if store_norm_quants:
                    out_particles[j], ess_back[i, j], sampled_inds[j] = back_output
                else:
                    out_particles[j], ess_back[i, j] = back_output

            else:
                back_output = rejection_backward_sample(
                    fixed_particle,
                    first_edge_fixed,
                    first_edge_fixed_length,
                    filter_particles[i],
                    adjusted_weights,
                    time_interval_arr[i],
                    fixed_next_time_index,
                    pos_prior_bound,
                    mm_model,
                    max_rejections,
                    return_sampled_index=store_norm_quants,
                    break_on_zero=True)

                first_back_output = back_output[0] if store_norm_quants else back_output

                if first_back_output is None:
                    back_output = full_backward_sample(
                        fixed_particle,
                        first_edge_fixed,
                        first_edge_fixed_length,
                        filter_particles[i],
                        adjusted_weights,
                        time_interval_arr[i],
                        fixed_next_time_index,
                        mm_model,
                        return_ess_back=False,
                        return_sampled_index=store_norm_quants)

                if isinstance(first_back_output, int) and first_back_output == 0:
                    resort_to_full = True
                    break

                if store_norm_quants:
                    out_particles[j], sampled_inds[j] = back_output
                else:
                    out_particles[j] = back_output

        if resort_to_full:
            if store_norm_quants:
                sampled_inds = np.zeros(n_samps, dtype=int)
            for j in range(n_samps):
                fixed_particle = store_out_parts[j]
                first_edge_fixed = fixed_particle[0]
                first_edge_fixed_geom = get_geometry(graph,
                                                     first_edge_fixed[1:4])
                first_edge_fixed_length = first_edge_fixed_geom.length
                fixed_next_time_index = np.where(
                    fixed_particle[:, 0] == next_time)[0][0]

                back_output = rejection_backward_sample(
                    fixed_particle,
                    first_edge_fixed,
                    first_edge_fixed_length,
                    filter_particles[i],
                    adjusted_weights,
                    time_interval_arr[i],
                    fixed_next_time_index,
                    prior_bound,
                    mm_model,
                    max_rejections,
                    return_sampled_index=store_norm_quants,
                    break_on_zero=False)

                first_back_output = back_output[0] if store_norm_quants else back_output

                if first_back_output is None:
                    back_output = full_backward_sample(
                        fixed_particle,
                        first_edge_fixed,
                        first_edge_fixed_length,
                        filter_particles[i],
                        adjusted_weights,
                        time_interval_arr[i],
                        fixed_next_time_index,
                        mm_model,
                        return_ess_back=False,
                        return_sampled_index=store_norm_quants)

                if store_norm_quants:
                    out_particles[j], sampled_inds[j] = back_output
                else:
                    out_particles[j] = back_output

        if store_norm_quants:
            norm_quants[i] = filter_particles[i].prior_norm[sampled_inds]

        none_inds = np.array([p is None or None in p for p in out_particles])
        good_inds = ~none_inds
        n_good = good_inds.sum()
        if n_good < n_samps:
            none_inds_res_indices = np.random.choice(n_samps,
                                                     n_samps - n_good,
                                                     p=good_inds / n_good)
            for i_none, j_none in enumerate(np.where(none_inds)[0]):
                out_particles[j_none] = out_particles[none_inds_res_indices[i_none]].copy()
                if store_norm_quants:
                    norm_quants[:, j_none] = norm_quants[:, none_inds_res_indices[i_none]]
            if store_ess_back:
                out_particles.ess_back[i, none_inds] = n_samps

        if verbose:
            if full_sampling:
                print(
                    str(filter_particles[i].latest_observation_time) +
                    " Av Backward ESS: " + str(np.mean(ess_back[i])))
            else:
                print(str(filter_particles[i].latest_observation_time))

        if store_ess_back:
            out_particles.ess_back = ess_back

    if store_norm_quants:
        out_particles.dev_norm_quants = norm_quants

    return out_particles
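A hypothetical usage sketch (not from the source): filter_particles, filter_weights and time_interval_arr are assumed to be the outputs of a forward particle filter run on the same graph with the same mm_model.

# Usage sketch -- all inputs are assumed outputs of a forward particle filter
smoothed_particles = backward_simulate(graph,
                                       filter_particles,    # marginal particles per observation time
                                       filter_weights,      # shape (num_obs, n_samps)
                                       time_interval_arr,   # shape (num_obs - 1,)
                                       mm_model,
                                       max_rejections=20,   # 0 => full backward simulation
                                       verbose=True)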
Example 4
def fixed_lag_stitch_post_split(graph: MultiDiGraph,
                                fixed_particles: MMParticles,
                                new_particles: MMParticles,
                                new_weights: np.ndarray,
                                mm_model: MapMatchingModel,
                                max_rejections: int) -> MMParticles:
    """
    Stitch together fixed_particles with samples from new_particles according to joint fixed-lag posterior
    :param graph: encodes road network, simplified and projected to UTM
    :param fixed_particles: trajectories before stitching time (won't be changed)
    :param new_particles: trajectories after stitching time (to be resampled)
        one observation time overlap with fixed_particles
    :param new_weights: weights applied to new_particles
    :param mm_model: MapMatchingModel
    :param max_rejections: number of rejections to attempt before doing full fixed-lag stitching
        0 will do full fixed-lag stitching and track ess_stitch
    :return: MMParticles object
    """

    n = len(fixed_particles)
    full_fixed_lag_resample = max_rejections == 0

    min_resample_time = new_particles.observation_times[1]
    min_resample_time_indices = [
        np.where(particle[:, 0] == min_resample_time)[0][0]
        if particle is not None else 0 for particle in new_particles
    ]
    original_stitching_distances = np.array([
        new_particles[j][min_resample_time_indices[j], -1]
        if new_particles[j] is not None else 0
        for j in range(n)
    ])

    max_fixed_time = fixed_particles._first_non_none_particle[-1, 0]

    stitch_time_interval = min_resample_time - max_fixed_time

    distance_prior_evals = mm_model.distance_prior_evaluate(
        original_stitching_distances, stitch_time_interval)

    # Coordinates at the last fixed observation time (first row of new_particles overlaps fixed_particles)
    fixed_last_coords = np.array([
        part[0, 5:7] if part is not None else [0, 0] for part in new_particles
    ])
    new_coords = np.array([
        new_particles[j][min_resample_time_indices[j], 5:7]
        if new_particles[j] is not None else [0, 0]
        for j in range(n)
    ])
    deviation_prior_evals = mm_model.deviation_prior_evaluate(
        fixed_last_coords, new_coords, original_stitching_distances)

    original_prior_evals = np.zeros(n)
    pos_inds = new_particles.prior_norm > 1e-5
    original_prior_evals[pos_inds] = distance_prior_evals[pos_inds] \
                                     * deviation_prior_evals[pos_inds] \
                                     * new_particles.prior_norm[pos_inds]

    out_particles = fixed_particles

    # Initiate some required quantities depending on whether to do rejection sampling or not
    if full_fixed_lag_resample:
        ess_stitch_track = np.zeros(n)

        # distance_prior_bound = None
        # adjusted_weights = None
    else:
        ess_stitch_track = None

        pos_prior_bound = mm_model.pos_distance_prior_bound(
            stitch_time_interval)
        prior_bound = mm_model.distance_prior_bound(stitch_time_interval)
        store_out_parts = fixed_particles.copy()

    adjusted_weights = new_weights.copy()
    adjusted_weights[original_prior_evals > 1e-5] /= original_prior_evals[
        original_prior_evals > 1e-5]
    adjusted_weights[original_prior_evals < 1e-5] = 0
    adjusted_weights /= np.sum(adjusted_weights)

    resort_to_full = False

    # Iterate through particles
    for j in range(n):
        fixed_particle = fixed_particles[j]

        # Check if particle is None
        # i.e. fixed lag approx has failed
        if fixed_particle is None:
            out_particles[j] = None
            if full_fixed_lag_resample:
                ess_stitch_track[j] = 0
            continue

        last_edge_fixed = fixed_particle[-1]
        last_edge_fixed_geom = get_geometry(graph, last_edge_fixed[1:4])
        last_edge_fixed_length = last_edge_fixed_geom.length

        if full_fixed_lag_resample:
            # Full resampling
            out_particles[j], ess_stitch_track[j] = full_fixed_lag_stitch(
                fixed_particle, last_edge_fixed, last_edge_fixed_length,
                new_particles, adjusted_weights, stitch_time_interval,
                min_resample_time_indices, mm_model, True)

        else:
            # Rejection sampling
            out_particles[j] = rejection_fixed_lag_stitch(
                fixed_particle,
                last_edge_fixed,
                last_edge_fixed_length,
                new_particles,
                adjusted_weights,
                stitch_time_interval,
                min_resample_time_indices,
                pos_prior_bound,
                mm_model,
                max_rejections,
                break_on_zero=True)
            if out_particles[j] is None:
                # Rejection sampling reached max_rejections -> try full resampling
                out_particles[j] = full_fixed_lag_stitch(
                    fixed_particle, last_edge_fixed, last_edge_fixed_length,
                    new_particles, adjusted_weights, stitch_time_interval,
                    min_resample_time_indices, mm_model, False)

            if isinstance(out_particles[j], int) and out_particles[j] == 0:
                resort_to_full = True
                break

    if resort_to_full:
        for j in range(n):
            fixed_particle = store_out_parts[j]

            # Check if particle is None
            # i.e. fixed lag approx has failed
            if fixed_particle is None:
                out_particles[j] = None
                if full_fixed_lag_resample:
                    ess_stitch_track[j] = 0
                continue

            last_edge_fixed = fixed_particle[-1]
            last_edge_fixed_geom = get_geometry(graph, last_edge_fixed[1:4])
            last_edge_fixed_length = last_edge_fixed_geom.length

            # Rejection sampling with full bound
            out_particles[j] = rejection_fixed_lag_stitch(
                fixed_particle, last_edge_fixed, last_edge_fixed_length,
                new_particles, adjusted_weights, stitch_time_interval,
                min_resample_time_indices, prior_bound, mm_model,
                max_rejections)
            if out_particles[j] is None:
                # Rejection sampling reached max_rejections -> try full resampling
                out_particles[j] = full_fixed_lag_stitch(
                    fixed_particle, last_edge_fixed, last_edge_fixed_length,
                    new_particles, adjusted_weights, stitch_time_interval,
                    min_resample_time_indices, mm_model, False)

    if full_fixed_lag_resample:
        out_particles.ess_stitch = np.append(out_particles.ess_stitch,
                                             np.atleast_2d(ess_stitch_track),
                                             axis=0)

    # Do full resampling where fixed lag approx broke
    none_inds = np.array([p is None for p in out_particles])
    good_inds = ~none_inds
    n_good = good_inds.sum()

    if n_good == 0:
        raise ValueError(
            "Map-matching failed: all stitching probabilities zero,"
            "try increasing the lag or number of particles")

    if n_good < n:
        none_inds_res_indices = np.random.choice(n,
                                                 n - n_good,
                                                 p=good_inds / n_good)
        for i, j in enumerate(np.where(none_inds)[0]):
            out_particles[j] = out_particles[none_inds_res_indices[i]]
        if full_fixed_lag_resample:
            out_particles.ess_stitch[-1, none_inds] = 1 / (new_weights**2).sum()

    return out_particles
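A hypothetical usage sketch (not from the source): fixed_particles and new_particles are assumed to be MMParticles objects sharing exactly one observation time (the stitching time), with new_weights the corresponding weights for new_particles.

# Usage sketch -- inputs are assumed to come from a fixed-lag particle filter update
stitched_particles = fixed_lag_stitch_post_split(graph,
                                                 fixed_particles,  # trajectories before the stitch time (unchanged)
                                                 new_particles,    # proposed continuations (resampled)
                                                 new_weights,
                                                 mm_model,
                                                 max_rejections=20)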
Example 5
def sample_route(
        graph: MultiDiGraph,
        timestamps: Union[float, np.ndarray],
        num_obs: int = None,
        mm_model: MapMatchingModel = ExponentialMapMatchingModel(),
        d_refine: float = 1.,
        start_position: np.ndarray = None,
        num_inter_cut_off: int = None) -> Tuple[np.ndarray, np.ndarray]:
    """
    Samples a synthetic route and noisy observation polyline from the map-matching model on the graph.
    The route is extended one observation interval at a time by sampling from the model's
    distance and deviation priors; the polyline adds Gaussian GPS noise to the route coordinates.

    :param graph: encodes road network, simplified and projected to UTM
    :param timestamps: seconds
        either float if all times between observations are the same, or a series of timestamps in seconds/UNIX timestamp
    :param num_obs: int length of observed polyline to generate
    :param mm_model: MapMatchingModel
    :param d_refine: metres, resolution of distance discretisation
    :param start_position: optional start position; array (u, v, k, alpha)
    :param num_inter_cut_off: maximum number of intersections to cross in the time interval
    :return: tuple with sampled route (array with same shape as a single MMParticles)
        and polyline (array with shape (num_obs, 2))
    """

    if isinstance(timestamps, np.ndarray):
        num_obs = len(timestamps) + 1

    time_interval_arr = get_time_interval_array(timestamps, num_obs)

    if start_position is None:
        start_position = random_positions(graph, 1)[0]

    start_geom = edges.get_geometry(graph, start_position)
    start_coords = edges.edge_interpolate(start_geom, start_position[-1])

    full_sampled_route = np.concatenate([[0.], start_position, start_coords,
                                         [0.]])[np.newaxis]

    for k in range(num_obs - 1):
        time_interval = time_interval_arr[k]
        d_max = mm_model.d_max(time_interval)

        num_inter_cut_off_i = max(int(time_interval / 1.5), 10) \
            if num_inter_cut_off is None else num_inter_cut_off

        prev_pos = full_sampled_route[-1:].copy()
        prev_pos[0, 0] = 0.
        prev_pos[0, -1] = 0.

        possible_routes = proposal.get_all_possible_routes_overshoot(
            graph, prev_pos, d_max, num_inter_cut_off=num_inter_cut_off_i)

        # Get all possible positions on each route
        discretised_routes_indices_list = []
        discretised_routes_list = []
        for i, route in enumerate(possible_routes):
            # All possible end positions of route
            discretised_edge_matrix = edges.discretise_edge(
                graph, route[-1, 1:4], d_refine)

            if route.shape[0] == 1:
                discretised_edge_matrix = discretised_edge_matrix[
                    discretised_edge_matrix[:, 0] >= full_sampled_route[-1, 4]]
                discretised_edge_matrix[:, -1] -= discretised_edge_matrix[-1, -1]
            else:
                discretised_edge_matrix[:, -1] += route[-2, -1]

            discretised_edge_matrix = discretised_edge_matrix[
                discretised_edge_matrix[:, -1] < d_max + 1e-5]

            # Track route index and append to list
            if discretised_edge_matrix is not None and len(
                    discretised_edge_matrix) > 0:
                discretised_routes_indices_list += [
                    np.ones(discretised_edge_matrix.shape[0], dtype=int) * i
                ]
                discretised_routes_list += [discretised_edge_matrix]

        # Concatenate into numpy.ndarray
        discretised_routes_indices = np.concatenate(
            discretised_routes_indices_list)
        discretised_routes = np.concatenate(discretised_routes_list)

        if len(discretised_routes) == 0 or (len(discretised_routes) == 1 and
                                            discretised_routes[0][-1] == 0):
            warnings.warn('sample_route exited prematurely')
            break

        # Distance prior evals
        distances = discretised_routes[:, -1]
        distance_prior_evals = mm_model.distance_prior_evaluate(
            distances, time_interval)

        # Deviation prior evals
        deviation_prior_evals = mm_model.deviation_prior_evaluate(
            full_sampled_route[-1, 5:7], discretised_routes[:, 1:3],
            discretised_routes[:, -1])

        # Normalise prior/transition probabilities
        prior_probs = distance_prior_evals * deviation_prior_evals
        prior_probs_norm_const = prior_probs.sum()

        sampled_dis_route_index = np.random.choice(
            len(prior_probs), 1, p=prior_probs / prior_probs_norm_const)[0]
        sampled_dis_route = discretised_routes[sampled_dis_route_index]

        # Append sampled route to old particle
        sampled_route = possible_routes[
            discretised_routes_indices[sampled_dis_route_index]]

        full_sampled_route = proposal.process_proposal_output(
            full_sampled_route, sampled_route, sampled_dis_route,
            time_interval, True)

    obs_indices = edges.observation_time_indices(full_sampled_route[:, 0])
    polyline = full_sampled_route[obs_indices, 5:7] \
               + mm_model.gps_sd * np.random.normal(size=(obs_indices.sum(), 2))

    return full_sampled_route, polyline
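A hypothetical usage sketch (not from the source): graph is assumed to be a simplified, UTM-projected MultiDiGraph; it generates 30 observations spaced 15 seconds apart with the default ExponentialMapMatchingModel.

# Usage sketch -- graph is assumed to be a UTM-projected MultiDiGraph
sampled_route, observed_polyline = sample_route(graph,
                                                timestamps=15.,  # seconds between observations
                                                num_obs=30)
print(observed_polyline.shape)  # (30, 2) noisy GPS-like coordinates, fewer if the sampler exits early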