Example #1
PI33 = 0.95  # CV_high -> CV_high


p10 = 0.8  # initial values for the mode probabilities (a track rarely starts in a turn, but we do not know when one will occur)
p11 = 0.1
p12 = 0.1

PI = np.array([[PI11, PI12, PI13], [PI21, PI22, PI23], [PI31, PI32, PI33]])
assert np.allclose(np.sum(PI, axis=1), 1), "rows of PI must sum to 1"

mean_init = Xgt[0]
mean_init = np.append(mean_init, 0.1)
cov_init = np.diag([sigma_z*10, sigma_z*10, 0.1, 0.1, 0.1]) ** 2
mode_probabilities_init = np.array([p10, p11, p12])
mode_states_init = GaussParams(mean_init, cov_init)
init_imm_state = MixtureParameters(mode_probabilities_init, [mode_states_init] * 3)

assert np.allclose(
    np.sum(mode_probabilities_init), 1
), "initial mode probabilities must sum to 1"

# make model
measurement_model = measurementmodels.CartesianPosition(sigma_z, state_dim=5)
dynamic_models: List[dynamicmodels.DynamicModel] = []
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV, n=5))
dynamic_models.append(dynamicmodels.ConstantTurnrate(sigma_a_CT, sigma_omega))
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV_high, n=5))
ekf_filters = []
ekf_filters.append(ekf.EKF(dynamic_models[0], measurement_model))
ekf_filters.append(ekf.EKF(dynamic_models[1], measurement_model))
ekf_filters.append(ekf.EKF(dynamic_models[2], measurement_model))
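
# The three mode-matched EKFs are combined into one IMM filter with the transition
# matrix PI and the initial mixture above. A minimal sketch, assuming the same imm.IMM
# constructor as in Example #2 below:
imm_filter = imm.IMM(ekf_filters, PI)
assert len(init_imm_state.components) == len(ekf_filters), "one initial mode state per filter"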
Example #2
# make model
measurement_model = measurementmodels.CartesianPosition(sigma_z, state_dim=5)
CV = dynamicmodels.WhitenoiseAccelleration(sigma_a_CV, n=5)
CT = dynamicmodels.ConstantTurnrate(sigma_a_CT, sigma_omega)
ekf_filters: List[StateEstimator[GaussParams]] = []
ekf_filters.append(ekf.EKF(CV, measurement_model))
ekf_filters.append(ekf.EKF(CT, measurement_model))
imm_filter: imm.IMM[GaussParams] = imm.IMM(ekf_filters, PI)

init_weights = np.array([0.5] * 2)
init_mean = [0] * 5
init_cov = np.diag(
    [1] * 5
)  # should be tuned: use intuition, e.g. a diagonal of the guessed distances to the true values, squared.
init_mode_states = [GaussParams(init_mean, init_cov)] * 2  # the same initial density for both modes
init_immstate = MixtureParameters(init_weights, init_mode_states)

imm_preds = []
imm_upds = []
imm_ests = []
updated_immstate = init_immstate
for zk in Z:
    predicted_immstate = imm_filter.predict(updated_immstate, Ts)
    updated_immstate = imm_filter.update(zk, predicted_immstate)
    estimate = imm_filter.estimate(updated_immstate)

    imm_preds.append(predicted_immstate)
    imm_upds.append(updated_immstate)
    imm_ests.append(estimate)

x_est = np.array([est.mean for est in imm_ests])
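
# To see how the filter switches between CV and CT, the posterior mode probabilities
# can be pulled out of the updated mixtures the same way as the means. A minimal
# sketch, assuming MixtureParameters.weights holds the mode probabilities as above:
mode_prob = np.array([upd.weights for upd in imm_upds])  # one row per time step
pos_est = x_est[:, :2]                                   # position is the first two state components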
Example #3
    def reduce_mixture(
        self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
    ) -> MixtureParameters[MT]:
        """
        Approximate a mixture of immstates as a single immstate.

        We have Pr(a), Pr(s | a), p(x| s, a).
            - Pr(a) = immstate_mixture.weights
            - Pr(s | a=j) = immstate_mixture.components[j].weights
            - p(x | s=i, a=j) = immstate_mixture.components[j].components[i] # ie. Gaussian parameters

        So p(x, s) = sum_j Pr(a=j) Pr(s| a=j) p(x| s, a=j),
        which we want as a single probability Gaussian pair. Multiplying the above with
        1 = Pr(s)/Pr(s) and moving the denominator a little we have
        p(x, s) = Pr(s) sum_j [ Pr(a=j) Pr(s| a=j)/Pr(s) ]  p(x| s, a=j),
        where the bracketed term is Bayes' rule for Pr(a=j | s). Thus the mode conditioned state estimate is
        p(x | s) = sum_j Pr(a=j| s) p(x| s, a=j)

        That is:
            - we need to invoke discrete Bayes once, and
            - call self.filters[s].reduce_mixture once for each mode s
        """

        # extract probabilities as array
        ## eg. association weights/beta: Pr(a)
        weights = immstate_mixture.weights
        ## eg. the association conditioned mode probabilities element [j, s] is for association j and mode s: Pr(s | a = j)
        component_conditioned_mode_prob = np.array(
            [c.weights.ravel() for c in immstate_mixture.components])

        # flip the conditioning order with Bayes to get Pr(s) and Pr(a | s)
        mode_prob, mode_conditioned_component_prob = discretebayes.discrete_bayes(
            weights, component_conditioned_mode_prob)

        # We need to gather all the state parameters from the associations for mode s into a
        # single list in order to reduce it to a single parameter set.
        # for instance loop through the modes, gather the paramters for the association of this mode
        # into a single list and append the result of self.filters[s].reduce_mixture
        # The mode s for association j should be available as imm_mixture.components[j].components[s]

        n_modes = len(immstate_mixture.components[0].weights)
        n_associations = len(immstate_mixture.weights)

        # for each mode s, gather the association conditioned densities and their
        # weights Pr(a | s) into one mixture over the associations
        mode_indexed_association_mixture = []
        for mode_idx in range(n_modes):
            weights_s = np.array([
                mode_conditioned_component_prob[mode_idx, assoc_idx]
                for assoc_idx in range(n_associations)
            ])
            components_s = [
                immstate_mixture.components[assoc_idx].components[mode_idx]
                for assoc_idx in range(n_associations)
            ]
            mode_indexed_association_mixture.append(
                MixtureParameters(weights_s, components_s))

        # reduce each per-mode mixture with that mode's filter
        mode_states: List[GaussParams] = [
            self.filters[mode_idx].reduce_mixture(mode_mixture)
            for mode_idx, mode_mixture in enumerate(mode_indexed_association_mixture)
        ]

        immstate_reduced = MixtureParameters(mode_prob, mode_states)

        return immstate_reduced
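
# The Bayes flip above is delegated to discretebayes.discrete_bayes, which is not part
# of this snippet. A minimal, hypothetical sketch of what that call is expected to
# compute, matching the shapes used above (prior Pr(a) and conditional Pr(s | a) indexed
# [a, s] in; marginal Pr(s) and flipped conditional Pr(a | s) indexed [s, a] out).
# This is a stand-in for illustration, not the course implementation.
import numpy as np

def discrete_bayes_sketch(pr_a: np.ndarray, pr_s_given_a: np.ndarray):
    """Hypothetical stand-in for discretebayes.discrete_bayes."""
    joint = pr_a[:, None] * pr_s_given_a      # Pr(a, s), shape (A, S)
    pr_s = joint.sum(axis=0)                  # marginal Pr(s), shape (S,)
    pr_a_given_s = joint.T / pr_s[:, None]    # Bayes: Pr(a | s), shape (S, A)
    return pr_s, pr_a_given_s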
Example #4
    def _(self, init: Sequence) -> MixtureParameters[MT]:
        weights = self.initial_mode_probabilities
        components = self.init_components(init)
        return MixtureParameters(weights, components)
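
# The underscore-named method above looks like an overload registered on a single
# dispatch method (e.g. functools.singledispatchmethod), so that init_filter_state can
# accept several input types. A minimal, hypothetical sketch of that pattern; the
# decorator, class, and attribute names here are assumptions, not taken from the snippet.
from collections.abc import Sequence
from functools import singledispatchmethod

class InitDispatchSketch:
    @singledispatchmethod
    def init_filter_state(self, init):
        raise TypeError(f"cannot build an initial mixture from {type(init)}")

    # registered for Sequence inputs via the annotation, mirroring `def _` above;
    # initial_mode_probabilities and init_components are assumed to exist on the class
    @init_filter_state.register
    def _(self, init: Sequence):
        weights = self.initial_mode_probabilities
        components = self.init_components(init)
        return MixtureParameters(weights, components)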
Example #5
assert np.allclose(np.sum(PI1, axis=1), 1), "rows of PI must sum to 1"

PI11 = 0.95
PI22 = 0.95
PI33 = 0.95


PI2 = np.array([
    [PI11, 3 * (1 - PI11) / 4, (1 - PI11) / 4],
    [3 * (1 - PI22) / 4, PI22, (1 - PI22) / 4],
    [3 * (1 - PI33) / 4, (1 - PI33) / 4, PI33],
])

# init values
mean_init = np.array([7096, 3627, 0, 0, 0])
cov_init = np.diag([10, 10, 20, 20, 0.1]) ** 2  
mode_probabilities_init1 = np.array([p10, (1 - p10)])
mode_states_init = GaussParams(mean_init, cov_init)
init_imm_state1 = MixtureParameters(mode_probabilities_init1, [mode_states_init] * 2)
mode_probabilities_init2 = np.array([0.34, 0.33, 0.33])  # arbitrary; does not have much effect
init_imm_state2 = MixtureParameters(mode_probabilities_init2, [mode_states_init] * 3)

assert np.allclose(
    np.sum(mode_probabilities_init1), 1
), "initial mode probabilities must sum to 1"


# make model
measurement_model = measurementmodels.CartesianPosition(sigma_z, state_dim=5)
dynamic_models: List[dynamicmodels.DynamicModel] = []
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV, n=5))
dynamic_models.append(dynamicmodels.ConstantTurnrate(sigma_a_CT, sigma_omega))
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV_H, n=5))
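
# The snippet is cut off before the filter banks are assembled. A minimal sketch of the
# remaining steps, following the pattern of the other examples (PI1 is the 2-mode
# transition matrix asserted above but not shown here):
ekf_filters = [ekf.EKF(dyn, measurement_model) for dyn in dynamic_models]

imm_filter1 = imm.IMM(ekf_filters[:2], PI1)   # 2-mode IMM: CV + CT
imm_filter2 = imm.IMM(ekf_filters, PI2)       # 3-mode IMM: CV + CT + high-noise CV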
Example #6
PI32 = 0.175
PI33 = 0.7

PI = np.array([
    [PI11, PI12, PI13], 
    [PI21, PI22, PI23], 
    [PI31, PI32, PI33]
])

assert np.allclose(np.sum(PI, axis=1), 1), "rows of PI must sum to 1"

mean_init = np.array([7116, 3617, 0, 0, 0])  # Sverre: roughly where the track begins.
cov_init = np.diag([14, 14, 2, 2, 0.01]) ** 2
mode_probabilities_init = np.array([0.7, 0.1, 0.2])  # Sverre: extended because of the third mode
mode_states_init = GaussParams(mean_init, cov_init)
init_imm_state = MixtureParameters(mode_probabilities_init, [mode_states_init] * 3)  # Sverre: must be repeated three times, not two, because of the third mode

assert np.allclose(
    np.sum(mode_probabilities_init), 1
), "initial mode probabilities must sum to 1"

# make model
measurement_model = measurementmodels.CartesianPosition(sigma_z, state_dim=5)
dynamic_models: List[dynamicmodels.DynamicModel] = []
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV_high, n=5))  # five states: two for position, two for velocity, and one for turn rate
dynamic_models.append(dynamicmodels.WhitenoiseAccelleration(sigma_a_CV, n=5))
dynamic_models.append(dynamicmodels.ConstantTurnrate(sigma_a_CT, sigma_omega))
ekf_filters = []
ekf_filters.append(ekf.EKF(dynamic_models[0], measurement_model))
ekf_filters.append(ekf.EKF(dynamic_models[1], measurement_model))
ekf_filters.append(ekf.EKF(dynamic_models[2], measurement_model))
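
# As in Example #2, the three EKFs would then be wrapped in an IMM filter and run over
# the measurements. A minimal sketch, assuming a measurement sequence Z and sampling
# time Ts as in the other examples:
imm_filter = imm.IMM(ekf_filters, PI)

updated_immstate = init_imm_state
imm_ests = []
for zk in Z:
    predicted_immstate = imm_filter.predict(updated_immstate, Ts)
    updated_immstate = imm_filter.update(zk, predicted_immstate)
    imm_ests.append(imm_filter.estimate(updated_immstate))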