def mix_probabilities(
    self,
    immstate: MixtureParameters[MT],
    # sampling time
    Ts: float,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate the predicted mode probability and the mixing probabilities.

    Returns a pair with shapes (M,) and (M, M); row s of the mixing
    probabilities holds the mixture weights for mode s.
    """
    # Discrete Bayes flip of the prior mode weights through the
    # transition matrix PI.
    pred_mode_prob, mixing_prob = discretebayes.discrete_bayes(
        immstate.weights, self.PI
    )

    assert pred_mode_prob.shape == (
        self.PI.shape[0],
    ), "IMM.mix_probabilities: Wrong shape on the predicted mode probabilities"
    assert (
        mixing_prob.shape == self.PI.shape
    ), "IMM.mix_probabilities: Wrong shape on mixing probabilities"
    assert np.all(
        np.isfinite(pred_mode_prob)
    ), "IMM.mix_probabilities: predicted mode probabilities not finite"
    assert np.all(
        np.isfinite(mixing_prob)
    ), "IMM.mix_probabilities: mix probabilities not finite"
    assert np.allclose(
        mixing_prob.sum(axis=1), 1
    ), "IMM.mix_probabilities: mix probabilities does not sum to 1 per mode"

    return pred_mode_prob, mixing_prob
def reduce_mixture(
    self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate."""
    # Outer mixture weights, e.g. association probabilities Pr(a).
    outer_weights = immstate_mixture.weights
    # Row j holds the mode weights of inner mixture j, i.e. Pr(s | a=j).
    cond_mode_prob = np.array(
        [inner.weights.ravel() for inner in immstate_mixture.components]
    )

    # Flip the conditioning order with Bayes: get Pr(s) and Pr(a | s).
    mode_prob, cond_component_prob = discrete_bayes(
        outer_weights, cond_mode_prob
    )

    # zip(*list_a_of_lists_b) transposes, so entry s gathers the mode-s
    # component from every outer component.
    per_mode_components = zip(
        *[inner.components for inner in immstate_mixture.components]
    )
    mode_states = [
        filt.reduce_mixture(MixtureParameters(prob_a_given_s, comps))
        for filt, prob_a_given_s, comps in zip(
            self.filters, cond_component_prob, per_mode_components
        )
    ]

    return MixtureParameters(mode_prob, mode_states)
def reduce_mixture(
    self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate.

    With Pr(a) = immstate_mixture.weights,
    Pr(s | a=j) = immstate_mixture.components[j].weights and
    p(x | s=i, a=j) = immstate_mixture.components[j].components[i],
    a discrete Bayes flip gives Pr(s) and Pr(a=j | s), so the mode
    conditioned estimate is

        p(x | s) = sum_j Pr(a=j | s) p(x | s, a=j)

    i.e. one discrete Bayes call, then one self.filters[s].reduce_mixture
    per mode s (the sum over a, 7.54 in the book, happens inside it).
    """
    # Pr(a), e.g. the association weights/beta.
    prior = immstate_mixture.weights
    # Element [j, s] is Pr(s | a=j).
    cond_mode_prob = np.array(
        [c.weights.ravel() for c in immstate_mixture.components]
    )

    # Flip conditioning order: Pr(s) and Pr(a | s).
    mode_prob, cond_assoc_prob = discretebayes.discrete_bayes(
        prior, cond_mode_prob
    )

    # Transpose so entry s collects the mode-s parameters across all
    # associations: imm_mixture.components[j].components[s].
    components_per_mode = zip(
        *[comp.components for comp in immstate_mixture.components]
    )

    mode_states: list[GaussParams] = []
    for filt, assoc_weights, assoc_components in zip(
        self.filters, cond_assoc_prob, components_per_mode
    ):
        mode_states.append(
            filt.reduce_mixture(
                MixtureParameters(assoc_weights, assoc_components)
            )
        )

    return MixtureParameters(mode_prob, mode_states)
def reduce_mixture(
    self,
    immstate_mixture: MixtureParameters[MixtureParameters[
        MT]]  # posterior density of x_k (double mixture)
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate.

    One discrete Bayes flip, then one self.filters[s].reduce_mixture
    per mode s.
    """
    # Pr{a}: the association weights/beta as an array.
    assoc_prob = immstate_mixture.weights
    # Pr{s|a}: row j holds the mode weights conditioned on association j.
    mode_prob_given_assoc = np.array(
        [c.weights.ravel() for c in immstate_mixture.components]
    )

    # Bayes: input Pr(a), Pr(s|a); output Pr(s), Pr(a|s).
    mode_prob, assoc_prob_given_mode = discretebayes.discrete_bayes(
        assoc_prob, mode_prob_given_assoc
    )

    # State parameters gathered from the associations, one entry per mode s.
    mode_states: List[GaussParams] = []
    for s, (filt, pr_a_given_s) in enumerate(
        zip(self.filters, assoc_prob_given_mode)
    ):
        # The mode-s parameters across every association j are
        # imm_mixture.components[j].components[s].
        params_for_mode = np.array(
            [c.components[s] for c in immstate_mixture.components]
        )
        # Gaussian mixture weighted by Pr(a|s), reduced by the mode filter.
        mode_states.append(
            filt.reduce_mixture(
                MixtureParameters(pr_a_given_s, params_for_mode)
            )
        )

    return MixtureParameters(mode_prob, mode_states)
def mix_probabilities(
    self,
    immstate: MixtureParameters[MT],
    Ts: float,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate the predicted mode probability and the mixing probabilities.

    Returned shapes are (M,) and (M, M) respectively.
    """
    # Discrete Bayes flip of the mode weights through the transition
    # matrix PI.
    pred_probs, mix_probs = discretebayes.discrete_bayes(
        immstate.weights, self.PI
    )

    # Sanity checks (debugging aids).
    assert np.all(np.isfinite(pred_probs))
    assert np.all(np.isfinite(mix_probs))
    assert np.allclose(mix_probs.sum(axis=1), 1)

    return pred_probs, mix_probs
def reduce_mixture(
    self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate.

    Flips the conditioning order with discrete Bayes to obtain Pr(s) and
    Pr(a | s), then, for each mode s, reduces the mixture of that mode's
    components across the outer mixture with self.filters[s].

    Parameters
    ----------
    immstate_mixture : outer mixture (weights Pr(a)) whose components are
        immstates (inner weights Pr(s | a), inner components p(x | s, a)).

    Returns
    -------
    MixtureParameters[MT]
        Single immstate: weights Pr(s), one reduced state per mode.
    """
    # extract probabilities as array
    weights = immstate_mixture.weights
    component_conditioned_mode_prob = np.array(
        [c.weights.ravel() for c in immstate_mixture.components])

    # flip conditioning order with Bayes: Pr(s) and Pr(a | s)
    mode_prob, mode_conditioned_component_prob = discretebayes.discrete_bayes(
        weights, component_conditioned_mode_prob)

    # zip(*immstate_mixture.components) transposes list_a of lists_b to
    # list_b of lists_a: entry s gathers the mode-s component from every
    # outer component, which filters[s] reduces with weights Pr(a | s).
    # (Previously left as `mode_states = None`, which returned a broken
    # mixture with None components.)
    mode_states = [
        filt.reduce_mixture(MixtureParameters(weights_s, components_s))
        for filt, weights_s, components_s in zip(
            self.filters,
            mode_conditioned_component_prob,
            zip(*[comp.components for comp in immstate_mixture.components]),
        )
    ]

    immstate_reduced = MixtureParameters(mode_prob, mode_states)

    return immstate_reduced
def mix_probabilities(
    self,
    immstate: MixtureParameters[MT],
    # sampling time
    Ts: float,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate the predicted mode probability and the mixing probabilities.

    Shapes are (M,) and (M, M); row s of the mixing probabilities holds
    the mixture weights for mode s.
    """
    # Prior: the current mode weights; conditional: the transition
    # matrix self.PI.
    pred_mode_prob, mixing_prob = discrete_bayes(
        pr=immstate.weights, cond_pr=self.PI
    )

    # Optional assertions for debugging.
    assert np.all(np.isfinite(pred_mode_prob))
    assert np.all(np.isfinite(mixing_prob))
    assert np.allclose(mixing_prob.sum(axis=1), 1)

    return pred_mode_prob, mixing_prob
def mix_probabilities(
    self,
    immstate: MixtureParameters[MT],
    # sampling time
    Ts: float,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate the predicted mode probability and the mixing probabilities.

    Step 1 of the IMM cycle (eq. 6.27): a discrete Bayes flip of the mode
    weights through the transition matrix. Shapes: (M,) and (M, M), where
    row s of the mixing probabilities weights the mixing into mode s.
    """
    pred_probs, mix_probs = discretebayes.discrete_bayes(
        immstate.weights, self.PI
    )

    # Optional assertions for debugging
    assert np.all(np.isfinite(pred_probs))
    assert np.all(np.isfinite(mix_probs))
    assert np.allclose(mix_probs.sum(axis=1), 1)

    return pred_probs, mix_probs
def mix_probabilities(  # step 1 in the workflow of the IMM method
    self,
    immstate: MixtureParameters[MT],
    # sampling time
    Ts: float,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate the predicted mode probability and the mixing probabilities.

    The prior Pr{s_k-1 | z_1:k-1} (immstate.weights) and the conditional
    Pr{s_k | s_k-1, z_1:k-1} (the transition matrix PI) are flipped with
    discrete Bayes. Shapes: (M,) and (M, M), where row s of the mixing
    probabilities holds the mixture weights for mode s; the predicted mode
    probabilities give the probability of being in each mode.
    """
    predicted, mixing = discretebayes.discrete_bayes(
        immstate.weights, self.PI
    )

    assert predicted.shape == (
        self.PI.shape[0],
    ), "IMM.mix_probabilities: Wrong shape on the predicted mode probabilities"
    assert (
        mixing.shape == self.PI.shape
    ), "IMM.mix_probabilities: Wrong shape on mixing probabilities"
    assert np.all(
        np.isfinite(predicted)
    ), "IMM.mix_probabilities: predicted mode probabilities not finite"
    assert np.all(
        np.isfinite(mixing)
    ), "IMM.mix_probabilities: mix probabilities not finite"
    assert np.allclose(
        mixing.sum(axis=1), 1
    ), "IMM.mix_probabilities: mix probabilities does not sum to 1 per mode"

    return predicted, mixing
def reduce_mixture(
    self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate.

    With Pr(a) = immstate_mixture.weights,
    Pr(s | a=j) = immstate_mixture.components[j].weights and
    p(x | s, a=j) = immstate_mixture.components[j].components[s],
    discrete Bayes yields Pr(s) and Pr(a | s), and the mode conditioned
    estimate

        p(x | s) = sum_j Pr(a=j | s) p(x | s, a=j)

    is obtained by reducing the per-mode mixture with self.filters[s].
    """
    # Pr{a | Z_1:k}, e.g. the association weights/beta.
    assoc_weights = immstate_mixture.weights
    # Element [j, s] is Pr{s | a=j}.
    assoc_cond_mode_prob = np.array(
        [c.weights.ravel() for c in immstate_mixture.components]
    )

    # Flip conditioning order with Bayes: Pr(s) and Pr(a | s).
    mode_prob, mode_cond_assoc_prob = discretebayes.discrete_bayes(
        assoc_weights, assoc_cond_mode_prob
    )

    # For each mode s: gather the parameters of that mode across every
    # association (imm_mixture.components[j].components[s]) and reduce
    # the resulting mixture with the mode's filter.
    num_modes = len(self.filters)
    mode_states: List[GaussParams] = []
    for s in range(num_modes):
        comps_for_mode = [
            imm_comp.components[s]
            for imm_comp in immstate_mixture.components
        ]
        mode_states.append(
            self.filters[s].reduce_mixture(
                MixtureParameters(mode_cond_assoc_prob[s], comps_for_mode)
            )
        )

    return MixtureParameters(mode_prob, mode_states)
def reduce_mixture(
    self, immstate_mixture: MixtureParameters[MixtureParameters[MT]]
) -> MixtureParameters[MT]:
    """Approximate a mixture of immstates as a single immstate.

    With Pr(a) = immstate_mixture.weights,
    Pr(s | a=j) = immstate_mixture.components[j].weights and
    p(x | s=i, a=j) = immstate_mixture.components[j].components[i],
    one discrete Bayes flip gives Pr(s) and Pr(a | s); each mode's
    mixture over associations is then reduced with self.filters[s] so
    that p(x | s) = sum_j Pr(a=j | s) p(x | s, a=j).
    """
    # Pr(a): the association weights/beta.
    weights = immstate_mixture.weights
    # Element [j, s] is Pr(s | a=j).
    component_conditioned_mode_prob = np.array(
        [c.weights.ravel() for c in immstate_mixture.components]
    )

    # Flip conditioning order with Bayes to get Pr(s) and Pr(a | s).
    mode_prob, mode_conditioned_component_prob = discretebayes.discrete_bayes(
        weights, component_conditioned_mode_prob
    )

    n_modes = len(immstate_mixture.components[0].weights)
    n_assoc = len(immstate_mixture.weights)

    # Per mode s: pair Pr(a | s) with the mode-s component of every
    # association, imm_mixture.components[a].components[s].
    per_mode_mixtures = []
    for s in range(n_modes):
        assoc_weights = np.array(
            [mode_conditioned_component_prob[s, a] for a in range(n_assoc)]
        )
        assoc_components = [
            immstate_mixture.components[a].components[s]
            for a in range(n_assoc)
        ]
        per_mode_mixtures.append(
            MixtureParameters(assoc_weights, assoc_components)
        )

    mode_states: List[GaussParams] = [
        self.filters[s].reduce_mixture(mode_mixture)
        for s, mode_mixture in enumerate(per_mode_mixtures)
    ]

    return MixtureParameters(mode_prob, mode_states)