Example no. 1
def viterbi(HMM, ev, prior):
    """[Equation 15.11]
    Viterbi algorithm to find the most likely sequence. Computes the best path,
    given an HMM model and a sequence of observations."""
    t = len(ev)
    ev = ev.copy()  # avoid mutating the caller's evidence list
    ev.insert(0, None)

    m = [[0.0, 0.0] for _ in range(len(ev) - 1)]

    # the recursion is initialized with m1 = forward(P(X0), e1)
    m[0] = forward(HMM, prior, ev[1])

    for i in range(1, t):
        m[i] = element_wise_product(HMM.sensor_dist(ev[i + 1]), [
            max(element_wise_product(HMM.transition_model[0], m[i - 1])),
            max(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])

    path = [0.0] * (len(ev) - 1)
    # the construction of the most likely sequence starts in the final state with the largest
    # probability and runs backwards; this version only records the largest message value
    # at each step and never stores the maximizing predecessor of each x_t, so it returns
    # probabilities rather than states (Example no. 5 below adds the missing backtracking)
    for i in range(t - 1, -1, -1):
        path[i] = max(m[i])

    return path
Example no. 2
def forward_backward(HMM, ev):
    """
    [Figure 15.4]
    Forward-Backward algorithm for smoothing. Computes posterior probabilities
    of a sequence of states given a sequence of observations."""
    t = len(ev)
    ev = ev.copy()  # avoid mutating the caller's evidence list
    ev.insert(0, None)  # to make the code look similar to the pseudocode

    fv = [[0.0, 0.0] for _ in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]  # we don't need bv; but we will have a list of all backward messages here
    sv = [[0, 0] for _ in range(len(ev))]

    fv[0] = HMM.prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    # the backward pass runs from t down to 0; sv[i] combines the forward
    # message fv[i] with the backward message for the same time step
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))
        if i > 0:
            b = backward(HMM, b, ev[i])
            bv.append(b)

    return sv
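
These snippets all lean on a handful of small vector helpers (element_wise_product, vector_add, scalar_vector_product, normalize) and on a HiddenMarkovModel object exposing transition_model, sensor_dist and prior. None of those definitions appear in the excerpts; the following is a minimal sketch of what they would need to look like for a two-state model, inferred from how the examples call them.

def element_wise_product(x, y):
    # component-wise product of two equal-length vectors
    return [a * b for a, b in zip(x, y)]


def vector_add(x, y):
    # component-wise sum of two equal-length vectors
    return [a + b for a, b in zip(x, y)]


def scalar_vector_product(k, v):
    # multiply every component of v by the scalar k
    return [k * a for a in v]


def normalize(dist):
    # rescale a vector of non-negative weights so it sums to 1
    total = sum(dist)
    return [w / total for w in dist]


class HiddenMarkovModel:
    """Two-state HMM: transition_model[i] is the distribution over the next
    state given state i; sensor_model[0][s] and sensor_model[1][s] are the
    likelihoods of observing True / False in state s."""

    def __init__(self, transition_model, sensor_model, prior=None):
        self.transition_model = transition_model
        self.sensor_model = sensor_model
        self.prior = prior if prior is not None else [0.5, 0.5]

    def sensor_dist(self, ev):
        # likelihood vector of the observation ev over the two states
        return self.sensor_model[0] if ev else self.sensor_model[1]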
Example no. 3
def forward(HMM, fv, ev):
    """One step of filtering: update the state estimate fv with the new
    observation ev and return the normalized forward message."""
    prediction = vector_add(
        scalar_vector_product(fv[0], HMM.transition_model[0]),
        scalar_vector_product(fv[1], HMM.transition_model[1]))
    sensor_dist = HMM.sensor_dist(ev)

    return normalize(element_wise_product(sensor_dist, prediction))
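
As a quick sanity check, here is one filtering step on the classic umbrella world; the transition and sensor numbers below are the textbook values, assumed here for illustration together with the helper sketch above.

umbrella_hmm = HiddenMarkovModel(
    transition_model=[[0.7, 0.3], [0.3, 0.7]],
    sensor_model=[[0.9, 0.2], [0.1, 0.8]])

# uniform prior, then an umbrella is observed on day 1
f1 = forward(umbrella_hmm, [0.5, 0.5], True)
print([round(p, 3) for p in f1])  # [0.818, 0.182]: rain has become likely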
Example no. 4
def backward(HMM, b, ev):
    """One step of the backward pass: fold the observation ev into the
    backward message b and propagate it one step towards the past."""
    sensor_dist = HMM.sensor_dist(ev)
    prediction = element_wise_product(sensor_dist, b)

    return normalize(
        vector_add(
            scalar_vector_product(prediction[0], HMM.transition_model[0]),
            scalar_vector_product(prediction[1], HMM.transition_model[1])))
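
With the same assumed model, a single backward step starting from the all-ones message and folding in an umbrella observation:

b1 = backward(umbrella_hmm, [1.0, 1.0], True)
print([round(p, 3) for p in b1])  # [0.627, 0.373]

Note that this snippet normalizes the backward message at every step; the book's pseudocode does not, but the difference washes out in smoothing because each smoothed estimate is normalized anyway.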
Example no. 5
import numpy as np


def viterbi(HMM, ev):
    """
    [Equation 15.11]
    Viterbi algorithm to find the most likely sequence. Computes the best path and the
    corresponding probabilities, given an HMM model and a sequence of observations.
    """
    t = len(ev)
    ev = ev.copy()
    ev.insert(0, None)

    m = [[0.0, 0.0] for _ in range(len(ev) - 1)]

    # the recursion is initialized with m1 = forward(P(X0), e1)
    m[0] = forward(HMM, HMM.prior, ev[1])
    # keep track of maximizing predecessors
    backtracking_graph = []

    for i in range(1, t):
        m[i] = element_wise_product(HMM.sensor_dist(ev[i + 1]), [
            max(element_wise_product(HMM.transition_model[0], m[i - 1])),
            max(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])
        backtracking_graph.append([
            np.argmax(element_wise_product(HMM.transition_model[0], m[i - 1])),
            np.argmax(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])

    # computed probabilities
    ml_probabilities = [0.0] * (len(ev) - 1)
    # most likely sequence
    ml_path = [True] * (len(ev) - 1)

    # the construction of the most likely sequence starts in the final state with the largest probability, and
    # runs backwards; the algorithm needs to store for each xt its predecessor xt-1 maximizing its probability
    i_max = np.argmax(m[-1])

    for i in range(t - 1, -1, -1):
        ml_probabilities[i] = m[i][i_max]
        ml_path[i] = i_max == 0  # states are encoded as booleans: True means state 0
        if i > 0:
            i_max = backtracking_graph[i - 1][i_max]

    return ml_path, ml_probabilities
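
Run on the umbrella world (model values assumed as in the sketch above), this version reproduces the textbook result; True encodes state 0, i.e. rain:

evidence = [True, True, False, True, True]
ml_path, ml_probabilities = viterbi(umbrella_hmm, evidence)
print(ml_path)                                  # [True, True, False, True, True]
print([round(p, 4) for p in ml_probabilities])  # roughly [0.8182, 0.5155, 0.1237, 0.0334, 0.021]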
Example no. 6
def forward_backward(HMM, ev, prior):
    """Algoritmo forward-backward para suavização. Calcula probabilidades posteriores
     De uma seqüência de estados dada uma seqüência de observações."""
    t = len(ev)
    ev.insert(0, None)

    fv = [[0.0, 0.0] for _ in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]
    sv = [[0, 0] for _ in range(len(ev))]

    fv[0] = prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    # the backward pass runs from t down to 0; sv[i] combines the forward
    # message fv[i] with the backward message for the same time step
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))
        if i > 0:
            b = backward(HMM, b, ev[i])
            bv.append(b)

    return sv
Example no. 7
def forward_backward(HMM, ev, prior):
    """[Figure 15.4]
    Forward-Backward algorithm for smoothing. Computes posterior probabilities
    of a sequence of states given a sequence of observations."""
    t = len(ev)
    ev = ev.copy()  # avoid mutating the caller's evidence list
    ev.insert(0, None)  # to make the code look similar to the pseudocode

    fv = [[0.0, 0.0] for _ in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]    # we don't need bv; but we will have a list of all backward messages here
    sv = [[0, 0] for _ in range(len(ev))]

    fv[0] = prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    # the backward pass runs from t down to 0; sv[i] combines the forward
    # message fv[i] with the backward message for the same time step
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))
        if i > 0:
            b = backward(HMM, b, ev[i])
            bv.append(b)

    return sv
Example no. 8
def backward(HMM, b, ev):
    sensor_dist = HMM.sensor_dist(ev)
    prediction = element_wise_product(sensor_dist, b)

    return normalize(vector_add(scalar_vector_product(prediction[0], HMM.transition_model[0]),
                                scalar_vector_product(prediction[1], HMM.transition_model[1])))
Example no. 9
def forward(HMM, fv, ev):
    prediction = vector_add(scalar_vector_product(fv[0], HMM.transition_model[0]),
                            scalar_vector_product(fv[1], HMM.transition_model[1]))
    sensor_dist = HMM.sensor_dist(ev)

    return normalize(element_wise_product(sensor_dist, prediction))
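
Finally, an end-to-end smoothing run over the same five observations (umbrella-world values assumed; the first distribution is the smoothed estimate for time 0):

evidence = [True, True, False, True, True]
smoothed = forward_backward(umbrella_hmm, evidence, [0.5, 0.5])
for day, dist in enumerate(smoothed):
    print(day, [round(p, 4) for p in dist])
# expected output, approximately:
# 0 [0.6469, 0.3531]
# 1 [0.8673, 0.1327]
# 2 [0.8204, 0.1796]
# 3 [0.3075, 0.6925]
# 4 [0.8204, 0.1796]
# 5 [0.8673, 0.1327]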