Code Example #1
File: probability.py  Project: tchigher/aima-python
def viterbi(HMM, ev, prior):
    """[Equation 15.11]
    Viterbi algorithm to find the most likely sequence. Computes the best path,
    given an HMM model and a sequence of observations."""
    t = len(ev)
    ev.insert(0, None)

    m = [[0.0, 0.0] for _ in range(len(ev) - 1)]

    # the recursion is initialized with m1 = forward(P(X0), e1)
    m[0] = forward(HMM, prior, ev[1])

    for i in range(1, t):
        m[i] = element_wise_product(HMM.sensor_dist(ev[i + 1]), [
            max(element_wise_product(HMM.transition_model[0], m[i - 1])),
            max(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])

    path = [0.0] * (len(ev) - 1)
    # the construction of the most likely sequence starts in the final state with the largest
    # probability and runs backwards; note that this version only records the maximal message
    # value (a probability) at each step, not the maximizing state itself
    for i in range(t, 0, -1):
        path[i - 1] = max(m[i - 1])

    return path
Code Example #2
def forward_backward(HMM, ev):
    """
    [Figure 15.4]
    Forward-Backward algorithm for smoothing. Computes posterior probabilities
    of a sequence of states given a sequence of observations."""
    t = len(ev)
    ev = ev.copy()  # work on a copy so the caller's list of observations is not mutated
    ev.insert(0, None)  # to make the code look similar to the pseudocode

    fv = [[0.0, 0.0] for _ in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]  # we don't need bv; but we will have a list of all backward messages here
    sv = [[0, 0] for _ in range(len(ev))]

    fv[0] = HMM.prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))  # smoothed estimate P(X_i | e_1:t)
        b = backward(HMM, b, ev[i])
        bv.append(b)

    return sv
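
A usage sketch (not part of the project source): this shows how forward_backward above might be called on the umbrella world from AIMA Chapter 15. It assumes a HiddenMarkovModel class from the same probability.py with the constructor HiddenMarkovModel(transition_model, sensor_model, prior) and a sensor_dist method; that signature is an assumption about the module.

# Hypothetical usage of forward_backward on the umbrella world (see note above).
from probability import HiddenMarkovModel, forward_backward  # assumed module layout

umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]  # P(Rain_t | Rain_t-1), one row per previous state
umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]      # P(Umbrella_t | Rain_t)
hmm = HiddenMarkovModel(umbrella_transition, umbrella_sensor, [0.5, 0.5])

evidence = [True, True, False, True, True]      # umbrella observed on days 1, 2, 4 and 5
smoothed = forward_backward(hmm, evidence)      # smoothed estimates P(Rain_i | e_1:5), i = 0..5
print([round(p, 4) for p in smoothed[1]])       # posterior for day 1
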
Code Example #3
def forward(HMM, fv, ev):
    """One step of filtering: propagate the forward message fv through the transition
    model, then weight by the likelihood of the new evidence ev and normalize."""
    prediction = vector_add(
        scalar_vector_product(fv[0], HMM.transition_model[0]),
        scalar_vector_product(fv[1], HMM.transition_model[1]))
    sensor_dist = HMM.sensor_dist(ev)

    return normalize(element_wise_product(sensor_dist, prediction))
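
To make the effect of a single forward step concrete, here is a small hypothetical call on the same assumed umbrella model: starting from a uniform prior and observing an umbrella, the filtered probability of rain rises to about 0.818.

# One filtering step with forward (hypothetical call, same assumed umbrella HMM as above).
from probability import HiddenMarkovModel, forward  # assumed module layout

hmm = HiddenMarkovModel([[0.7, 0.3], [0.3, 0.7]], [[0.9, 0.2], [0.1, 0.8]], [0.5, 0.5])
fv1 = forward(hmm, [0.5, 0.5], True)    # filter the first observation from a uniform prior
print([round(p, 4) for p in fv1])       # [0.8182, 0.1818]
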
Code Example #4
def backward(HMM, b, ev):
    """One step of the backward pass: weight the backward message b by the likelihood of
    the evidence ev, then propagate it back through the transition model and normalize."""
    sensor_dist = HMM.sensor_dist(ev)
    prediction = element_wise_product(sensor_dist, b)

    return normalize(
        vector_add(
            scalar_vector_product(prediction[0], HMM.transition_model[0]),
            scalar_vector_product(prediction[1], HMM.transition_model[1])))
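
These snippets lean on a handful of small vector helpers from the project's utils.py. The minimal versions below are a sketch of what those helpers need to do for the list-of-floats usage above; the project's own implementations are more general.

# Minimal sketches of the vector helpers used above (the project's utils.py versions are more general).
def element_wise_product(x, y):
    """Component-wise product of two equal-length vectors."""
    return [a * b for a, b in zip(x, y)]

def scalar_vector_product(s, v):
    """The vector v scaled by the scalar s."""
    return [s * a for a in v]

def vector_add(x, y):
    """Component-wise sum of two equal-length vectors."""
    return [a + b for a, b in zip(x, y)]

def normalize(dist):
    """Scale a vector of non-negative numbers so that its components sum to 1."""
    total = sum(dist)
    return [a / total for a in dist]
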
Code Example #5
File: probability.py  Project: xinkecf35/aima-python
def viterbi(HMM, ev):
    """
    [Equation 15.11]
    Viterbi algorithm to find the most likely sequence. Computes the best path and the
    corresponding probabilities, given an HMM model and a sequence of observations.
    """
    t = len(ev)
    ev = ev.copy()
    ev.insert(0, None)

    m = [[0.0, 0.0] for _ in range(len(ev) - 1)]

    # the recursion is initialized with m1 = forward(P(X0), e1)
    m[0] = forward(HMM, HMM.prior, ev[1])
    # keep track of maximizing predecessors
    backtracking_graph = []

    for i in range(1, t):
        m[i] = element_wise_product(HMM.sensor_dist(ev[i + 1]), [
            max(element_wise_product(HMM.transition_model[0], m[i - 1])),
            max(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])
        backtracking_graph.append([
            np.argmax(element_wise_product(HMM.transition_model[0], m[i - 1])),
            np.argmax(element_wise_product(HMM.transition_model[1], m[i - 1]))
        ])

    # computed probabilities
    ml_probabilities = [0.0] * (len(ev) - 1)
    # most likely sequence
    ml_path = [True] * (len(ev) - 1)

    # the construction of the most likely sequence starts in the final state with the largest probability, and
    # runs backwards; the algorithm needs to store for each xt its predecessor xt-1 maximizing its probability
    i_max = np.argmax(m[-1])

    for i in range(t - 1, -1, -1):
        ml_probabilities[i] = m[i][i_max]
        ml_path[i] = True if i_max == 0 else False
        if i > 0:
            i_max = backtracking_graph[i - 1][i_max]

    return ml_path, ml_probabilities
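
A usage sketch for this viterbi (again not part of the project source), on the same assumed umbrella HiddenMarkovModel as above; ml_path comes back as booleans (True meaning state 0, i.e. rain) and ml_probabilities as the probability of the best path at each step.

# Hypothetical usage of viterbi on the umbrella world (see note above).
from probability import HiddenMarkovModel, viterbi  # assumed module layout

hmm = HiddenMarkovModel([[0.7, 0.3], [0.3, 0.7]],   # transition model
                        [[0.9, 0.2], [0.1, 0.8]],   # sensor model
                        [0.5, 0.5])                 # prior
path, probs = viterbi(hmm, [True, True, False, True, True])
print(path)     # [True, True, False, True, True]: rain is most likely on every day except day 3
print([round(p, 4) for p in probs])
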
Code Example #6
def forward_backward(HMM, ev, prior):
    """Algoritmo forward-backward para suavização. Calcula probabilidades posteriores
     De uma seqüência de estados dada uma seqüência de observações."""
    t = len(ev)
    ev = ev.copy()  # work on a copy so the caller's list of observations is not mutated
    ev.insert(0, None)

    fv = [[0.0, 0.0] for i in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]
    sv = [[0, 0] for i in range(len(ev))]

    fv[0] = prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))  # smoothed estimate P(X_i | e_1:t)
        b = backward(HMM, b, ev[i])
        bv.append(b)

    return sv
Code Example #7
File: probability.py  Project: rajul/aima-python
def forward_backward(HMM, ev, prior):
    """[Figure 15.4]
    Forward-Backward algorithm for smoothing. Computes posterior probabilities
    of a sequence of states given a sequence of observations."""
    t = len(ev)
    ev = ev.copy()  # work on a copy so the caller's list of observations is not mutated
    ev.insert(0, None)  # to make the code look similar to the pseudocode

    fv = [[0.0, 0.0] for i in range(len(ev))]
    b = [1.0, 1.0]
    bv = [b]    # we don't need bv; but we will have a list of all backward messages here
    sv = [[0, 0] for i in range(len(ev))]

    fv[0] = prior

    for i in range(1, t + 1):
        fv[i] = forward(HMM, fv[i - 1], ev[i])
    for i in range(t, -1, -1):
        sv[i] = normalize(element_wise_product(fv[i], b))  # smoothed estimate P(X_i | e_1:t)
        b = backward(HMM, b, ev[i])
        bv.append(b)

    return sv
Code Example #8
File: probability.py  Project: rajul/aima-python
def backward(HMM, b, ev):
    """One step of the backward pass: weight the backward message b by the likelihood of
    the evidence ev, then propagate it back through the transition model and normalize."""
    sensor_dist = HMM.sensor_dist(ev)
    prediction = element_wise_product(sensor_dist, b)

    return normalize(vector_add(scalar_vector_product(prediction[0], HMM.transition_model[0]),
                                scalar_vector_product(prediction[1], HMM.transition_model[1])))
Code Example #9
File: probability.py  Project: rajul/aima-python
def forward(HMM, fv, ev):
    """One step of filtering: propagate the forward message fv through the transition
    model, then weight by the likelihood of the new evidence ev and normalize."""
    prediction = vector_add(scalar_vector_product(fv[0], HMM.transition_model[0]),
                            scalar_vector_product(fv[1], HMM.transition_model[1]))
    sensor_dist = HMM.sensor_dist(ev)

    return normalize(element_wise_product(sensor_dist, prediction))