Example #1
0
def expectation(model, instance):
    '''
    Run the forward-backward algorithm and accumulate the expected
    feature counts under the model distribution (the marginal term of
    the log-likelihood gradient).

    - param[in] model       The model (provides nr_tags, nr_attrs, w)
    - param[in] instance    The instance (provides features_table)

    Returns a defaultdict mapping feature index -> expected count.
    '''
    # get the cached score
    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs
    g0, g = build_score_cache(model.w, L, T, A, instance)

    a = forward(g0, g, L, T)
    b = backward(g, L, T)

    # log partition function, from the last forward column
    logZ = logsumexp(a[L - 1, :])

    E = defaultdict(float)
    f = instance.features_table

    # Unigram marginals at position 0; clip guards against rounding
    # pushing probabilities slightly outside [0, 1].
    # NOTE: range (not the Python-2-only xrange) keeps this consistent
    # with the loops below and working on Python 3.
    c = exp(g0 + b[0, :] - logZ).clip(0., 1.)
    for j in range(T):
        for k in f[0, None, j]:
            E[k] += c[j]

    for i in range(1, L):
        # pairwise marginals p(y[i-1]=j, y[i]=k)
        c = exp(add.outer(a[i - 1, :], b[i, :]) + g[i, :, :] - logZ).clip(
            0., 1.)
        for j in range(T):
            for k in range(T):
                for e in f[i, j, k]:
                    E[e] += c[j, k]

    return E
Example #2
0
def forward(g0, g, L, T):
    '''
    The forward pass of the forward-backward algorithm, computed in
    log space for numerical stability.

    Parameters
    ----------
    g0 : array of T
        Initial log potentials, $\Phi(0,None,k)$
    g : array of L*T*T
        Transition log potentials, $\Phi(i,j,k)$
    L : int
        length of instance
    T : int
        number of tags

    Returns
    -------
    a : array of L*T
        The forward matrix: a[i, o] is the log of the summed scores of
        all tag paths ending at position i with tag o.
    '''
    a = zeros((L, T), dtype=float)
    a[0, :] = g0
    # range (not the Python-2-only xrange) keeps this working on Python 3.
    for i in range(1, L):
        ap = a[i - 1, :]
        for o in range(T):
            # log-sum-exp keeps the recurrence stable in log space
            a[i, o] = logsumexp(ap + g[i, :, o])
    return a
Example #3
0
def expectation(model, instance):
    '''
    Forward-backward pass collecting model-expected feature counts
    (the marginal component of the log-likelihood gradient).

    - param[in] model       The model (provides nr_tags, nr_attrs, w)
    - param[in] instance    The instance (provides features_table)

    Returns a defaultdict of feature index -> expected count.
    '''
    # get the cached score
    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs
    g0, g = build_score_cache(model.w, L, T, A, instance)

    a = forward(g0, g, L, T)
    b = backward(g, L, T)

    # log partition function
    logZ = logsumexp(a[L - 1, :])

    E = defaultdict(float)
    f = instance.features_table

    # Position-0 unigram marginals; clip absorbs tiny rounding error.
    # range replaces the Python-2-only xrange, matching the loops below.
    c = exp(g0 + b[0, :] - logZ).clip(0., 1.)
    for j in range(T):
        for k in f[0, None, j]:
            E[k] += c[j]

    for i in range(1, L):
        # pairwise marginals p(y[i-1]=j, y[i]=k)
        c = exp(add.outer(a[i - 1, :], b[i, :]) + g[i, :, :] - logZ).clip(
            0., 1.)
        for j in range(T):
            for k in range(T):
                for e in f[i, j, k]:
                    E[e] += c[j, k]

    return E
Example #4
0
def forward(g0, g, L, T):
    '''
    The forward pass of the forward-backward algorithm (log space).

    Parameters
    ----------
    g0 : array of T
        Initial log potentials, $\Phi(0,None,k)$
    g : array of L*T*T
        Transition log potentials, $\Phi(i,j,k)$
    L : int
        length of instance
    T : int
        number of tags

    Returns
    -------
    a : array of L*T
        The forward matrix; a[i, o] is the log-summed score of all
        paths that reach position i with tag o.
    '''
    a = zeros((L, T), dtype=float)
    a[0, :] = g0
    # range, not the Python-2-only xrange, so the code runs on Python 3.
    for i in range(1, L):
        ap = a[i - 1, :]
        for o in range(T):
            # recurrence in log space via log-sum-exp
            a[i, o] = logsumexp(ap + g[i, :, o])
    return a
Example #5
0
def _dlikelihood(w, instance, model):
    '''
    Calculate the gradient of the log-likelihood of one instance.

    - param[in] w           The weight vector
    - param[in] instance    The instance
    - param[in] model       The model

    Returns the gradient array (same shape as w).
    '''
    grad = zeros(w.shape[0], dtype=float)

    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs

    # Fills instance.correct_features and the feature tables.
    build_instance(model.attrs, model.tags, instance, True)
    g0, g = build_score_cache(w, L, T, A, instance)

    # Empirical feature counts: the positive part of the gradient.
    # items() replaces the Python-2-only iteritems() for Python 3.
    F = instance.correct_features
    for k, v in F.items():
        grad[k] += v

    a = forward(g0, g, L, T)  # forward
    b = backward(g, L, T)  # backward

    # log partition function
    logZ = logsumexp(a[L - 1, :])

    U = instance.unigram_features_table
    B = instance.bigram_features_table

    # Position-0 unigram marginals; clip absorbs rounding error.
    c = exp(g0 + b[0, :] - logZ).clip(0., 1.)
    for j in range(T):
        grad[U[0, j]] -= c[j]

    for i in range(1, L):
        # pairwise marginals p(y[i-1]=j, y[i]=k)
        c = exp(add.outer(a[i - 1, :], b[i, :]) + g[i, :, :] - logZ).clip(
            0., 1.)
        # The following code is an equivalent of this:
        #for j in range(T):
        #    for k in range(T):
        #        grad[U[i,k]] -= c[j,k]
        #        grad[B[j,k]] -= c[j,k]
        for k in range(T):
            grad[U[i, k]] -= c[:, k].sum()
        # Bigram weights occupy the contiguous run [A*T, (A+T)*T); a
        # slice is equivalent to the former range() fancy index and
        # avoids materializing an index list.
        grad[A * T:(A + T) * T] -= c.flatten()

    return grad
Example #6
0
def _dlikelihood(w, instance, model):
    '''
    Gradient of the log-likelihood for a single instance.

    - param[in] w           The weight vector
    - param[in] instance    The instance
    - param[in] model       The model

    Returns an array with the same shape as w.
    '''
    grad = zeros(w.shape[0], dtype=float)

    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs

    # Populate instance.correct_features and the feature tables.
    build_instance(model.attrs, model.tags, instance, True)
    g0, g = build_score_cache(w, L, T, A, instance)

    # Gold-path feature counts (positive gradient term).
    # items() works on both Python 2 and 3; iteritems() is 2-only.
    F = instance.correct_features
    for k, v in F.items():
        grad[k] += v

    a = forward(g0, g, L, T)    # forward
    b = backward(g, L, T)       # backward

    # log partition function
    logZ = logsumexp(a[L - 1, :])

    U = instance.unigram_features_table
    B = instance.bigram_features_table

    # Unigram marginals at position 0; clip guards rounding error.
    c = exp(g0 + b[0, :] - logZ).clip(0., 1.)
    for j in range(T):
        grad[U[0, j]] -= c[j]

    for i in range(1, L):
        # pairwise marginals p(y[i-1]=j, y[i]=k)
        c = exp(add.outer(a[i - 1, :], b[i, :]) + g[i, :, :] - logZ).clip(
            0., 1.)
        # The following code is an equivalent of this:
        #for j in range(T):
        #    for k in range(T):
        #        grad[U[i,k]] -= c[j,k]
        #        grad[B[j,k]] -= c[j,k]
        for k in range(T):
            grad[U[i, k]] -= c[:, k].sum()
        # The bigram block is contiguous, so a plain slice replaces the
        # former range()-based fancy index.
        grad[A * T:(A + T) * T] -= c.flatten()

    return grad
Example #7
0
def _likelihood(w, instance, model):
    '''
    Calculate the log-likelihood of one instance.

    - param[in] w           The weight vector
    - param[in] instance    The instance
    - param[in] model       The model
    '''
    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs

    # Filling the correct_features and features_table
    build_instance(model.attrs, model.tags, instance, True)
    g0, g = build_score_cache(w, L, T, A, instance)

    # Score of the gold path: sum of fired-feature weights.
    # items() replaces the Python-2-only iteritems() for Python 3.
    F = instance.correct_features
    ret = array([w[k] * v for k, v in F.items()]).sum()

    # Subtract the log partition function from the forward pass.
    a = forward(g0, g, L, T)

    return ret - logsumexp(a[L - 1, :])
Example #8
0
def _likelihood(w, instance, model):
    '''
    Log-likelihood of a single instance under weights w.

    - param[in] w           The weight vector
    - param[in] instance    The instance
    - param[in] model       The model
    '''
    L = len(instance)
    T = model.nr_tags
    A = model.nr_attrs

    # Filling the correct_features and features_table
    build_instance(model.attrs, model.tags, instance, True)
    g0, g = build_score_cache(w, L, T, A, instance)

    # Gold-path score; items() works on Python 2 and 3, unlike the
    # removed-in-3 iteritems().
    F = instance.correct_features
    ret = array([w[k] * v for k, v in F.items()]).sum()

    # Marginal term: log partition function via the forward pass.
    a = forward(g0, g, L, T)

    return ret - logsumexp(a[L - 1, :])
Example #9
0
def backward(g, L, T):
    '''
    The backward pass of the forward-backward algorithm (log space).

    Parameters
    ----------
    g : array of L*T*T
        Transition log potentials, $\Phi(i,j,k)$
    L : int
        length of instance
    T : int
        number of tags

    Returns
    -------
    b : array of L*T
        The backward matrix: b[i, o] is the log of the summed scores of
        all path suffixes starting at position i with tag o.
    '''
    b = zeros((L, T), dtype=float)
    # Last row stays 0 (= log 1); fill rows from L-2 down to 0.
    # range replaces the Python-2-only xrange for Python 3 compatibility.
    for i in range(L - 2, -1, -1):
        bp = b[i + 1, :]
        for o in range(T):
            b[i, o] = logsumexp(bp + g[i + 1, o, :])
    return b
Example #10
0
def backward(g, L, T):
    '''
    The backward pass of the forward-backward algorithm, in log space.

    Parameters
    ----------
    g : array of L*T*T
        Transition log potentials, $\Phi(i,j,k)$
    L : int
        length of instance
    T : int
        number of tags

    Returns
    -------
    b : array of L*T
        The backward matrix; b[i, o] is the log-summed score of all
        suffix paths beginning at position i with tag o.
    '''
    b = zeros((L, T), dtype=float)
    # The final row remains 0 (= log 1); iterate back from L-2 to 0.
    # range, not the Python-2-only xrange, so this runs on Python 3.
    for i in range(L - 2, -1, -1):
        bp = b[i + 1, :]
        for o in range(T):
            b[i, o] = logsumexp(bp + g[i + 1, o, :])
    return b