Example #1
def Predict(max_step, prefix):

    edf.components = []

    T = max_step
    h = edf.Value(np.zeros((1, hidden_dim)))
    c = edf.Value(np.zeros((1, hidden_dim)))

    prediction = []

    for t in range(T):

        # While inside the prefix, feed the given character; afterwards feed
        # back the previous prediction.
        if t < len(prefix):
            pred = edf.Value(prefix[t])
            prediction.append(pred)
        else:
            prediction.append(pred)

        wordvec = edf.Embed(pred, C2V)
        xt = edf.Reshape(wordvec, [-1, hidden_dim])
        h_next, c_next = LSTMCell(xt, h, c)
        p = edf.SoftMax(edf.VDot(h_next, V))
        pred = edf.ArgMax(p)
        h = h_next
        c = c_next

    # Run the forward pass now that the whole graph has been built.
    edf.Forward()

    # Collect the predicted indices and truncate at the stop character '}'.
    idx = [pred.value for pred in prediction]
    stop_idx = utils.to_index('}')

    if stop_idx in idx:
        return idx[0:idx.index(stop_idx) + 1]
    else:
        return idx
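For reference, a self-contained sketch of the same prefix-then-feedback control flow, with a toy recurrence standing in for the LSTM; the vocab, W, step, and greedy_decode names here are illustrative only and are not part of the edf project.

import numpy as np

vocab = ['{', 'a', 'b', '}']
rng = np.random.default_rng(0)
W = rng.normal(size=(len(vocab), len(vocab)))

def step(state, token):
    state = 0.5 * state + W[token]   # toy recurrence, not an LSTM
    return state, state              # reuse the state as the logits

def greedy_decode(prefix, max_step, stop_token):
    state = np.zeros(len(vocab))
    out = []
    pred = None
    for t in range(max_step):
        # Inside the prefix, feed the given token; afterwards feed back the prediction.
        token = prefix[t] if t < len(prefix) else pred
        out.append(token)
        state, logits = step(state, token)
        pred = int(np.argmax(logits))
    # Truncate at the first stop token, as the '}' check does above.
    return out[:out.index(stop_token) + 1] if stop_token in out else out

print(greedy_decode([vocab.index('{')], 20, vocab.index('}')))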
Example #2
def BuildModel():

    edf.components = []

    B = inp.value.shape[0]
    T = inp.value.shape[1]
    h = edf.Value(np.zeros((B, hidden_dim)))
    c = edf.Value(np.zeros((B, hidden_dim)))

    score = []

    for t in range(T - 1):

        wordvec = edf.Embed(edf.Value(inp.value[:, t]), C2V)
        xt = edf.Reshape(wordvec, [-1, hidden_dim])
        h_next, c_next = LSTMCell(xt, h, c)
        p = edf.SoftMax(edf.VDot(h_next, V))
        logloss = edf.Reshape(
            edf.LogLoss(edf.Aref(p, edf.Value(inp.value[:, t + 1]))),
            (B, 1))

        # Concatenate the per-step losses along the time axis.
        if t == 0:
            loss = logloss
        else:
            loss = edf.ConCat(loss, logloss)

        score.append(p)
        h = h_next
        c = c_next

    # Mask out positions whose target is the padding index 0 before averaging.
    masks = np.zeros((B, T - 1), dtype=np.int32)
    masks[inp.value[:, 1:] != 0] = 1
    loss = edf.MeanwithMask(loss, edf.Value(masks))

    return loss, score
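The mask in this example zeroes out loss terms whose target is the padding index 0. A plain NumPy sketch of that construction, together with one plausible reading of what edf.MeanwithMask computes (an assumption, since edf is course-specific):

import numpy as np

# Toy batch of padded index sequences (0 is the padding index), B = 2, T = 6.
inp = np.array([[2, 5, 7, 3, 0, 0],
                [4, 1, 0, 0, 0, 0]])
B, T = inp.shape

# Pretend per-step losses for the T - 1 prediction targets.
per_step_loss = np.random.rand(B, T - 1)

# Same mask construction as in BuildModel: 1 wherever the target is not padding.
masks = np.zeros((B, T - 1), dtype=np.int32)
masks[inp[:, 1:] != 0] = 1

# Assumed semantics of MeanwithMask: average the loss over unmasked entries only.
masked_mean = (per_step_loss * masks).sum() / masks.sum()
print(masks)
print(masked_mean)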
Example #3
def BuildModel():

    edf.components = []

    B, T = inp.value.shape

    score = []
    loss = None

    # Init h_0 with the embedding of character index 1 (one copy per batch row)
    vocab_init = np.ones([B])
    vocab_init = edf.Value(vocab_init)
    h = edf.Embed(vocab_init, C2V)
    # Init C_0 to be zero
    c = edf.Value(np.zeros([B, hidden_dim]))

    for t in range(T):
        x_t = edf.Value(inp.value[:, t])

        x_t = edf.Embed(x_t, C2V)

        h, c = LSTMCell(x_t, h, c)
        # Score and loss

        pred = edf.SoftMax(edf.VDot(h, V))
        if t != T - 1:
            score.append(pred)
            x_t1 = edf.Value(inp.value[:, t + 1])
        else:
            # No next character at the last step, so use a dummy target of index 0.
            x_t1 = edf.Value(np.zeros(B))
        loss_t = edf.LogLoss(edf.Aref(pred, x_t1))
        if loss is None:
            loss = loss_t
        else:
            loss = edf.Add(loss, loss_t)

    loss = edf.Mul(edf.Mean(loss), edf.Value(np.float64(1) / T))
    return loss, score
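Both BuildModel variants score each step with edf.LogLoss(edf.Aref(p, target)). Under the usual reading of those names (an assumption about the edf API), this is the negative log-probability that the softmax assigns to the true next character; in plain NumPy:

import numpy as np

# Toy softmax output for a batch of 3 over a vocabulary of 5, plus target indices.
p = np.array([[0.10, 0.60, 0.10, 0.10, 0.10],
              [0.20, 0.20, 0.50, 0.05, 0.05],
              [0.25, 0.25, 0.25, 0.15, 0.10]])
targets = np.array([1, 2, 0])

# Assumed semantics: Aref gathers p[i, targets[i]] and LogLoss takes -log of it.
picked = p[np.arange(len(targets)), targets]
log_loss = -np.log(picked)
print(log_loss, log_loss.mean())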
Example #4
def Predict(max_step, prefix):

    edf.components = []

    T = max_step
    # One row of per-layer states per timestep; a list comprehension avoids
    # aliasing a single inner list across timesteps.
    h = [[None] * layer for _ in range(T + 1)]
    c = [[None] * layer for _ in range(T + 1)]
    for i in range(layer):
        h[0][i] = edf.Value(np.zeros((1, hidden_dim)))
        c[0][i] = edf.Value(np.zeros((1, hidden_dim)))

    prediction = []

    for t in range(T):

        if t < len(prefix):
            pred = edf.Value(prefix[t])
            prediction.append(pred)
        else:
            prediction.append(pred)

        wordvec = edf.Embed(pred, C2V)
        xt = edf.Reshape(wordvec, [-1, hidden_dim])
        for i in range(layer):
            # The hidden state of layer i becomes the input of layer i + 1.
            h[t + 1][i], c[t + 1][i] = LSTMCell(xt, h[t][i], c[t][i], i)
            xt = h[t + 1][i]
        p = edf.SoftMax(edf.VDot(xt, V))
        pred = edf.ArgMax(p)

    edf.Forward()

    idx = [pred.value for pred in prediction]
    stop_idx = utils.to_index('}')

    if stop_idx in idx:
        return idx[0:idx.index(stop_idx) + 1]
    else:
        return idx
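The stacked-layer pattern in the inner loop above (and in the next example) simply chains the layers within a timestep. A toy, self-contained version of that wiring, with a tanh recurrence standing in for LSTMCell; hidden_dim, layer, Ws, and toy_cell here are illustrative values only.

import numpy as np

hidden_dim, layer, T = 4, 2, 3
rng = np.random.default_rng(0)
Ws = [rng.normal(size=(hidden_dim, hidden_dim)) for _ in range(layer)]

def toy_cell(x, h, i):
    # Stand-in for LSTMCell(x, h, c, i): a simple per-layer tanh recurrence.
    return np.tanh(x @ Ws[i] + h)

h = [np.zeros((1, hidden_dim)) for _ in range(layer)]
x = rng.normal(size=(1, hidden_dim))
for t in range(T):
    xt = x
    for i in range(layer):
        h[i] = toy_cell(xt, h[i], i)
        xt = h[i]        # this layer's output feeds the next layer
print(xt)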
Example #5
def BuildModel():

    edf.components = []

    B = inp.value.shape[0]
    T = inp.value.shape[1]
    # One row of per-layer states per timestep (list comprehension to avoid aliasing).
    h = [[None] * layer for _ in range(T)]
    c = [[None] * layer for _ in range(T)]

    for i in range(layer):
        h[0][i] = edf.Value(np.zeros((B, hidden_dim)))
        c[0][i] = edf.Value(np.zeros((B, hidden_dim)))
    score = []

    for t in range(T - 1):

        wordvec = edf.Embed(edf.Value(inp.value[:, t]), C2V)
        xt = edf.Reshape(wordvec, [-1, hidden_dim])

        for i in range(layer):
            h[t + 1][i], c[t + 1][i] = LSTMCell(xt, h[t][i], c[t][i], i)
            xt = h[t + 1][i]
        p = edf.SoftMax(edf.VDot(xt, V))
        logloss = edf.Reshape(
            edf.LogLoss(edf.Aref(p, edf.Value(inp.value[:, t + 1]))),
            (B, 1))

        if t == 0:
            loss = logloss
        else:
            loss = edf.ConCat(loss, logloss)

        score.append(p)

    masks = np.zeros((B, T - 1), dtype=np.int32)
    masks[inp.value[:, 1:] != 0] = 1
    loss = edf.MeanwithMask(loss, edf.Value(masks))

    return loss, score
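One Python detail worth calling out in the two multi-layer examples: the per-timestep state tables must not be built by multiplying a list of lists, or every timestep ends up aliasing the same inner row. A quick plain-Python demonstration of the difference:

# Multiplying [[None] * layer] replicates references to a single inner list.
layer, T = 2, 3

aliased = [[None] * layer] * (T + 1)
aliased[0][0] = 'h_0'
print(aliased[1][0])      # 'h_0' -- all timesteps share one inner list

independent = [[None] * layer for _ in range(T + 1)]
independent[0][0] = 'h_0'
print(independent[1][0])  # None -- each timestep gets its own row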