Code example #1
    def dec_lstm_cell(self, x, h, c):
        concat = Concat(h, x)

        # Forget Gate
        f_gate = Sigmoid(Add(Dot(concat, self.dwf), self.dbf))
        # Input Gate
        i_gate = Sigmoid(Add(Dot(concat, self.dwi), self.dbi))
        # Candidate cell state
        c_temp = Tanh(Add(Dot(concat, self.dwc), self.dbc))
        # Output Gate
        o_temp = Sigmoid(Add(Dot(concat, self.dwo), self.dbo))

        # New cell state and hidden state
        c_next = Add(Mul(f_gate, c), Mul(i_gate, c_temp))
        h_next = Mul(o_temp, Tanh(c_next))
        return h_next, c_next
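A minimal plain-NumPy sketch of the arithmetic this cell wires into the graph (the weight names and shapes here are assumptions for illustration; the graph nodes above delegate the real computation to the framework):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_cell_numpy(x, h, c, wf, bf, wi, bi, wc, bc, wo, bo):
    # x: (B, in_dim), h and c: (B, hidden_dim),
    # each w*: (hidden_dim + in_dim, hidden_dim), each b*: (hidden_dim,)
    concat = np.concatenate([h, x], axis=1)    # Concat(h, x)
    f_gate = sigmoid(concat @ wf + bf)         # forget gate
    i_gate = sigmoid(concat @ wi + bi)         # input gate
    c_temp = np.tanh(concat @ wc + bc)         # candidate cell state
    o_gate = sigmoid(concat @ wo + bo)         # output gate
    c_next = f_gate * c + i_gate * c_temp      # new cell state
    h_next = o_gate * np.tanh(c_next)          # new hidden state
    return h_next, c_next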
Code example #2
    def build(self, prefix, expect_length):
        # clear nodes above watermark
        del self.nodes[self.watermark:]
        # Remove retained inputs
        del self.inputs[2:]

        prefix_len = len(prefix)
        hidden_dim = self.hidden_dim

        self.h0.value = np.zeros((1, hidden_dim))
        self.c0.value = np.zeros((1, hidden_dim))
        self.predicts = []
        h = self.h0
        c = self.c0
        for t in range(expect_length):
            if t < prefix_len:
                x = self.input()
                x.value = prefix[t]
            else:
                x = ArgMax(SoftMax(Dot(h, self.V)))
            self.predicts.append(x)
            wordvec = Embed(x, self.C2V)

            cell = LSTMCell(self.wf, self.bf, self.wi, self.bi, self.wc,
                            self.bc, self.wo, self.bo, wordvec, h, c)

            h = cell.hout
            c = cell.cout
Code example #3
    def build(self, batch):
        # clear nodes above watermark
        del self.nodes[self.watermark:]
        # Remove retained inputs
        del self.inputs[2:]
        data = batch.data
        B = data.shape[0]
        T = data.shape[1]
        hidden_dim = self.hidden_dim

        self.h0.value = np.zeros((B, hidden_dim))
        self.c0.value = np.zeros((B, hidden_dim))

        collect = []
        h = self.h0
        c = self.c0
        for t in range(T - 1):
            x = self.input()
            x.value = data[:, t]
            wordvec = Embed(x, self.C2V)

            cell = LSTMCell(self.wf, self.bf, self.wi, self.bi, self.wc,
                            self.bc, self.wo, self.bo, wordvec, h, c)

            collect.append(SoftMax(Dot(cell.hout, self.V)))

            h = cell.hout
            c = cell.cout

        self.output(Collect(collect))
        self.expect(data[:, 1:T].T)
Code example #4
def build_graph(batch):
    data = batch.data[0]

    fwd_graph.reset()
    bcwd_graph.reset()

    bsize, length = data.shape

    fwd_graph.h0.value = np.zeros([bsize, hidden_dim])
    fwd_graph.c0.value = np.zeros([bsize, hidden_dim])
    bcwd_graph.h0.value = np.zeros([bsize, hidden_dim])
    bcwd_graph.c0.value = np.zeros([bsize, hidden_dim])

    fwd_h = fwd_graph.h0
    fwd_c = fwd_graph.c0
    bcwd_h = bcwd_graph.h0
    bcwd_c = bcwd_graph.c0

    fwd_outputs = []
    bcwd_outputs = []
    for idx in range(length - 1):
        # Build Forward Graph
        fwd_in_i = fwd_graph.input()
        fwd_in_i.value = data[:, idx]  # Get value from batch
        fwd_x = Embed(fwd_in_i, fwd_graph.embed)
        fwd_h, fwd_c = fwd_graph.lstm_cell(fwd_x, fwd_h, fwd_c)
        fwd_out_i = SoftMax(Dot(fwd_h, fwd_graph.v2c))
        fwd_outputs.append(fwd_out_i)

        # Build Backward Graph
        bcwd_in_i = bcwd_graph.input()
        bcwd_in_i.value = data[:, length - 1 - idx]  # Get value from batch
        bcwd_x = Embed(bcwd_in_i, bcwd_graph.embed)
        bcwd_h, bcwd_c = bcwd_graph.lstm_cell(bcwd_x, bcwd_h, bcwd_c)
        bcwd_out_i = SoftMax(Dot(bcwd_h, bcwd_graph.v2c))
        bcwd_outputs.append(bcwd_out_i)

    fwd_graph.output(Collect(fwd_outputs))
    fwd_graph.expect(data[:, 1:])

    bcwd_graph.output(Collect(bcwd_outputs))
    bcwd_graph.expect(np.flip(data, axis=1)[:, 1:])
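As a sanity check on the backward-graph indexing (a hedged NumPy illustration, not part of the original source): feeding data[:, length - 1 - idx] for idx = 0 .. length - 2 is the same as feeding the flipped sequence without its last column, which sits exactly one step behind the target np.flip(data, axis=1)[:, 1:].

import numpy as np

data = np.arange(8).reshape(2, 4)   # toy batch: bsize=2, length=4
length = data.shape[1]

# Inputs fed to the backward graph, stacked over time
bcwd_inputs = np.stack(
    [data[:, length - 1 - idx] for idx in range(length - 1)], axis=1)
flipped = np.flip(data, axis=1)

# The backward inputs are the reversed sequence minus its last column ...
assert np.array_equal(bcwd_inputs, flipped[:, :length - 1])
# ... so the reversed sequence shifted by one gives the matching next-token targets
bcwd_targets = flipped[:, 1:]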
Code example #5
    def __init__(self, wf, bf, wi, bi, wc, bc, wo, bo, x, h, c):
        self.wf = wf
        self.bf = bf
        self.wi = wi
        self.bi = bi
        self.wc = wc
        self.bc = bc
        self.wo = wo
        self.bo = bo
        self.x = x
        self.h = h
        self.c = c

        concat = Concat(x, h)
        fgate = Sigmoid(Add(Dot(concat, wf), bf))            # forget gate
        igate = Sigmoid(Add(Dot(concat, wi), bi))            # input gate
        cgate = Mul(Tanh(Add(Dot(concat, wc), bc)), igate)   # candidate cell state, scaled by the input gate
        ogate = Sigmoid(Add(Dot(concat, wo), bo))            # output gate

        self.cout = Add(Mul(c, fgate), cgate)                # new cell state
        self.hout = Mul(Tanh(self.cout), ogate)              # new hidden state
Code example #6
    def build_graph(self, batch):
        enc_data = batch.data
        self.reset()

        bsize, enc_length = enc_data.shape

        outputs = []

        # Build Fwd Encode Graph
        self.feh0.value = np.zeros([bsize, self.half_dim])
        self.fec0.value = np.zeros([bsize, self.half_dim])

        fh = self.feh0
        fc = self.fec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, idx]  # Get value from batch
            x = Embed(in_i, self.feembed)
            fh, fc = self.fenc_lstm_cell(x, fh, fc)

        # Build Bwd Encode Graph
        self.beh0.value = np.zeros([bsize, self.half_dim])
        self.bec0.value = np.zeros([bsize, self.half_dim])

        bh = self.beh0
        bc = self.bec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, enc_length - 1 - idx]  # Get value from batch
            x = Embed(in_i, self.beembed)
            bh, bc = self.benc_lstm_cell(x, bh, bc)

        # Combine forward/backward encoder states
        h = Concat(fh, bh)
        c = Concat(fc, bc)

        self.encoded_h = h
        self.encoded_c = c

        # Build Decode Graph

        decode_in = self.input()
        decode_in.value = np.zeros([bsize])
        decode_embed = Embed(decode_in, self.dembed)
        x = decode_embed
        for idx in range(self.predict_len):
            h, c = self.dec_lstm_cell(x, h, c)
            out_i = ArgMax(SoftMax(Dot(h, self.dv2c)))
            outputs.append(out_i)
            x = Embed(out_i, self.dembed)
        self.output(Collect(outputs))
        self.expect(np.zeros([bsize, self.predict_len]))
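This decoder runs greedily: each step's ArgMax is embedded and fed back as the next input, and the all-zero expect appears to be a placeholder since no ground-truth decoder tokens are consumed here. Note that ArgMax(SoftMax(x)) picks the same index as an argmax over the raw logits, because softmax preserves ordering. A small NumPy sketch of one greedy step, with purely illustrative shapes and names:

import numpy as np

bsize, hidden_dim, vocab, emb_dim = 2, 8, 11, 6   # illustrative sizes
h = np.random.randn(bsize, hidden_dim)            # current decoder hidden state
dv2c = np.random.randn(hidden_dim, vocab)         # stands in for self.dv2c
dembed = np.random.randn(vocab, emb_dim)          # stands in for self.dembed

logits = h @ dv2c
token = logits.argmax(axis=1)    # same choice as ArgMax(SoftMax(Dot(h, self.dv2c)))
x_next = dembed[token]           # feed the prediction back in, like Embed(out_i, self.dembed)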
Code example #7
    def build_graph(self, batch):
        enc_data = batch.data[0]
        dec_data = batch.data[1]
        self.reset()

        bsize, enc_length = enc_data.shape
        dec_length = dec_data.shape[1]

        outputs = []

        # Build Fwd Encode Graph
        self.feh0.value = np.zeros([bsize, self.half_dim])
        self.fec0.value = np.zeros([bsize, self.half_dim])

        fh = self.feh0
        fc = self.fec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, idx]  # Get value from batch
            x = Embed(in_i, self.feembed)
            fh, fc = self.fenc_lstm_cell(x, fh, fc)

        # Build Bwd Encode Graph
        self.beh0.value = np.zeros([bsize, self.half_dim])
        self.bec0.value = np.zeros([bsize, self.half_dim])

        bh = self.beh0
        bc = self.bec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, enc_length - 1 - idx]  # Get value from batch
            x = Embed(in_i, self.beembed)
            bh, bc = self.benc_lstm_cell(x, bh, bc)

        # Build Decode Graph
        h = Concat(fh, bh)
        c = Concat(fc, bc)

        self.encoded_h = h
        self.encoded_c = c

        for idx in range(dec_length - 1):
            in_i = self.input()
            in_i.value = dec_data[:, idx]
            x = Embed(in_i, self.dembed)
            h, c = self.dec_lstm_cell(x, h, c)
            out_i = SoftMax(Dot(h, self.dv2c))
            outputs.append(out_i)

        self.output(Collect(outputs))
        self.expect(dec_data[:, 1:])
Code example #8
    def build_graph(self, batch):
        data = batch.data[1]
        self.reset()
        # Build Computation Graph according to length
        bsize, length = data.shape

        h, c = self.bow_encode(batch.data[0])

        outputs = []
        for idx in range(length - 1):
            in_i = self.input()
            in_i.value = data[:, idx]  # Get value from batch
            x = Embed(in_i, self.embed)
            h, c = self.lstm_cell(x, h, c)
            out_i = SoftMax(Dot(h, self.v2c))
            outputs.append(out_i)
        self.output(Collect(outputs))
        self.expect(data[:, 1:])
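bow_encode is not shown in this excerpt. A common bag-of-words encoder averages the source token embeddings and projects the result to the decoder's initial hidden and cell states; the sketch below assumes that form, and every name in it is illustrative rather than taken from the project:

import numpy as np

def bow_encode_numpy(token_ids, embed, w_h, w_c):
    # token_ids: (bsize, src_len) integer ids, embed: (vocab, emb_dim),
    # w_h and w_c: (emb_dim, hidden_dim) illustrative projection matrices
    vecs = embed[token_ids]       # look up embeddings: (bsize, src_len, emb_dim)
    bag = vecs.mean(axis=1)       # order-insensitive sentence representation
    h0 = np.tanh(bag @ w_h)       # initial hidden state for the decoder LSTM
    c0 = np.tanh(bag @ w_c)       # initial cell state for the decoder LSTM
    return h0, c0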
Code example #9
    def build_graph(self, batch):
        self.reset()
        # Build Computation Graph according to length
        bsize, length = batch.data.shape

        self.h0.value = np.zeros([bsize, self.hidden_dim])
        self.c0.value = np.zeros([bsize, self.hidden_dim])

        h = self.h0
        c = self.c0
        outputs = []
        for idx in range(length - 1):
            in_i = self.input()
            in_i.value = batch.data[:, idx]  # Get value from batch
            x = Embed(in_i, self.embed)
            h, c = self.lstm_cell(x, h, c)
            out_i = SoftMax(Dot(h, self.v2c))
            outputs.append(out_i)
        self.output(Collect(outputs))
        self.expect(batch.data[:, 1:])
Code example #10
File: s2s_decoder.py  Project: harperjiang/TTIC31210
def build_graph(batch):
    data = batch.data[1]
    decode_graph.reset()
    # Build Computation Graph according to length
    bsize, length = data.shape

    '''
    Change the function here to switch between encoders
    '''
    h, c = lstm_encode(batch.data[0])

    outputs = []
    for idx in range(length - 1):
        in_i = decode_graph.input()
        in_i.value = data[:, idx]  # Get value from batch
        x = Embed(in_i, decode_graph.embed)
        h, c = decode_graph.lstm_cell(x, h, c)
        out_i = SoftMax(Dot(h, decode_graph.v2c))
        outputs.append(out_i)
    decode_graph.output(Collect(outputs))
    decode_graph.expect(data[:, 1:])
Code example #11
def build_graph(batch):
    data = batch.data[0]
    graph.reset()
    # Build Computation Graph according to length
    bsize, length = data.shape

    graph.h0.value = np.zeros([bsize, hidden_dim])
    graph.c0.value = np.zeros([bsize, hidden_dim])

    h = graph.h0
    c = graph.c0
    outputs = []
    for idx in range(length - 1):
        in_i = graph.input()
        in_i.value = data[:, idx]  # Get value from batch
        x = Embed(in_i, graph.embed)
        h, c = graph.lstm_cell(x, h, c)
        out_i = SoftMax(Dot(h, graph.v2c))
        outputs.append(out_i)
    graph.output(Collect(outputs))
    graph.expect(data[:, 1:])
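The inputs data[:, idx] together with the expectation data[:, 1:] set up next-token prediction with teacher forcing: step t reads token t and is scored against token t + 1. How the framework turns the collected SoftMax outputs into a loss is not shown; one step of the usual cross-entropy in plain NumPy would look roughly like this (all shapes illustrative):

import numpy as np

def softmax(z):
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

bsize, length, hidden_dim, vocab = 2, 5, 8, 11   # illustrative sizes
data = np.random.randint(0, vocab, size=(bsize, length))
h_t = np.random.randn(bsize, hidden_dim)         # stands in for the lstm_cell output at step t
v2c = np.random.randn(hidden_dim, vocab)         # stands in for graph.v2c

t = 0
probs = softmax(h_t @ v2c)                       # SoftMax(Dot(h, graph.v2c))
targets = data[:, t + 1]                         # shift-by-one target, as in graph.expect(data[:, 1:])
step_loss = -np.log(probs[np.arange(bsize), targets]).mean()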
Code example #12
    def build_graph(self, batch):
        enc_data = batch.data
        self.reset()

        bsize, enc_length = enc_data.shape

        outputs = []

        # Build Encode Graph
        self.h0.value = np.zeros([bsize, self.hidden_dim])
        self.c0.value = np.zeros([bsize, self.hidden_dim])

        h = self.h0
        c = self.c0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, idx]  # Get value from batch
            x = Embed(in_i, self.eembed)
            h, c = self.enc_lstm_cell(x, h, c)
            # out_i = SoftMax(Dot(h, graph.ev2c))
            # outputs.append(out_i)

        self.encoded_h = h
        self.encoded_c = c

        # Build Decode Graph

        decode_in = self.input()
        decode_in.value = np.zeros([bsize])
        decode_embed = Embed(decode_in, self.dembed)
        x = decode_embed
        for idx in range(self.predict_len):
            h, c = self.dec_lstm_cell(x, h, c)
            out_i = ArgMax(SoftMax(Dot(h, self.dv2c)))
            outputs.append(out_i)
            x = Embed(out_i, self.dembed)
        self.output(Collect(outputs))
        self.expect(np.zeros([bsize, self.predict_len]))
Code example #13
    def build_graph(self, batch):
        enc_data = batch.data[0]
        dec_data = batch.data[1]
        self.reset()

        bsize, enc_length = enc_data.shape
        dec_length = dec_data.shape[1]

        outputs = []

        # Build Encode Graph
        self.h0.value = np.zeros([bsize, self.hidden_dim])
        self.c0.value = np.zeros([bsize, self.hidden_dim])

        h = self.h0
        c = self.c0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, idx]  # Get value from batch
            x = Embed(in_i, self.eembed)
            h, c = self.enc_lstm_cell(x, h, c)
            # out_i = SoftMax(Dot(h, graph.ev2c))
            # outputs.append(out_i)

        self.encoded_h = h
        self.encoded_c = c

        # Build Decode Graph
        for idx in range(dec_length - 1):
            in_i = self.input()
            in_i.value = dec_data[:, idx]
            x = Embed(in_i, self.dembed)
            h, c = self.dec_lstm_cell(x, h, c)
            out_i = SoftMax(Dot(h, self.dv2c))
            outputs.append(out_i)

        self.output(Collect(outputs))
        self.expect(dec_data[:, 1:])
Code example #14
    def build_graph(self, batch):
        enc_data = batch.data
        dec_data = batch.expect
        self.reset()
        bsize = 1
        enc_length = enc_data.shape[1]
        dec_length = dec_data.shape[0]

        outputs = []

        hcollect = []
        ccollect = []

        self.feh0.value = np.zeros([bsize, self.half_dim])
        self.fec0.value = np.zeros([bsize, self.half_dim])

        self.beh0.value = np.zeros([bsize, self.half_dim])
        self.bec0.value = np.zeros([bsize, self.half_dim])

        for line_idx in range(self.num_line):
            # Build Fwd Encode Graph

            fh = self.feh0
            fc = self.fec0
            for idx in range(enc_length):
                in_i = self.input()
                in_i.value = enc_data[line_idx, idx].reshape(1)  # Get value from batch
                x = Embed(in_i, self.feembed)
                fh, fc = self.fenc_lstm_cell(x, fh, fc)

            # Build Bwd Encode Graph
            bh = self.beh0
            bc = self.bec0
            for idx in range(enc_length):
                in_i = self.input()
                in_i.value = enc_data[line_idx, enc_length - 1 - idx].reshape(1)  # Get value from batch
                x = Embed(in_i, self.beembed)
                bh, bc = self.benc_lstm_cell(x, bh, bc)

            h = Concat(fh, bh)
            c = Concat(fc, bc)
            hcollect.append(h)
            ccollect.append(c)

        # Build Decode Graph
        h = Average(Collect(hcollect))
        c = Average(Collect(ccollect))

        self.encoded_h = h
        self.encoded_c = c

        for idx in range(dec_length - 1):
            in_i = self.input()
            in_i.value = dec_data[idx].reshape(1)
            x = Embed(in_i, self.dembed)
            h, c = self.dec_lstm_cell(x, h, c)
            out_i = SoftMax(Dot(h, self.dv2c))
            outputs.append(out_i)

        self.output(Collect(outputs))
        self.expect(dec_data[1:])
Code example #15
    def build_graph(self, batch):
        enc_data = batch.data
        self.reset()

        bsize, enc_length = enc_data.shape

        outputs = []

        fwd_encode_result = [None] * enc_length
        bwd_encode_result = [None] * enc_length

        # Build Fwd Encode Graph
        self.feh0.value = np.zeros([bsize, self.half_dim])
        self.fec0.value = np.zeros([bsize, self.half_dim])

        fh = self.feh0
        fc = self.fec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, idx]  # Get value from batch
            x = Embed(in_i, self.feembed)
            fh, fc = self.fenc_lstm_cell(x, fh, fc)
            fwd_encode_result[idx] = fh

        # Build Bwd Encode Graph
        self.beh0.value = np.zeros([bsize, self.half_dim])
        self.bec0.value = np.zeros([bsize, self.half_dim])

        bh = self.beh0
        bc = self.bec0
        for idx in range(enc_length):
            in_i = self.input()
            in_i.value = enc_data[:, enc_length - 1 - idx]  # Get value from batch
            x = Embed(in_i, self.beembed)
            bh, bc = self.benc_lstm_cell(x, bh, bc)
            bwd_encode_result[enc_length - 1 - idx] = bh

        # Build Decode Graph
        h = Concat(fh, bh)
        # c = Concat(fc, bc)

        self.encoded_h = h
        # self.encoded_c = c

        encode_result = []
        for idx in range(enc_length):
            encode_result.append(
                Concat(fwd_encode_result[idx], bwd_encode_result[idx]))
        encode_state = Collect(encode_result)

        init = self.input()
        init.value = np.zeros([bsize])
        x = Embed(init, self.dembed)
        for idx in range(self.predict_len):
            c = Attention(encode_state, h)

            h, c = self.dec_lstm_cell(x, h, c)
            out_i = ArgMax(SoftMax(Dot(h, self.dv2c)))
            outputs.append(out_i)
            x = Embed(out_i, self.dembed)
        self.output(Collect(outputs))
        self.expect(np.zeros([bsize, self.predict_len]))
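The Attention node used above is not defined in these excerpts. A common choice is dot-product attention over the per-step encoder states gathered in encode_state; the NumPy sketch below assumes that form and uses illustrative shapes (feeding the attention context in through the cell-state slot, as this decoder does, is a design choice of this particular model):

import numpy as np

def softmax(z):
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

def dot_product_attention(encode_states, h):
    # encode_states: (enc_length, bsize, dim) stacked encoder outputs,
    # h: (bsize, dim) current decoder state; returns a (bsize, dim) context vector
    scores = np.einsum('tbd,bd->bt', encode_states, h)      # alignment score for every encoder step
    weights = softmax(scores)                                # normalize over encoder steps
    context = np.einsum('bt,tbd->bd', weights, encode_states)
    return context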