def gauss_bernoulli_params(m, y):
    """Split a network output into 2-D Gaussian-mixture + Bernoulli params.

    Args:
        m: Number of mixture components.
        y: Network output holding, concatenated along the feature axis:
           mixture weights (m), means (2m), log-stddevs (2m),
           correlations (m) and an end-of-stroke term (1).

    Returns:
        A pair ``(params, e)`` where ``params`` is the tuple
        (mixture weights, means0, means1, stdds0, stdds1, correlations)
        and ``e`` is the raw end-of-stroke output.
    """
    widths = [m, 2 * m, 2 * m, m, 1]
    mixws, means, stdds, corrs, e = split_axis_by_widths(y, widths)
    # Normalize mixture weights and map raw outputs to valid parameter ranges.
    mixws = F.softmax(mixws)
    means0, means1 = split_axis_by_widths(means, 2)
    # exp() guarantees positive standard deviations.
    stdds0, stdds1 = split_axis_by_widths(F.exp(stdds), 2)
    # tanh() keeps correlations in (-1, 1).
    corrs = F.tanh(corrs)
    return (mixws, means0, means1, stdds0, stdds1, corrs), e
    def forward_one_step(self, x_data, c_data, y_data, state, train=True):
        """Run one timestep of the 3-layer LSTM with a soft attention window.

        Args:
            x_data: Input array for this timestep (wrapped as a Variable).
            c_data: Character-sequence (one-hot) array for the window.
            y_data: Target array for the loss.
            state: Dict of recurrent state: LSTM cells/hiddens c1..c3/h1..h3,
                previous window ``w`` and window-location ``w_means``.
            train: When False, Variables are created volatile (no backprop).

        Returns:
            (new_state, loss) where new_state has the same keys as ``state``.
        """
        x = chainer.Variable(x_data, volatile=not train)
        t = chainer.Variable(y_data, volatile=not train)
        c = chainer.Variable(c_data, volatile=not train)

        h1_in = self.l1_first(x) + self.l1_recur(state['h1']) + self.l1_w(state['w'])
        c1, h1 = F.lstm(state['c1'], h1_in)

        # Soft attention window over the character sequence (Graves-style).
        ws = F.exp(self.lw(h1))
        w_mixws, w_gains, w_means = split_axis_by_widths(ws, 3)
        # Window locations are cumulative: offsets are added to the previous means.
        w_means += state['w_means']
        w = self.forward_window(w_mixws, w_gains, w_means, c)

        # NOTE(review): layers 2 and 3 reuse self.l1_w for the window input —
        # confirm this weight sharing is intentional (self.l2_w / self.l3_w
        # may have been intended).
        h2_in = self.l2_first(x) + self.l2_recur(state['h2']) + self.l1_w(w) + self.l2_input(h1)
        c2, h2 = F.lstm(state['c2'], h2_in)

        h3_in = self.l3_first(x) + self.l3_recur(state['h3']) + self.l1_w(w) + self.l3_input(h2)
        c3, h3 = F.lstm(state['c3'], h3_in)

        # BUG FIX: F.concat takes a tuple of arrays, not separate positional
        # arguments (the second arg is ``axis``); pass (h1, h2, h3) as one tuple.
        y = self.l4(F.concat((h1, h2, h3)))

        state = {'c1': c1, 'h1': h1, 'c2': c2, 'h2': h2, 'c3': c3, 'h3': h3,
                 'w': w, 'w_means': w_means}
        return state, loss_func(self.noutput_gauss, y, t)
# Esempio n. 3 (scraped example marker — commented out to keep the file parseable)
# 0
 def check(self, widths):
     """Round-trip test: splitting then concatenating must reproduce the input.

     Builds a deterministic float32 array of shape (mini_batch, sum(self.ws)),
     splits it with ``split_axis_by_widths`` and re-joins with ``concat``,
     asserting the result equals the original data.
     """
     # NOTE(review): total size comes from self.ws while the split uses the
     # ``widths`` argument — presumably these describe the same partition;
     # confirm against the callers.
     x_size = sum(self.ws)
     x = numpy.arange(self.mini_batch * x_size,
                      dtype=numpy.float32).reshape(self.mini_batch, x_size)
     # self.context moves the array to the target device (CPU/GPU) — TODO confirm.
     x = chainer.Variable(self.context(x))
     y = split_axis_by_widths(x, widths)
     z = concat(y)
     assert_allclose(x.data, z.data)
    def forward_one_step(self, hidden_state, lstm_cells, x_data, t_x_data, t_e_data, train=True):
        """Run one step of the network and compute the mixture-density loss.

        Args:
            hidden_state: Recurrent hidden state passed through bottle_neck.
            lstm_cells: LSTM cell state passed through bottle_neck.
            x_data: Raw input array for this timestep.
            t_x_data: Target coordinate array (two width-1 components).
            t_e_data: Target end-of-stroke array.
            train: When False, Variables are created volatile (no backprop).

        Returns:
            (hidden_state, lstm_cells, loss).
        """
        # BUG FIX: the original also wrapped x_data in a chainer.Variable that
        # was never used — bottle_neck receives the raw x_data array. The dead
        # local has been removed; confirm bottle_neck is meant to take the raw
        # array rather than a Variable.
        t_x = chainer.Variable(t_x_data, volatile=not train)
        t_e = chainer.Variable(t_e_data, volatile=not train)

        gps, y_e, hidden_state, lstm_cells = self.bottle_neck(hidden_state, lstm_cells, x_data, train)
        # Split the target coordinates into their two width-1 components.
        t_x = split_axis_by_widths(t_x, [1, 1])
        gi, e = (gps + tuple(t_x)), (y_e, t_e)
        p = gaussian_mixture_2d_ref(*gi)
        loss = concat_losses(p, e)

        return hidden_state, lstm_cells, loss
# Esempio n. 5 (scraped example marker — commented out to keep the file parseable)
# 0
    def forward_one_step(self, x_data, c_data, y_data, state, train=True):
        """Run one timestep of the 3-layer LSTM with a soft attention window.

        Args:
            x_data: Input array for this timestep (wrapped as a Variable).
            c_data: Character-sequence (one-hot) array for the window.
            y_data: Target array for the loss.
            state: Dict of recurrent state: LSTM cells/hiddens c1..c3/h1..h3,
                previous window ``w`` and window-location ``w_means``.
            train: When False, Variables are created volatile (no backprop).

        Returns:
            (new_state, loss) where new_state has the same keys as ``state``.
        """
        x = chainer.Variable(x_data, volatile=not train)
        t = chainer.Variable(y_data, volatile=not train)
        c = chainer.Variable(c_data, volatile=not train)

        h1_in = self.l1_first(x) + self.l1_recur(state["h1"]) + self.l1_w(state["w"])
        c1, h1 = F.lstm(state["c1"], h1_in)

        # Soft attention window over the character sequence (Graves-style).
        ws = F.exp(self.lw(h1))
        w_mixws, w_gains, w_means = split_axis_by_widths(ws, 3)
        # Window locations are cumulative: offsets are added to the previous means.
        w_means += state["w_means"]
        w = self.forward_window(w_mixws, w_gains, w_means, c)

        # NOTE(review): layers 2 and 3 reuse self.l1_w for the window input —
        # confirm this weight sharing is intentional (self.l2_w / self.l3_w
        # may have been intended).
        h2_in = self.l2_first(x) + self.l2_recur(state["h2"]) + self.l1_w(w) + self.l2_input(h1)
        c2, h2 = F.lstm(state["c2"], h2_in)

        h3_in = self.l3_first(x) + self.l3_recur(state["h3"]) + self.l1_w(w) + self.l3_input(h2)
        c3, h3 = F.lstm(state["c3"], h3_in)

        # BUG FIX: F.concat takes a tuple of arrays, not separate positional
        # arguments (the second arg is ``axis``); pass (h1, h2, h3) as one tuple.
        y = self.l4(F.concat((h1, h2, h3)))

        state = {"c1": c1, "h1": h1, "c2": c2, "h2": h2, "c3": c3, "h3": h3, "w": w, "w_means": w_means}
        return state, loss_func(self.noutput_gauss, y, t)
def split_args(m, y, t_x, t_e):
    """Assemble mixture parameters and targets into loss-function arguments.

    Args:
        m: Number of mixture components.
        y: Network output, split by ``gauss_bernoulli_params``.
        t_x: Target coordinates (split into two width-1 components).
        t_e: Target end-of-stroke values.

    Returns:
        (gaussian_args, (y_e, t_e)) where gaussian_args is the mixture
        parameter tuple extended with the two target coordinate parts.
    """
    gps, y_e = gauss_bernoulli_params(m, y)
    t_x = split_axis_by_widths(t_x, [1, 1])
    # BUG FIX: split_axis_by_widths returns a list, and tuple + list raises
    # TypeError; convert explicitly, matching the sibling forward_one_step
    # which does ``gps + tuple(t_x)``.
    return gps + tuple(t_x), (y_e, t_e)