Example 1
    def twoDims_batchnormal_forward(self, A_pre, mode='train', momentum=0.9):
        if mode == 'train':
            if 'beta' not in self.parameters:
                self.parameters['beta'] = p.zeros((A_pre.shape[-1],))
                self.parameters['gamma'] = p.ones_like(self.parameters['beta'])
            mean = p.mean(A_pre, axis=0)
            xmu = A_pre - mean
            var = p.mean(xmu ** 2, axis=0)

            if self.running_mean is None:
                self.running_mean = p.zeros(A_pre.shape[-1], dtype=A_pre.dtype)
                self.running_var = p.zeros(A_pre.shape[-1], dtype=A_pre.dtype)
            # Exponential moving average of the batch statistics, for test time.
            self.running_mean = momentum * self.running_mean + (1 - momentum) * mean
            self.running_var = momentum * self.running_var + (1 - momentum) * var

            self.sqrtvar = p.sqrt(var + self.epsilon)
            ivar = 1. / self.sqrtvar
            xhat = xmu * ivar
            gammax = self.parameters['gamma'] * xhat
            out = gammax + self.parameters['beta']
            self.caches = (xhat, xmu, ivar, self.sqrtvar)
        elif mode == 'test':
            # At test time, normalize with the running statistics rather than
            # the last training batch's sqrtvar.
            scale = self.parameters['gamma'] / p.sqrt(self.running_var + self.epsilon)
            out = A_pre * scale + (self.parameters['beta'] - self.running_mean * scale)
        else:
            raise ValueError('Invalid batchnorm forward mode "%s"' % mode)
        return out
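Throughout these examples, `p` appears to be a NumPy-compatible array module (NumPy or CuPy). As a standalone sanity check of the train-branch math above: the standardized xhat has zero mean and unit variance per feature before gamma and beta are applied. A minimal sketch in plain NumPy:

import numpy as np

A = np.random.randn(32, 4) * 3.0 + 5.0             # toy (batch, features) input
mean = A.mean(axis=0)
var = ((A - mean) ** 2).mean(axis=0)
xhat = (A - mean) / np.sqrt(var + 1e-8)            # same normalization as above
print(np.allclose(xhat.mean(axis=0), 0.0, atol=1e-7))  # True: zero mean
print(np.allclose(xhat.var(axis=0), 1.0, atol=1e-4))   # True: unit variance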
Example 2
    def forward(self, x, Y=None, mode='train'):
        self.x = x
        N, T = x.shape
        out = p.zeros((N, T, self.word_dim))

        for i in range(N):
            for j in range(T):
                out[i, j] = self.parameters['W'][x[i, j]]
        return out
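The double loop above is the explicit form of integer (fancy) indexing: indexing the embedding table with an integer id array gathers one row per id. A minimal NumPy sketch of the equivalence:

import numpy as np

W = np.random.randn(10, 5)           # vocabulary of 10 words, 5-dim embeddings
x = np.array([[1, 3], [7, 0]])       # (N=2, T=2) word ids
out = W[x]                           # shape (2, 2, 5), same as the loop
loop_out = np.zeros((2, 2, 5))
for i in range(2):
    for j in range(2):
        loop_out[i, j] = W[x[i, j]]
print(np.allclose(out, loop_out))    # True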
Example 3
    def forward(self, x, a0=None, mode='train'):
        """
        :param x: 二维矩阵 [m, n_x]
        :param a0:
        :param mode:
        :return:
        """
        self.init_params(x.shape[-1])
        m, time_steps, n_x = x.shape
        n_a = self.parameters['b'].shape[0] // 4  # b stacks the four gate biases
        a = p.zeros([m, time_steps, n_a])
        a_prev = a0 if a0 is not None else p.zeros([m, n_a])
        c_prev = p.zeros([m, n_a])

        for t in range(time_steps):
            xt = x[:, t, :]
            a_next, c_next, self.cache[t] = self.lstm_step_forward(xt, a_prev, c_prev)
            a_prev = a_next
            c_prev = c_next
            a[:, t, :] = a_prev
        return a
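`lstm_step_forward` is not shown in this section. Judging from the cache tuple unpacked in Example 4 (z_i, z_f, z_o, z_g, z_t, c_prev, a_prev, x) and the 4 * n_a parameter layout, it plausibly implements the standard LSTM cell. A hypothetical sketch, assuming `Wx` has shape (n_x, 4*n_a), `Wa` has shape (n_a, 4*n_a), and `b` has shape (4*n_a,):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_step_forward_sketch(xt, a_prev, c_prev, Wx, Wa, b):
    # One affine map, then split into the four gate pre-activations.
    z = xt @ Wx + a_prev @ Wa + b
    n_a = a_prev.shape[1]
    z_i = sigmoid(z[:, 0 * n_a:1 * n_a])   # input gate
    z_f = sigmoid(z[:, 1 * n_a:2 * n_a])   # forget gate
    z_o = sigmoid(z[:, 2 * n_a:3 * n_a])   # output gate
    z_g = np.tanh(z[:, 3 * n_a:4 * n_a])   # candidate cell update
    c_next = z_f * c_prev + z_i * z_g
    z_t = np.tanh(c_next)
    a_next = z_o * z_t
    cache = (z_i, z_f, z_o, z_g, z_t, c_prev, a_prev, xt)
    return a_next, c_next, cache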
Example 4
    def backward(self, dout):
        m, time_steps, n_a = dout.shape
        # Peek at the last step's cache just to recover n_x from x's shape.
        z_i, z_f, z_o, z_g, z_t, c_prev, a_prev, x = self.cache[time_steps - 1]
        n_x = x.shape[1]

        da_prev = p.zeros((m, n_a))
        dc_prev = p.zeros((m, n_a))
        dx = p.zeros((m, time_steps, n_x))
        dWx = p.zeros((n_x, 4 * n_a))
        dWa = p.zeros((n_a, 4 * n_a))
        db = p.zeros((4 * n_a,))

        for t in reversed(range(time_steps)):  # from the last time step back to the first
            da_next = dout[:, t, :] + da_prev
            dc_next = dc_prev
            dx[:, t, :], da_prev, dc_prev, dWxt, dWat, dbt = self.lstm_step_backward(da_next, dc_next, self.cache[t])
            dWx, dWa, db = dWx + dWxt, dWa + dWat, db + dbt

        da0 = da_prev
        self.gradients['a'] = da0
        self.gradients['x'] = dx
        self.gradients['Wx'] = 1. / m * dWx
        self.gradients['Wa'] = 1. / m * dWa
        self.gradients['b'] = 1. / m * db
        return dx, da0
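For the same assumed cell, the per-step backward would look like the sketch below. It is hypothetical: it matches the (dx, da_prev, dc_prev, dWx, dWa, db) return order used above, but passes the weights explicitly, whereas the class version presumably reads them from self.parameters.

import numpy as np

def lstm_step_backward_sketch(da_next, dc_next, cache, Wx, Wa):
    z_i, z_f, z_o, z_g, z_t, c_prev, a_prev, xt = cache
    # Gradient reaching the cell state: directly via dc_next, and through
    # a_next = z_o * tanh(c_next).
    dc = dc_next + da_next * z_o * (1.0 - z_t ** 2)
    # Gate gradients, folding in the sigmoid/tanh local derivatives.
    dz_i = dc * z_g * z_i * (1.0 - z_i)
    dz_f = dc * c_prev * z_f * (1.0 - z_f)
    dz_o = da_next * z_t * z_o * (1.0 - z_o)
    dz_g = dc * z_i * (1.0 - z_g ** 2)
    dz = np.hstack([dz_i, dz_f, dz_o, dz_g])   # (m, 4*n_a), same split order
    dxt = dz @ Wx.T
    da_prev = dz @ Wa.T
    dc_prev = dc * z_f
    dWx = xt.T @ dz
    dWa = a_prev.T @ dz
    db = dz.sum(axis=0)
    return dxt, da_prev, dc_prev, dWx, dWa, db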
Example 5
    def backward(self, dh):
        N, T, H = dh.shape
        _, _, D = self.x.shape

        a_next = self.h[:, T - 1, :]

        da_prev = p.zeros((N, H))
        dx = p.zeros((N, T, D))
        dWx = p.zeros((D, H))
        dWh = p.zeros((H, H))
        db = p.zeros((H,))

        for t in reversed(range(T)):  # walk backwards through time
            xt = self.x[:, t, :]

            # Rebuild this step's cache from the activations stored in forward.
            if t == 0:
                a_prev = self.h0
            else:
                a_prev = self.h[:, t - 1, :]

            step_cache = (xt, a_prev, a_next)
            a_next = a_prev  # becomes a_next for the next (earlier) step
            da_next = dh[:, t, :] + da_prev
            dx[:, t, :], da_prev, dWxt, dWht, dbt = self.rnn_step_backward(da_next, step_cache)
            dWx, dWh, db = dWx + dWxt, dWh + dWht, db + dbt
        self.gradients['Wxa'] = 1. / N * dWx
        self.gradients['Waa'] = 1. / N * dWh
        self.gradients['ba'] = 1. / N * db
        dh0 = da_prev
        return dx, dh0
Example 6
    def forward(self, x, h0=None, mode='train'):
        self.x = x
        N, T, D = x.shape
        self.init_params(D)

        self.h = p.zeros((N, T, self.unit_number))
        # Mirror Example 3: default a missing initial state to zeros, and store
        # it so backward's t == 0 branch sees a real array instead of None.
        a_prev = h0 if h0 is not None else p.zeros((N, self.unit_number))
        self.h0 = a_prev
        for t in range(T):
            xt = x[:, t, :]
            a_next = self.rnn_step_forward(xt, a_prev)
            a_prev = a_next
            if a_prev.ndim == 3:
                # Hack: collapse an accidental extra axis (only valid when N == 1).
                a_prev = a_prev.reshape(1, -1)
            self.h[:, t, :] = a_prev
        return self.h
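`rnn_step_forward` is likewise not shown. Given the gradient keys 'Wxa', 'Waa', and 'ba' in Example 5, a plausible vanilla tanh cell would be:

import numpy as np

def rnn_step_forward_sketch(xt, a_prev, Wxa, Waa, ba):
    # Vanilla RNN cell: mix the current input with the previous hidden state.
    return np.tanh(xt @ Wxa + a_prev @ Waa + ba)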
Example 7
def col2im_indices_cpu(cols,
                       x_shape,
                       field_height=3,
                       field_width=3,
                       padding=1,
                       stride=1):
    N, C, H, W = x_shape
    H_padded, W_padded = H + 2 * padding, W + 2 * padding
    x_padded = p.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)
    k, i, j = get_im2col_indices(x_shape, field_height, field_width, padding,
                                 stride)
    cols_reshaped = cols.reshape(C * field_height * field_width, -1, N)
    cols_reshaped = cols_reshaped.transpose(2, 0, 1)
    p.add.at(x_padded, (slice(None), k, i, j), cols_reshaped)
    if padding == 0:
        return x_padded
    return x_padded[:, :, padding:-padding, padding:-padding]
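`get_im2col_indices` is not defined in this section. For reference, a common CS231n-style implementation that matches the (k, i, j) usage above; the repo's own version may differ:

import numpy as np

def get_im2col_indices(x_shape, field_height=3, field_width=3, padding=1, stride=1):
    N, C, H, W = x_shape
    out_height = (H + 2 * padding - field_height) // stride + 1
    out_width = (W + 2 * padding - field_width) // stride + 1
    # Offsets within one receptive field, tiled over the channels...
    i0 = np.tile(np.repeat(np.arange(field_height), field_width), C)
    j0 = np.tile(np.arange(field_width), field_height * C)
    # ...plus the top-left corner of every output position.
    i1 = stride * np.repeat(np.arange(out_height), out_width)
    j1 = stride * np.tile(np.arange(out_width), out_height)
    i = i0.reshape(-1, 1) + i1.reshape(1, -1)
    j = j0.reshape(-1, 1) + j1.reshape(1, -1)
    k = np.repeat(np.arange(C), field_height * field_width).reshape(-1, 1)
    return (k, i, j)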
Example 8
def ReluGrad(Z):
    # Derivative of ReLU: 1 where the pre-activation is positive, 0 elsewhere.
    res = p.zeros(Z.shape)
    res[Z > 0] = 1
    return res
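The same mask can be written in one expression; a quick NumPy check of the equivalence:

import numpy as np

Z = np.array([[-1.0, 0.0], [0.5, 2.0]])
res = np.zeros(Z.shape)
res[Z > 0] = 1
print(np.array_equal(res, (Z > 0).astype(Z.dtype)))  # True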
Example 9
    def init_params(self, dim):
        if 'W' not in self.parameters:
            shape = (dim, self.unit_number)
            self.parameters['W'] = init.get_init(self.activation, shape, self.init_option)
        if 'b' not in self.parameters:
            self.parameters['b'] = p.zeros(self.unit_number)
Example 10
    def init_params(self, A_pre):
        if 'W' not in self.parameters:
            # (filter_count, in_channels, kernel_h, kernel_w)
            W_shape = (self.filter_count, A_pre.shape[1], self.filter_shape[0], self.filter_shape[1])
            self.parameters['W'] = init.get_init(self.activation, W_shape, self.init_option)
        if 'b' not in self.parameters:
            self.parameters['b'] = p.zeros(self.filter_count)