Example #1
    def forward(self, inputs):
        # inputs: tuple of input tensors containing the raw input data.
        if len(inputs) == 1:
            inputs, = inputs
            n_samples = inputs[0].data.shape[0]

            # Initial states: initialized here if they were not passed in.
            fw_state = Tensor(np.random.randn(
                n_samples, self.n_time_step, self.hidden_dim))
            bw_state = Tensor(np.random.randn(
                n_samples, self.n_time_step, self.hidden_dim))

        elif len(inputs) == 3:
            inputs, fw_state, bw_state = inputs

        fw_inputs = tuple(list(inputs) + [fw_state])

        inputs.reverse()
        bw_inputs = tuple(list(inputs) + [bw_state])

        fw_outputs, fw_state = self.rnn_module1(fw_inputs)
        bw_outputs, bw_state = self.rnn_module2(bw_inputs)

        outputs = Concat()(fw_outputs, bw_outputs)

        return outputs, fw_state, bw_state
Example #2
def test_dot():
    a = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    b = Tensor(np.random.randn(4, 5), requires_grad=True, is_leaf=True)
    c, = dot_fn.Dot()(a, b)
    print(c.data.shape)
    c.backward()
    print(a.grad.data)
    print(b.grad.data)
Example #3
def test_sum():
    a = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    b, = sum_fn.BatchSum()(a)
    c = Tensor(np.random.randn(2, ), requires_grad=True, is_leaf=True)
    d, = add_fn.Add()(b, c)
    e, = add_fn.Add()(d, c)
    e.backward()
    print(a.grad.data)
    print(c.grad.data)
Example #4
    def batch_input_generator(self, X, y):
        # In the future, batches may come from a file or another source.
        n_samples = X.data.shape[0]
        batch_size = self.batch_size
        total_batch = int(n_samples / batch_size)

        for batch_idx in range(total_batch):
            yield Tensor(X.data[batch_idx*batch_size:(batch_idx+1)*batch_size]), \
                  Tensor(y.data[batch_idx*batch_size:(batch_idx+1)*batch_size])
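A minimal sketch (plain numpy, illustrative names) of the slicing this generator performs; note that, as above, any incomplete final batch is dropped by the integer division:

# Illustrative only: slice mini-batches the same way the generator above does.
import numpy as np

X_data = np.random.randn(10, 4)
y_data = np.random.randn(10, 1)
batch_size = 3
total_batch = int(10 / batch_size)   # == 3, the incomplete last batch is dropped

for batch_idx in range(total_batch):
    X_batch = X_data[batch_idx * batch_size:(batch_idx + 1) * batch_size]
    y_batch = y_data[batch_idx * batch_size:(batch_idx + 1) * batch_size]
    # X_batch.shape == (3, 4), y_batch.shape == (3, 1)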
Example #5
    def backward(self, grads):
        X_requires_grad, W_requires_grad, bias_requires_grad, n_samples, X, W = \
            self.saved_context

        X_grad = None
        W_grad = None
        bias_grad = None

        if X_requires_grad:
            # W.data: [n_in_features, n_out_features]
            # X.data: [n_samples, n_in_features]
            X_grad_data = np.repeat(np.sum(W.data, axis=1), n_samples).reshape(
                (n_samples, self.n_in_features))

            if isinstance(grads, tuple):
                y_pred_grad, = grads
                # y_pred_grad_data: [n_samples, n_out_features]
                X_grad_data *= np.repeat(np.sum(y_pred_grad.data, axis=1), self.n_in_features).\
                    reshape((n_samples, self.n_in_features))

            X_grad = Tensor(X_grad_data)

        if W_requires_grad:
            # X.data: [n_samples, n_in_features]
            # W.data: [n_in_features, n_out_features]
            W_grad_data = np.repeat(np.sum(X.data, axis=0),
                                    self.n_out_features).reshape(
                                        (self.n_in_features,
                                         self.n_out_features))

            if isinstance(grads, tuple):
                y_pred_grad, = grads
                # y_pred_grad.data: [n_samples, n_out_features]
                W_grad_data *= np.sum(y_pred_grad.data, axis=0).reshape(
                    (1, self.n_out_features))

            W_grad = Tensor(W_grad_data)

        if bias_requires_grad:
            # bias_grad_data: [n_out_features, ]
            bias_grad_data = np.ones((self.n_out_features, ))

            if isinstance(grads, tuple):
                y_pred_grad, = grads
                # y_pred_grad.data: [n_samples, n_out_features]
                bias_grad_data *= np.sum(y_pred_grad.data, axis=0)

            bias_grad = Tensor(bias_grad_data)

        #self.saved_context = None

        return X_grad, W_grad, bias_grad
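For comparison only, the textbook chain-rule gradients of a dense layer y = X·W + bias; this is a reference sketch in plain numpy, not the approximation implemented in the backward above:

# Reference sketch: standard chain-rule gradients of y = X @ W + bias.
import numpy as np

n_samples, n_in, n_out = 5, 3, 2
X = np.random.randn(n_samples, n_in)
W = np.random.randn(n_in, n_out)
y_pred_grad = np.random.randn(n_samples, n_out)   # upstream gradient

X_grad = y_pred_grad @ W.T            # [n_samples, n_in]
W_grad = X.T @ y_pred_grad            # [n_in, n_out]
bias_grad = y_pred_grad.sum(axis=0)   # [n_out, ]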
Example #6
    def backward(self, grads):
        Y_pred_grad, = grads

        X_data_shape, A_grad_data = self.saved_context
        B_grad_data = 1. - A_grad_data

        if isinstance(Y_pred_grad, Tensor):
            A_grad_data *= Y_pred_grad.data
            B_grad_data *= Y_pred_grad.data

        A_grad = Tensor(A_grad_data)
        B_grad = Tensor(B_grad_data)

        return A_grad, B_grad
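Assuming the mask saved in the forward pass marks where A attained the elementwise maximum (see Example #13), the upstream gradient is split between A and B by that mask and its complement; a small numpy illustration:

# The 0/1 mask routes the upstream gradient to whichever input was the max.
import numpy as np

A = np.array([3., 1., 5.])
B = np.array([2., 4., 5.])
Y = np.maximum(A, B)
mask = 1. * (A == Y)             # 1 where A wins (ties go to A): [1., 0., 1.]
upstream = np.array([0.1, 0.2, 0.3])
A_grad = mask * upstream         # [0.1, 0. , 0.3]
B_grad = (1. - mask) * upstream  # [0. , 0.2, 0. ]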
Example #7
def test_add():
    a = Tensor(np.array([[1, 2, 3, 4]]), requires_grad=True, is_leaf=True)
    b = Tensor(np.array([[2, 3, 5, 8]]), requires_grad=True, is_leaf=True)

    c, = add_fn.Add()(a, b)
    print(c.data.shape)

    d = Tensor(np.random.randn(1, 4))

    loss, = MSELoss()(c, d)
    print(loss.data)
    loss.backward()
    print(a.grad.data)
    print(b.grad.data)
    """
Example #8
    def forward(self, inputs):
        """
        Input: X, W, bias; tuple of tensors
            X: [n_samples, n_in_features]
            W: [n_in_features, n_out_features]
            bias: [n_out_features, ]

        Output: y_pred, tensor
            y_pred: [n_samples, n_out_features]

        Uses numpy broadcasting when adding the bias.
        y_pred = dot(X, W) + bias (if needed)
        """
        X, W, bias = inputs

        y_pred_data = np.dot(X.data, W.data)

        if self.is_bias:
            y_pred_data += bias.data

        y_pred = Tensor(y_pred_data)

        n_samples = X.data.shape[0]

        bias_requires_grad = bias.requires_grad if isinstance(
            bias, Tensor) else False

        self.saved_context = X.requires_grad, W.requires_grad, bias_requires_grad, \
                             n_samples, X, W

        return y_pred,
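A quick numpy shape check of the contract described in the docstring (illustrative sizes):

# Shape check for y_pred = dot(X, W) + bias with numpy broadcasting of bias.
import numpy as np

n_samples, n_in_features, n_out_features = 4, 3, 2
X = np.random.randn(n_samples, n_in_features)
W = np.random.randn(n_in_features, n_out_features)
bias = np.random.randn(n_out_features)

y_pred = np.dot(X, W) + bias    # bias broadcasts over the sample axis
assert y_pred.shape == (n_samples, n_out_features)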
Example #9
    def forward(self, inputs):
        X, = inputs
        y_pred = Tensor(X.data.reshape(self.target_shape))

        self.saved_context = X.data.shape

        return y_pred,
Example #10
    def backward(self, grads):
        #X_data = self.saved_context
        y_pred_data = self.saved_context
        """
        # tanh
        f(x) = tanh
        f'(x) = 1 - tanh ** 2
        
        # sigmoid
        f(x) = tanh(x) = 2 * sigmoid(2x) - 1
        f'(x) = 4 * sigmoid(2x) * (1 - sigmoid(2x))
        """
        #sig_2x = simgoid(2 * X_data)
        #X_grad_data = 4 * sig_2x * (1. - sig_2x)

        X_grad_data = 1 - y_pred_data**2

        if isinstance(grads, tuple):
            y_pred_grad, = grads
            if isinstance(y_pred_grad, Tensor):
                X_grad_data *= y_pred_grad.data

        X_grad = Tensor(X_grad_data)

        return X_grad,
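The derivative used here, d/dx tanh(x) = 1 - tanh(x)^2, can be sanity-checked numerically with a central finite difference:

# Numerical check of d/dx tanh(x) = 1 - tanh(x)**2.
import numpy as np

x = np.random.randn(5)
eps = 1e-6
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2 * eps)
analytic = 1 - np.tanh(x) ** 2
assert np.allclose(numeric, analytic, atol=1e-6)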
Example #11
    def forward(self, inputs):
        X, = inputs
        y_pred = Tensor(np.sum(X.data))

        self.saved_context = X.data.shape

        return y_pred,
Example #12
def test_model():
    n_samples = 20
    n_input_channel = 3
    input_width = 28
    input_height = 28
    n_output_channel = 4
    stride = 1
    padding = 0
    kernel_size = 5

    X = Tensor(
        np.random.randn(n_samples, n_input_channel, input_width, input_height))

    output_width = int((input_width - kernel_size + 2 * padding) / stride + 1)
    output_height = int((input_height - kernel_size + 2 * padding) / stride +
                        1)
    y = Tensor(
        np.random.randn(n_samples, n_output_channel, output_width,
                        output_height))

    conv2d = Conv2D(n_input_channel=n_input_channel,
                    n_output_channel=n_output_channel,
                    kernel_size=kernel_size,
                    stride=stride,
                    padding=padding,
                    input_width=input_width,
                    input_height=input_height,
                    is_bias=True,
                    initializer=None,
                    activation=None)

    model = Model()
    model.add(conv2d)

    n_epoch = 10
    batch_size = 2
    verbose = 0
    loss_fn = MSELoss()
    optimizer = SGD(lr=1e-5)

    model.compile(n_epoch=n_epoch,
                  batch_size=batch_size,
                  verbose=verbose,
                  loss_fn=loss_fn,
                  optimizer=optimizer)
    model.summary()
    model.fit(X, y)
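With these settings the output-size formula in the code evaluates to output_width = (28 - 5 + 2*0)/1 + 1 = 24 and likewise output_height = 24, so y has shape (20, 4, 24, 24).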
Example #13
    def forward(self, inputs):
        A, B = inputs
        Y_pred_data = np.maximum(A.data, B.data)
        Y_pred = Tensor(Y_pred_data)

        self.saved_context = A.data.shape, 1. * (A.data == Y_pred_data)

        return Y_pred,
Example #14
    def forward(self, inputs):
        X, = inputs

        y_pred = Tensor(X.data.transpose(self.axes))

        self.saved_context = X.data.shape

        return y_pred,
Example #15
def test_dropout():

    a = Tensor(np.random.randn(2, 3), requires_grad=True, is_leaf=True)
    b, = Dropout(dropout_ratio=0.2, is_training_mode=True)(a)
    print(a.data)
    print(b.data)
    b.backward()
    print(a.grad.data)
Example #16
    def forward(self, inputs):
        X, = inputs
        #y_pred = Tensor(np.clip(X.data, 0, np.inf))
        y_pred = Tensor(X.data * (X.data > 0))

        self.saved_context = X

        return y_pred,
Example #17
def test_transpose():

    a = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    axes = (1, 2, 0)
    b, = transpose.Transpose(axes=axes)(a)
    print(b.data.shape)
    b.backward()
    print(a.grad.data.shape)
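A general note on the expected shapes (not necessarily how Transpose.backward is implemented in this library): transposing with axes = (1, 2, 0) maps shape (2, 3, 4) to (3, 4, 2), and the gradient returns through the inverse permutation, which np.argsort(axes) gives:

# The gradient of y = x.transpose(axes) is the upstream gradient transposed
# by the inverse permutation of axes.
import numpy as np

x = np.random.randn(2, 3, 4)
axes = (1, 2, 0)
y = x.transpose(axes)                          # shape (3, 4, 2)
upstream = np.ones_like(y)
x_grad = upstream.transpose(np.argsort(axes))  # shape (2, 3, 4)
assert x_grad.shape == x.shape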
Example #18
    def backward(self, grads):
        a_grad_shape, b_grad_shape = self.saved_context

        a_grad_data = np.ones(a_grad_shape)
        b_grad_data = np.ones(b_grad_shape)

        if isinstance(grads, tuple):
            c_grad, = grads
            c_grad_data = c_grad.data
            a_grad_data *= c_grad_data
            b_grad_data *= c_grad_data

        a_grad = Tensor(a_grad_data)
        b_grad = Tensor(b_grad_data)

        #self.saved_context = None

        return a_grad, b_grad
Example #19
    def forward(self, inputs):
        X, = inputs

        y_pred_data = simgoid(X.data)
        y_pred = Tensor(y_pred_data)

        self.saved_context = y_pred_data

        return y_pred,
Example #20
    def forward(self, inputs):
        X, = inputs

        y_pred_data = np.repeat(X.data, self.repeat_times)
        y_pred = Tensor(y_pred_data)

        self.saved_context = X.data.shape

        return y_pred,
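Note that np.repeat without an axis argument flattens its input first, so y_pred here is 1-D whatever the shape of X; a small numpy illustration:

# np.repeat without an axis flattens the array before repeating.
import numpy as np

x = np.arange(6).reshape(2, 3)
print(np.repeat(x, 2))                 # [0 0 1 1 2 2 3 3 4 4 5 5], shape (12,)
print(np.repeat(x, 2, axis=0).shape)   # (4, 3) when an axis is given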
Example #21
def test_get_sub_tensor():

    # raw_shape = (2, 3, 4)
    coord_tuple = ((0, 1), (1, 2), (1, 3))
    a = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    b, = get_sub_tensor.GetSubTensor(coord_tuple=coord_tuple)(a)
    print(b.data)
    b.backward()
    print(a.grad.data)
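For this coord_tuple, numpy advanced indexing pairs the coordinates across axes, so b holds the two elements a[0, 1, 1] and a[1, 2, 3] and has shape (2,); a quick check:

# Advanced indexing with a tuple of index sequences picks element-wise coordinates.
import numpy as np

a = np.arange(24).reshape(2, 3, 4)
coord_tuple = ((0, 1), (1, 2), (1, 3))
sub = a[coord_tuple]
assert sub.shape == (2,)
assert sub[0] == a[0, 1, 1] and sub[1] == a[1, 2, 3]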
Example #22
    def forward(self, inputs):
        X, = inputs

        y_pred_data = tanh(X.data)
        y_pred = Tensor(y_pred_data)

        #self.saved_context = X.data
        self.saved_context = y_pred_data

        return y_pred,
Example #23
    def forward(self, inputs):
        # X: input tensor; only its shape is saved for backward
        X, = inputs

        y_pred_data = X.data[self.coord_tuple]
        y_pred = Tensor(y_pred_data)

        self.saved_context = X.data.shape

        return y_pred,
Example #24
    def forward(self, inputs):

        a, b = inputs

        c_data = a.data + b.data
        c = Tensor(c_data)

        self.saved_context = a.data.shape, b.data.shape

        return c,
Example #25
def test_concat():

    a = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    b = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)
    c = Tensor(np.random.randn(2, 3, 4), requires_grad=True, is_leaf=True)

    arr = (a, b, c)
    d, = concat.Concat()(*arr)

    #print(d.data.shape)

    y = Tensor(np.random.randn(3, 2, 3, 4))
    loss, = MSELoss()(d, y)
    print(loss.data)

    loss.backward()

    print(a.grad.data)
    """
Example #26
    def backward(self, grads):
        X_data_shape = self.saved_context
        X_grad_data = np.ones(X_data_shape)

        if isinstance(grads, tuple):
            y_pred_grad, = grads
            X_grad_data *= y_pred_grad.data

        X_grad = Tensor(X_grad_data)

        return X_grad,
Example #27
    def forward(self, inputs):
        # a: [n_samples, shape]
        # b: [shape]
        a, b = inputs

        c_data = a.data + b.data
        c = Tensor(c_data)

        self.saved_context = a.data.shape, b.data.shape

        return c,
Example #28
def test_reshape():
    raw_shape = (2, 3, 4)
    target_shape = (2, 4, 3)

    a = Tensor(np.arange(24).reshape(raw_shape),
               requires_grad=True,
               is_leaf=True)
    b, = reshape.Reshape(target_shape=target_shape)(a)
    print(b.data)
    b.backward()
    print(a.grad.data)
Example #29
def test_conv2d():

    n_samples = 2
    n_input_channel = 3
    input_width = 28
    input_height = 28
    n_output_channel = 4
    stride = 1
    padding = 0
    kernel_size = 5

    X = Tensor(
        np.random.randn(n_samples, n_input_channel, input_width, input_height))

    conv2d = Conv2D(n_input_channel=n_input_channel,
                    n_output_channel=n_output_channel,
                    kernel_size=kernel_size,
                    stride=stride,
                    padding=padding,
                    input_width=input_width,
                    input_height=input_height,
                    is_bias=True,
                    initializer=None,
                    activation=None)

    y_pred, = conv2d(X)

    #print(y_pred.data)
    #y_pred.backward()

    output_width = int((input_width - kernel_size + 2 * padding) / stride + 1)
    output_height = int((input_height - kernel_size + 2 * padding) / stride +
                        1)
    y = Tensor(
        np.random.randn(n_samples, n_output_channel, output_width,
                        output_height))
    loss, = MSELoss()(y_pred, y)
    loss.backward()
    print(conv2d.W.grad.data)
    print(conv2d.bias.grad.data)
    print(loss.data)
Example #30
    def backward(self, grads):
        X = self.saved_context
        X_grad_data = 1. * (X.data > 0)

        if isinstance(grads, tuple):
            y_pred_grad, = grads
            X_grad_data *= y_pred_grad.data
        X_grad = Tensor(X_grad_data)

        #self.saved_context = None

        return X_grad,
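Taken together with the ReLU forward in Example #16, a minimal numpy check of the forward mask and its gradient:

# ReLU forward zeroes out non-positive entries; the gradient is the same
# 0/1 mask applied to the upstream gradient.
import numpy as np

x = np.array([-2., -0.5, 0., 1.5, 3.])
y = x * (x > 0)                  # non-positive entries become 0
mask = 1. * (x > 0)              # [0., 0., 0., 1., 1.]
upstream = np.ones_like(x)
x_grad = mask * upstream         # gradient is 0 for non-positive inputs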