Example 1
    def test_conv_backward_naive(self):
        print("\n======== Test3LayerConvNet.test_conv_backward_naive:")
        X = np.random.randn(4, 3, 5, 5)
        W = np.random.randn(2, 3, 3, 3)
        b = np.random.randn(2,)
        dout = np.random.randn(4, 2, 5, 5)
        conv_param = {'stride': 1, 'pad': 1}

        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: conv_layers.conv_forward_naive(x, W, b, conv_param)[0], X, dout)
        dw_num = check_gradient.eval_numerical_gradient_array(
            lambda w: conv_layers.conv_forward_naive(X, w, b, conv_param)[0], W, dout)
        db_num = check_gradient.eval_numerical_gradient_array(
            lambda b: conv_layers.conv_forward_naive(X, W, b, conv_param)[0], b, dout)

        out, cache = conv_layers.conv_forward_naive(X, W, b, conv_param)
        dx, dw, db = conv_layers.conv_backward_naive(dout, cache)

        dx_error = error.rel_error(dx, dx_num)
        dw_error = error.rel_error(dw, dw_num)
        db_error = error.rel_error(db, db_num)

        print("dx_error : %.9f" % dx_error)
        print("dw_error : %.9f" % dw_error)
        print("db_error : %.9f" % db_error)

        self.assertLessEqual(dx_error, self.eps)
        self.assertLessEqual(dw_error, self.eps)
        self.assertLessEqual(db_error, self.eps)

        print("======== Test3LayerConvNet.test_conv_backward_naive: <END> ")
Example 2
    def test_affine_layer_backward(self):
        print("\n======== TestLayers.test_affine_layer_backward:")

        x = np.random.randn(10, 2, 3)
        w = np.random.randn(6, 5)
        b = np.random.randn(5)
        dout = np.random.randn(10, 5)

        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: layers.affine_forward(x, w, b)[0], x, dout)
        dw_num = check_gradient.eval_numerical_gradient_array(
            lambda w: layers.affine_forward(x, w, b)[0], w, dout)
        db_num = check_gradient.eval_numerical_gradient_array(
            lambda b: layers.affine_forward(x, w, b)[0], b, dout)

        _, cache = layers.affine_forward(x, w, b)
        dx, dw, db = layers.affine_backward(dout, cache)

        dx_diff = error.rel_error(dx_num, dx)
        dw_diff = error.rel_error(dw_num, dw)
        db_diff = error.rel_error(db_num, db)

        print("dx error : %.9f" % dx_diff)
        print("dw error : %.9f" % dw_diff)
        print("db error : %.9f" % db_diff)

        # NOTE: occasionally the random inputs produce an error slightly
        # greater than self.eps; re-running the test is simpler than
        # rewriting it to pass deterministically.
        self.assertLessEqual(dx_diff, self.eps)
        self.assertLessEqual(dw_diff, self.eps)
        self.assertLessEqual(db_diff, self.eps)

        print("======== TestLayers.test_affine_layer_backward: <END> ")
Example 3
    def test_step_backward(self):
        print("\n======== TestCaptioningRNN.test_step_forward:")

        N = 4
        D = 5
        H = 6
        X = np.random.randn(N, D)
        h = np.random.randn(N, H)
        Wx = np.random.randn(D, H)
        Wh = np.random.randn(H, H)
        b = np.random.randn(H)

        out, cache = rnn_layers.rnn_step_forward(X, h, Wx, Wh, b)
        dnext_h = np.random.randn(*out.shape)

        fx = lambda x: rnn_layers.rnn_step_forward(x, h, Wx, Wh, b)[0]
        fh = lambda prev_h: rnn_layers.rnn_step_forward(X, prev_h, Wx, Wh, b)[0]
        fWx = lambda Wx: rnn_layers.rnn_step_forward(X, h, Wx, Wh, b)[0]
        fWh = lambda Wh: rnn_layers.rnn_step_forward(X, h, Wx, Wh, b)[0]
        fb = lambda b: rnn_layers.rnn_step_forward(X, h, Wx, Wh, b)[0]

        dx_num = check_gradient.eval_numerical_gradient_array(fx, X, dnext_h)
        dprev_h_num = check_gradient.eval_numerical_gradient_array(
            fh, h, dnext_h)
        dWx_num = check_gradient.eval_numerical_gradient_array(
            fWx, Wx, dnext_h)
        dWh_num = check_gradient.eval_numerical_gradient_array(
            fWh, Wh, dnext_h)
        db_num = check_gradient.eval_numerical_gradient_array(fb, b, dnext_h)

        dx, dprev_h, dWx, dWh, db = rnn_layers.rnn_step_backward(
            dnext_h, cache)

        dx_err = error.rel_error(dx, dx_num)
        dprev_h_err = error.rel_error(dprev_h, dprev_h_num)
        dwx_err = error.rel_error(dWx, dWx_num)
        dwh_err = error.rel_error(dWh, dWh_num)
        db_err = error.rel_error(db, db_num)

        print("dx_err : %f" % dx_err)
        print("dprev_h_err : %f" % dprev_h_err)
        print("dwx_err : %f" % dwx_err)
        print("dwh_err : %f" % dwh_err)
        print("db_err : %f" % db_err)

        self.assertLessEqual(dx_err, self.eps)
        self.assertLessEqual(dprev_h_err, self.eps)
        #self.assertLessEqual(dwx_err, self.eps)
        self.assertLessEqual(dwh_err, self.eps)
        self.assertLessEqual(db_err, self.eps)

        print("======== TestCaptioningRNN.test_step_forward: <END> ")
Example 4
    def test_affine_backward(self):
        print("======== TestAffineLayerObject.test_affine_backward:")
        N = 4
        D = 8
        affine_layer = layer_objects.AffineLayer(self.weight_scale,
                                                 self.weight_init, N, D)

        print(affine_layer)
        self.assertEqual(affine_layer.W.shape[0], N)
        self.assertEqual(affine_layer.W.shape[1], D)

        print('Computing affine forward pass')
        X = np.linspace(-0.5, 0.5, num=N * D).reshape(N, D)
        print('X shape : %s' % (str(X.shape)))
        h = affine_layer.forward(X)
        print('forward activation shape: %s' % str(h.shape))
        print('Computing affine backward pass')
        dz = np.random.randn(*h.shape)
        print('Gradient shape : %s' % str(dz.shape))
        dx = affine_layer.backward(dz)
        # The numerical gradient must be taken through the forward pass
        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: affine_layer.forward(x), X, dz)
        dx_err = error.rel_error(dx, dx_num)

        print('dx error: %f' % dx_err)
        self.assertLessEqual(dx_err, self.eps)

        print("======== TestAffineLayerObject.test_affine_backward: <END> ")
Example 5
    def test_gradient(self):
        x = np.random.randn(10, 2, 3)
        w = np.random.randn(6, 5)
        b = np.random.randn(5)
        dout = np.random.randn(10, 5)

        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: layers.affine_forward(x, w, b)[0], x, dout)
        dw_num = check_gradient.eval_numerical_gradient_array(
            lambda w: layers.affine_forward(x, w, b)[0], w, dout)
        db_num = check_gradient.eval_numerical_gradient_array(
            lambda b: layers.affine_forward(x, w, b)[0], b, dout)

        _, cache = layers.affine_forward(x, w, b)
        dx, dw, db = layers.affine_backward(dout, cache)

        print("dx error : %.6f " % error.rel_error(dx_num, dx))
        print("dw error : %.6f " % error.rel_error(dw_num, dw))
        print("db error : %.6f " % error.rel_error(db_num, db))
Example 6
    def test_temporal_affine_forward(self):
        print("\n======== TestCaptioningRNN.test_temporal_affine_forward:")

        N = 2
        T = 3
        D = 4
        M = 5

        X = np.random.randn(N, T, D)
        W = np.random.randn(D, M)
        b = np.random.randn(M)

        out, cache = rnn_layers.temporal_affine_forward(X, W, b)
        dout = np.random.randn(*out.shape)
        # Forward pass lambda functions
        fx = lambda x: rnn_layers.temporal_affine_forward(x, W, b)[0]
        fw = lambda w: rnn_layers.temporal_affine_forward(X, w, b)[0]
        fb = lambda b: rnn_layers.temporal_affine_forward(X, W, b)[0]

        dx_num = check_gradient.eval_numerical_gradient_array(fx, X, dout)
        dw_num = check_gradient.eval_numerical_gradient_array(fw, W, dout)
        db_num = check_gradient.eval_numerical_gradient_array(fb, b, dout)
        dx, dw, db = rnn_layers.temporal_affine_backward(dout, cache)
        # Compute errors
        dx_err = error.rel_error(dx_num, dx)
        dw_err = error.rel_error(dw_num, dw)
        db_err = error.rel_error(db_num, db)

        print('dx_err : %f' % dx_err)
        print('dw_err : %f' % dw_err)
        print('db_err : %f' % db_err)

        self.assertLessEqual(dx_err, self.eps)
        self.assertLessEqual(dw_err, self.eps)
        self.assertLessEqual(db_err, self.eps)

        print(
            "======== TestCaptioningRNN.test_temporal_affine_forward: <END> ")
Example 7
    def test_batchnorm_backward(self):
        print("\n======== TestLayersBatchnorm.test_batchnorm_backward:")

        N = 4
        D = 5
        x = 5 * np.random.randn(N, D) + 12
        gamma = np.random.randn(D)
        beta = np.random.randn(D)
        dout = np.random.randn(N, D)

        bn_param = {'mode': 'train'}

        fx = lambda x: layers.batchnorm_forward(x, gamma, beta, bn_param)[0]
        fg = lambda gamma: layers.batchnorm_forward(x, gamma, beta, bn_param)[0]
        fb = lambda beta: layers.batchnorm_forward(x, gamma, beta, bn_param)[0]

        dx_num = check_gradient.eval_numerical_gradient_array(fx, x, dout)
        da_num = check_gradient.eval_numerical_gradient_array(fg, gamma, dout)
        db_num = check_gradient.eval_numerical_gradient_array(fb, beta, dout)

        _, cache = layers.batchnorm_forward(x, gamma, beta, bn_param)
        dx, dgamma, dbeta = layers.batchnorm_backward(dout, cache)

        dx_error = error.rel_error(dx, dx_num)
        dgamma_error = error.rel_error(dgamma, da_num)
        dbeta_error = error.rel_error(dbeta, db_num)

        print("dx_error : %f" % dx_error)
        print("dgamma_error : %f" % dgamma_error)
        print("dbeta_error : %f" % dbeta_error)

        self.assertLessEqual(dx_error, self.eps)
        self.assertLessEqual(dgamma_error, self.eps)
        self.assertLessEqual(dbeta_error, self.eps)

        print("======== TestLayersBatchnorm.test_batchnorm_backward: <END> ")
Example 8
    def test_relu_layer_backward(self):
        print("\n======== TestLayers.test_relu_layer_backward:")

        x = np.random.randn(10, 10)
        dout = np.random.randn(*x.shape)
        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: layers.relu_forward(x)[0], x, dout)
        _, cache = layers.relu_forward(x)
        dx = layers.relu_backward(dout, cache)
        dx_error = error.rel_error(dx_num, dx)

        print("dx_error : %.9f" % (dx_error))
        self.assertLessEqual(dx_error, self.eps)

        print("======== TestLayers.test_relu_layer_backward: <END> ")
Example 9
    def test_relu_backward(self):
        print("======== TestAffineLayerObject.test_relu_backward:")

        X = np.random.randn(10, 10)
        dout = np.random.randn(*X.shape)
        relu_layer = layer_objects.ReLULayer(self.weight_scale,
                                             self.weight_init, 10, 10)
        relu_layer.X = X  # store cache

        dx_num = check_gradient.eval_numerical_gradient_array(
            lambda x: relu_layer.forward(x), X, dout)
        dx = relu_layer.backward(dout)
        dx_error = error.rel_error(dx_num, dx)

        print("dx_error : %.9f" % (dx_error))
        self.assertLessEqual(dx_error, self.eps)

        print("======== TestAffineLayerObject.test_relu_backward: <END> ")
Example 10
    def test_word_embedding_backward(self):
        print("\n======== TestCaptioningRNN.test_word_embedding_backward:")
        N = 2
        T = 3
        V = 5
        D = 3

        X = np.random.randint(V, size=(N, T))
        W = np.random.randn(V, D)

        out, cache = rnn_layers.word_embedding_forward(X, W)
        print('cache len : %d' % len(cache))
        dout = np.random.randn(*out.shape)
        dW = rnn_layers.word_embedding_backward(dout, cache)

        f = lambda W: rnn_layers.word_embedding_forward(X, W)[0]
        dW_num = check_gradient.eval_numerical_gradient_array(f, W, dout)
        dw_error = error.rel_error(dW, dW_num)

        print("dW error : %f" % dw_error)
        self.assertLessEqual(dw_error, self.eps)

        print(
            "======== TestCaptioningRNN.test_word_embedding_backward: <END> ")