Example 1
    def test_backward(self):
        """Test gradient computation for inputs and all layer's parameters."""
        linear = LinearLayer(n_in=10, n_out=5)

        check = check_finite_differences(
            linear.forward,
            linear.backward,
            gen_input_fn=lambda: (np.random.randn(30, 10), ),
            aux_only=True)
        self.assertTrue(check)

        inp = (np.random.randn(50, 30, 10), )
        checker = TestParamGradInLayer(linear, 'W', layer_input=inp)
        check = check_finite_differences(
            checker.forward,
            checker.backward,
            gen_input_fn=lambda: (np.random.randn(*linear.params['W'].shape), ),
            aux_only=True)
        self.assertTrue(check)

        checker = TestParamGradInLayer(linear, 'b', layer_input=inp)
        check = check_finite_differences(
            checker.forward,
            checker.backward,
            gen_input_fn=lambda: (np.random.randn(*linear.params['b'].shape), ),
            aux_only=True)
        self.assertTrue(check)
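
All of these tests drive the same numerical gradient check. The project's own check_finite_differences is not reproduced on this page; the following is a minimal sketch of the technique it implements, assuming the (outputs, aux) forward convention visible in the tests. The signature details (eps, tol, the handling of aux_only) are illustrative assumptions, and extras such as test_outputs are omitted:

import numpy as np


def check_finite_differences(fwd_fn, bwd_fn, gen_input_fn, aux_only=False,
                             test_inputs=None, n_times=10,
                             eps=1e-6, tol=1e-4):
    """Compare analytic gradients from bwd_fn against central differences.

    Assumes fwd_fn(inputs) returns (outputs, aux) and bwd_fn(aux, grads)
    returns one gradient per input; aux_only is accepted only for
    signature compatibility with the tests on this page.
    """
    for _ in range(n_times):
        inputs = [np.asarray(x, dtype=float) for x in gen_input_fn()]
        outputs, aux = fwd_fn(tuple(inputs))
        # Probe the scalar L = sum_i <w_i, y_i> for random weights w_i,
        # so bwd_fn receives the w_i as output gradients.
        weights = [np.random.randn(*np.asarray(y).shape) for y in outputs]
        analytic = bwd_fn(aux, tuple(weights))
        # A real checker would skip non-differentiable inputs (labels etc.);
        # in this sketch, test_inputs has to be used for that.
        indices = test_inputs if test_inputs is not None else range(len(inputs))
        for i in indices:
            numeric = np.zeros_like(inputs[i])
            it = np.nditer(inputs[i], flags=['multi_index'])
            while not it.finished:
                idx = it.multi_index
                orig = inputs[i][idx]
                inputs[i][idx] = orig + eps
                y_plus, _ = fwd_fn(tuple(inputs))
                inputs[i][idx] = orig - eps
                y_minus, _ = fwd_fn(tuple(inputs))
                inputs[i][idx] = orig
                numeric[idx] = sum(
                    np.sum(w * (yp - ym))
                    for w, yp, ym in zip(weights, y_plus, y_minus)) / (2 * eps)
                it.iternext()
            if not np.allclose(analytic[i], numeric, rtol=tol, atol=tol):
                return False
    return True
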
Example 2
    def test_backward(self):
        np.random.seed(9)

        def gen():
            x = np.random.randn(5, 3, 10)
            h0 = np.random.randn(3, 14)
            c0 = np.random.randn(3, 14)
            return (x, h0, c0)

        lstm = LSTM(n_in=10, n_out=14)

        check = check_finite_differences(lstm.forward,
                                         lstm.backward,
                                         gen_input_fn=gen,
                                         aux_only=True,
                                         test_inputs=(0, 1, 2))
        self.assertTrue(check)

        params_shape = lstm.params['WLSTM'].shape

        checker = TestParamGradInLayer(lstm, 'WLSTM', layer_input=gen())
        check = check_finite_differences(
            checker.forward,
            checker.backward,
            gen_input_fn=lambda: (np.random.randn(*params_shape), ),
            aux_only=True)
        self.assertTrue(check)
Example 3
 def test_backward(self):
     self.assertTrue(
         check_finite_differences(
             SeqLoss.forward,
             SeqLoss.backward,
             gen_input_fn=lambda: (np.random.dirichlet([1, 1], (10, )),
                                   [np.random.binomial(1, 0.5)
                                    for i in range(10)]),
             aux_only=True))
Example 4
 def test_backward(self):
     softmax = Softmax()
     self.assertTrue(
         check_finite_differences(softmax.forward,
                                  softmax.backward,
                                  gen_input_fn=lambda: (np.random.randn(7, 3), ),
                                  aux_only=True))
Example 5
    def test_backward(self):
        calc = DataCalc(max_num=5, n_words=50)
        db = DB(calc.get_db(), calc.get_vocab())

        emb = OneHot(n_tokens=len(db.vocab))

        nton = NTON(n_tokens=len(db.vocab), db=db, emb=emb, n_cells=5)
        nton.print_step = lambda *args, **kwargs: None
        ((dec_sym, ), _) = emb.forward(([db.vocab['[EOS]']], ))

        def gen_input():
            ((E, ), _) = emb.forward(
                (np.random.randint(1, len(db.vocab), (5, )), ))

            return (E, dec_sym[0])

        check = check_finite_differences(nton.forward,
                                         nton.backward,
                                         gen_input_fn=gen_input,
                                         aux_only=True)
        self.assertTrue(check)

        # nton.params.names() includes: 'att__Wh', 'att__Wy', 'att__w',
        # 'in_rnn__WLSTM', 'out_rnn__WLSTM', 'out_rnn_clf__00__W',
        # 'out_rnn_clf__00__b', 'switch__00__W', 'switch__00__b';
        # only the switch weight is checked here.
        for param_name in ['switch__00__W']:
            params_shape = nton.params[param_name].shape

            checker = TestParamGradInLayer(nton,
                                           param_name,
                                           layer_input=gen_input())
            check = check_finite_differences(
                checker.forward,
                checker.backward,
                gen_input_fn=lambda: (np.random.randn(*params_shape), ),
                aux_only=True,
                test_outputs=(0, ),
                n_times=100)
            self.assertTrue(check, msg='Failed check for: %s' % param_name)
Example 6
 def test_backward(self):
     self.assertTrue(
         check_finite_differences(
             Sigmoid.forward,
             Sigmoid.backward,
             gen_input_fn=lambda: (np.random.randn(7, 3), ),
             aux_only=True
         )
     )
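
The tests all rely on the same layer contract: forward takes a tuple of inputs and returns a pair ((outputs, ...), aux), and backward takes the cached aux plus a tuple of output gradients and returns one gradient per input. The project's Sigmoid is not shown on this page; a minimal version written to that assumed contract would be:

import numpy as np


class Sigmoid(object):
    """Elementwise logistic sigmoid under the assumed
    ((outputs, ...), aux) convention used throughout these tests."""

    @staticmethod
    def forward(inputs):
        (x, ) = inputs
        y = 1.0 / (1.0 + np.exp(-x))
        # Cache the activation; it is all backward needs.
        return ((y, ), y)

    @staticmethod
    def backward(aux, grads):
        y = aux
        (dy, ) = grads
        # d/dx sigmoid(x) = y * (1 - y), elementwise.
        return (dy * y * (1.0 - y), )
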
Example 7
    def test_backward(self):
        emb = Embeddings(n_tokens=10, n_dims=100)

        inp = (np.array([0, 1, 9]), )

        checker = TestParamGradInLayer(emb, 'W', layer_input=inp)
        check = check_finite_differences(
            checker.forward,
            checker.backward,
            gen_input_fn=lambda: (np.random.randn(*emb.params['W'].shape), ),
            aux_only=True)
        self.assertTrue(check)
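
TestParamGradInLayer reduces a parameter-gradient check to an input-gradient check: it wraps a layer so that one named parameter plays the role of the input while the actual layer input stays fixed. The real helper is not reproduced here; a sketch of the idea, assuming dict-like layer.params (as used in Example 1) and a hypothetical layer.grads holding accumulated parameter gradients, might be:

class TestParamGradInLayer(object):
    """Expose one parameter of `layer` as the input of a forward/backward
    pair so the generic finite-difference checker can test its gradient."""

    def __init__(self, layer, param_name, layer_input):
        self.layer = layer
        self.param_name = param_name
        self.layer_input = layer_input

    def forward(self, inputs):
        (param_value, ) = inputs
        # Overwrite the parameter in place, then run the layer on the
        # fixed input captured at construction time.
        self.layer.params[self.param_name][:] = param_value
        return self.layer.forward(self.layer_input)

    def backward(self, aux, grads):
        # Backpropagate through the layer, then hand back the gradient
        # recorded for the wrapped parameter (a real implementation would
        # zero any accumulated gradients before this step).
        self.layer.backward(aux, grads)
        return (self.layer.grads[self.param_name], )
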
Example 8
    def test_backward(self):
        seq = Sequential([
            LinearLayer(n_in=5, n_out=2, init_w=Eye(), init_b=Constant(0.0)),
            Softmax()
        ])

        check = check_finite_differences(
            fwd_fn=seq.forward,
            bwd_fn=seq.backward,
            gen_input_fn=lambda: (np.random.randn(3, 5), ),
            aux_only=True
        )
        self.assertTrue(check)
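
Example 8 checks a composite: gradients flow through Softmax back into LinearLayer. Under the contract sketched above, a Sequential container only has to thread the per-layer caches through; for instance (again an assumed implementation, not the project's):

class Sequential(object):
    """Chain layers; aux is the list of the layers' own caches."""

    def __init__(self, layers):
        self.layers = layers

    def forward(self, inputs):
        auxs = []
        for layer in self.layers:
            inputs, aux = layer.forward(inputs)
            auxs.append(aux)
        return (inputs, auxs)

    def backward(self, auxs, grads):
        # Walk the layers in reverse, feeding each one its own cache.
        for layer, aux in zip(reversed(self.layers), reversed(auxs)):
            grads = layer.backward(aux, grads)
        return grads
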
Example 9
    def test_backward(self):
        att = Attention(n_hidden=5)

        def gen_input():
            h_out = np.random.randn(11, 5)
            g_t = np.random.randn(5)
            emb_in = np.random.randn(11, 13)  # Input emb size 13.

            return (h_out, g_t, emb_in, )

        check = check_finite_differences(
            att.forward,
            att.backward,
            gen_input_fn=gen_input,
            test_inputs=(0, 1, 2),
            aux_only=True
        )
        self.assertTrue(check)
Example 10
    def test_backward(self):
        def gen_input():
            p1 = np.random.randn(1)
            in1 = np.random.randn(100)
            in2 = np.random.randn(100)

            return (p1, in1, in2)

        check = check_finite_differences(Switch.forward,
                                         Switch.backward,
                                         gen_input_fn=gen_input,
                                         test_inputs=(0, 1, 2),
                                         aux_only=True)
        self.assertTrue(check)
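
The shapes in gen_input (a one-element gate p1 and two same-sized vectors) fit a soft switch that mixes its two inputs. The project's Switch is not shown on this page; one definition consistent with this test, purely as an assumption, is the convex combination out = p1 * in1 + (1 - p1) * in2:

import numpy as np


class Switch(object):
    """Hypothetical gating layer: out = p1 * in1 + (1 - p1) * in2."""

    @staticmethod
    def forward(inputs):
        (p1, in1, in2) = inputs
        out = p1 * in1 + (1.0 - p1) * in2
        return ((out, ), (p1, in1, in2))

    @staticmethod
    def backward(aux, grads):
        (p1, in1, in2) = aux
        (dout, ) = grads
        # The gate is broadcast over the inputs, so its gradient sums
        # over all elements; keepdims preserves the (1,) shape.
        dp1 = np.sum(dout * (in1 - in2), keepdims=True)
        return (dp1, dout * p1, dout * (1.0 - p1))
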
Example 11
    def test_backward_gen(self):
        calc = DataCalc(max_num=5, n_words=50)
        db = DB(calc.get_db(), calc.get_vocab())
        n_words = len(db.vocab)

        emb = OneHot(n_tokens=len(db.vocab))

        nton = NTON(n_tokens=len(db.vocab), db=db, emb=emb, n_cells=5)
        nton.print_step = lambda *args, **kwargs: None
        shapes = [(n_words, ), (nton.n_cells, ), (nton.n_cells, ),
                  (6, nton.n_cells), (6, n_words)]
        check = check_finite_differences(
            nton.forward_gen_step,
            nton.backward_gen_step,
            gen_input_fn=lambda: tuple(np.random.randn(*shp) for shp in shapes),
            aux_only=True,
            n_times=100)
        self.assertTrue(check)
Example 12
    def test_forward_backward(self):
        vocab = Vocab()
        for i in range(19):
            vocab.add(str(i))

        content = []
        for i in range(10):
            for y in range(10):
                content.append((str(i), str(y), str(i + y)))

        db = DB2(content, vocab)
        # Smoke test: run forward/backward on a few entries without asserting.
        for e1, r, e2 in [(1, 1, 2), (1, 2, 3), (1, 5, 6), (5, 1, 6)]:
            ((y, ), aux) = db.forward(
                (db.get_vector(str(e1)), db.get_vector(str(r))))

            dy = np.random.randn(*y.shape)
            (de1, dr) = db.backward(aux, (dy, ))

        def gen_input():
            (e1, r, e2) = random.choice(db.content)
            v_e1 = db.get_vector(str(e1))
            v_e1 += np.random.randn(*v_e1.shape)
            v_r = db.get_vector(str(r))
            v_r += np.random.randn(*v_r.shape)

            return (v_e1, v_r)

        check = check_finite_differences(db.forward,
                                         db.backward,
                                         gen_input_fn=gen_input,
                                         aux_only=True)

        self.assertTrue(check)