Example #1
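All of these snippets omit their import preamble. A plausible reconstruction, inferred from the aliases used throughout (np, n, rnn, loss, autodiff_optim, and the logging helpers), is sketched below; the exact module paths are assumptions:

 import time
 import numpy as np
 import rnn                 # RnnVarNode, RnnCell, SimpleRnnLayer, SharedParam
 import node as n           # VarNode, ComputeContext (module name assumed)
 import loss                # SoftmaxCrossEntropy, LogitsCrossEntropy
 import autodiff_optim      # AdamOptimizer, SGDOptimizer, OptimizerIterator
 from log_utils import debug, info, log_at_info   # logging helpers (module name assumed)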
 def test_rnn_var_node(self):
     x = np.array([[1, 2, 1], [-1, 0, -.5]]).T
     x = x.reshape((3, 1, 2))  # (input_size, batch, time): one batch, two time steps
     x0_var = rnn.RnnVarNode(0, x)
     x1_var = rnn.RnnVarNode(1, x)
     np.testing.assert_equal(x0_var.value(), x[:, :, 0])
     np.testing.assert_equal(x1_var.value(), x[:, :, 1])
     debug("[SimpleRnnCellTests.test_rnn_var_node()] x0_var.value() = np.{}".format(repr(x0_var.value())))
Example #2
 def test_train(self):
     num_iter = 100000
     x_node = n.VarNode('x')
     y_target_node = n.VarNode('y_target')
     rnn_node = rnn.SimpleRnnLayer(x_node, self.name_ds.n_categories, 15)
     loss_node = loss.SoftmaxCrossEntropy(rnn_node, y_target_node)
     all_losses = []
     # The Adam optimizer is created and then immediately replaced by SGD;
     # keep whichever one you want to experiment with.
     # optimizer_func = autodiff_optim.AdamOptimizer()
     optimizer_func = autodiff_optim.SGDOptimizer(lr=0.0001)
     optimizer = autodiff_optim.OptimizerIterator([x_node, y_target_node],
                                                  loss_node, optimizer_func)
     ctx = n.ComputeContext({'x': "", 'y_target': ""})  # placeholders; filled per iteration below
     log_at_info()
     every = 500
     t = time.time()
     for i in range(1, num_iter + 1):
         rnn_node.set_initial_state_to_zero()
         c, l, category_index, name_tensor = self.name_ds.random_training_example()
         cat_tensor = self.name_ds.category_idx_to_tensor([category_index])
         ctx['x'] = name_tensor
         ctx['y_target'] = cat_tensor
         ctx['i'] = i
         loss_value = optimizer.step(ctx, 1.0)
         all_losses.append(loss_value)
         if i % every == 0:
             t = time.time() - t
             last_losses = all_losses[-every:]  # losses since the last report
             av = np.average(last_losses)
             info("[{:06d}] Avg. loss = {:10.6f}"
                  " | {:04.2f}s per {}  | Total Iters set to:{}".format(
                      i, av, t, every, num_iter))
             all_losses = []
             t = time.time()
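Note that rnn_node.set_initial_state_to_zero() runs before every training example: names vary in length, and resetting the hidden state keeps one name's state from leaking into the next. The 1.0 passed to optimizer.step(ctx, 1.0) appears to seed the backward pass with an initial gradient of one at the loss node.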
Example #3
 def test_2_seq_rnn(self):
     x = np.array([[1, 2, 1], [-1, 0, -.5]]).T
     x = x.reshape((3, 1, 2))
     x0_var = rnn.RnnVarNode(0, x)
     x1_var = rnn.RnnVarNode(1, x)
     cell1 = rnn.RnnCell(x0_var, None, self.w_param, self.wb_param, self.u_param,
                         self.ub_param, self.h)
     cell2 = rnn.RnnCell(x1_var, cell1, self.w_param, self.wb_param, self.u_param,
                         self.ub_param)
     x0_var.forward(self.var_map)
     x1_var.forward(self.var_map)
     y, h = cell2.value()
     debug("[SimpleRnnCellTests.test_2_seq_rnn()] y = np.{}".format(repr(y)))
     debug("[SimpleRnnCellTests.test_2_seq_rnn()] h = np.{}".format(repr(h)))
     dely, delh = y * .1, h * .1  # delh is unused below: the last cell receives no hidden-state gradient
     cell2.backward((dely, None), self, var_map=self.var_map)
     wgrad = self.w_param._total_incoming_gradient()
     debug("[SimpleRnnCellTests.test_2_seq_rnn()] wgrad = np.{}".format(repr(wgrad)))
Example #4
 def setUp(self):
     x1 = np.array([1, 2, 1]).reshape((3, 1))
     self.var_map = {'x': x1}
     self.h = np.array([0.6, 0.2]).reshape((2, 1))
     w = np.array([[0.63315733, 0.51699569, 0.78251473, 0.94678789, 0.30939115],
                   [0.12741137, 0.67238871, 0.23514442, 0.50932127, 0.60643467],
                   [0.26004482, 0.02306102, 0.56403955, 0.32862147, 0.13988205],
                   [0.97815493, 0.66425931, 0.85988497, 0.13528022, 0.03943312]])
     wb = np.array([[0.5],
                    [-0.25],
                    [1.],
                    [-1.]])
     u = np.array([[0.39865366, 0.49334758, 0.29215267, 0.97590111, 0.68403036],
                   [0.03237844, 0.73579572, 0.49288022, 0.32059863, 0.69219668]])
     ub = np.array([[-1],
                    [.5]])
     self.w_param, self.wb_param = rnn.SharedParam(w), rnn.SharedParam(wb)
     self.u_param, self.ub_param = rnn.SharedParam(u), rnn.SharedParam(ub)
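The shapes in this fixture suggest that the cell stacks the 3-dim input on the 2-dim hidden state to form a 5-vector, which w (4x5) maps to the output and u (2x5) maps to the next hidden state. A minimal NumPy sketch of that assumed structure (the real RnnCell may also apply a nonlinearity such as tanh):

 import numpy as np
 x1 = np.ones((3, 1))                      # input; shapes as in setUp above
 h = np.ones((2, 1))                       # previous hidden state
 w, wb = np.ones((4, 5)), np.ones((4, 1))
 u, ub = np.ones((2, 5)), np.ones((2, 1))
 z = np.vstack([x1, h])                    # (5, 1) input stacked on state
 y = w @ z + wb                            # (4, 1) output at this step
 h_next = u @ z + ub                       # (2, 1) state fed to the next cell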
Example #5
 def test_forward(self):
     input_x_node = n.VarNode('x')
     rnn_cell = rnn.RnnCell(input_x_node, None, self.w_param, self.wb_param,
                            self.u_param, self.ub_param, self.h)
     input_x_node.forward(self.var_map)
     y, h = rnn_cell.value()
     debug("[SimpleRnnCellTests.test_forward()] y = np.{}".format(repr(y)))
     debug("[SimpleRnnCellTests.test_forward()] h = np.{}".format(repr(h)))
     dely, delh = y * .1, h * .1
     rnn_cell.backward((dely, delh), self, self.var_map)
     grad_x = input_x_node.total_incoming_gradient()
     debug("[SimpleRnnCellTests.test_forward()] grad_x = np.{}".format(repr(grad_x)))
Example #6
 def test_rnn_layer(self):
     x = np.array([[1, 2, 1], [-1, 0, -.5]]).T
     x = x.reshape((3, 1, 2))
     input_node = n.VarNode('x')
     var_map = {'x': x}
     rnn_layer = rnn.SimpleRnnLayer(input_node, 4, 2)  # args appear to be (input, output_size, hidden_size); roles inferred from Example #2
     input_node.forward(var_map)
     y = rnn_layer.value()
     dely = y * .1
     rnn_layer.backward(dely, self, var_map)
     x_grad = input_node.total_incoming_gradient()
     debug("[SimpleRnnCellTests.test_rnn_layer()] x_grad = np.{}".format(
         repr(x_grad)))
Example #7
 def test_rnn_layer_with_loss(self):
     debug("[RnnLayerFullTests.test_rnn_layer_with_loss()] self.data_dir = {}"
           .format(self.data_dir))
     x = self.name_ds.line_to_numpy('ABCD')
     debug("[RnnLayerFullTests.test_rnn_layer_with_loss()] ABCD: x = np.{}"
           .format(repr(x)))
     debug("------------------------------------------------------")
     x = self.name_ds.line_to_numpy('Albert')
     debug("[RnnLayerFullTests.test_rnn_layer_with_loss()] x = np.{}"
           .format(repr(x)))
     debug("------------------------------------------------------")
     log_at_info()
     for i in range(5):
         c, l, category_index, name_tensor = self.name_ds.random_training_example()
         debug("[{}]:{}".format(c, l))
         cat_tensor = self.name_ds.category_idx_to_tensor([category_index])
         debug("[RnnLayerFullTests.test_rnn_layer_with_loss()] cat_tensor = np.{}"
               .format(repr(cat_tensor)))

     # name_tensor and cat_tensor below come from the last loop iteration
     x_node = n.VarNode('x')
     y_target_node = n.VarNode('y_target')

     ctx = n.ComputeContext({'x': name_tensor, 'y_target': cat_tensor})
     rnn_node = rnn.SimpleRnnLayer(x_node, self.name_ds.n_categories, 128)
     loss_node = loss.LogitsCrossEntropy(rnn_node, y_target_node)

     x_node.forward(ctx)
     y_target_node.forward(ctx)
     y = rnn_node.value()
     info("[RnnLayerFullTests.test_rnn_layer_with_loss()] y = np.{}"
          .format(repr(y)))
     loss_value = loss_node.value()
     info("[RnnLayerFullTests.test_rnn_layer_with_loss()] loss = np.{}"
          .format(repr(loss_value)))
     loss_node.backward(1.0, self, ctx)
     grads = rnn_node.total_incoming_gradient()
     info(grads)
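This test uses loss.LogitsCrossEntropy where Example #2 used loss.SoftmaxCrossEntropy; the names suggest the former consumes the layer's raw logits directly, but that is an inference from naming, not something these snippets confirm.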