def test_softmax(self):
    a = tensor([1, 3], 'a')
    feed = {'a': np.array([[1, 2, 3]])}
    # reference value computed directly: softmax(x) = exp(x) / sum(exp(x))
    answer_list = np.exp([1, 2, 3])
    answer_list /= np.sum(answer_list)
    assert_array_almost_equal(
        operator.softmax(a).forward(feed)[0], answer_list)
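
For inputs with larger magnitudes, the direct exp-then-normalize reference above can overflow. A common remedy is to subtract the maximum before exponentiating; here is a minimal NumPy sketch of that stable variant (independent of lfdnn):

    import numpy as np

    def stable_softmax(x):
        # shifting by max(x) makes the largest exponent exp(0) = 1,
        # preventing overflow without changing the result
        z = np.exp(x - np.max(x))
        return z / np.sum(z)

    # agrees with the unshifted formula on small inputs
    x = np.array([1.0, 2.0, 3.0])
    assert np.allclose(stable_softmax(x), np.exp(x) / np.sum(np.exp(x)))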
Example #2
    def construct_model(self, x_train, y_train):
        # get number of features
        input_dim = x_train.shape[-1]
        # get number of classes
        output_dim = len(np.unique(y_train))
        # number of hidden layers and their sizes; unused in this skeleton
        # (see the sketch after this function)
        layer_num = len(self.hidden_layer_sizes)
        hidden_layer_sizes = self.hidden_layer_sizes
        batch_size = self.batch_size
        _lambda = self._lambda
        if batch_size == 'auto':
            # use the whole training set as a single batch
            batch_size = x_train.shape[0]

        self.input = lfdnn.tensor([batch_size, input_dim], 'input')
        self.label = lfdnn.tensor([batch_size, output_dim], 'label')
        h = self.input
        # put your construction code here; feel free to modify the assignment of `w`
        # Hint: register every weight and bias tensor in self.weight
        w = lfdnn.tensor([input_dim, output_dim], 'output_weight')
        # end of your construction code
        self.weight['output_weight'] = w
        b = lfdnn.tensor([1, output_dim], 'output_bias')
        self.weight['output_bias'] = b
        h = operator.add(operator.matmul(h, w), b)
        self.output = operator.softmax(h)
        self.loss = operator.CE_with_logit(h, self.label)
        if _lambda > 0:
            # L2-regularize every weight matrix; bias terms are skipped
            for k, v in self.weight.items():
                if 'bias' in k:
                    continue
                regularization_term = operator.scale(operator.mean_square_sum(v), _lambda)
                self.loss = operator.add(self.loss, regularization_term)
        self.accuracy = operator.accuracy(self.output, self.label)
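
The skeleton above, as written, builds plain multiclass logistic regression and never uses `hidden_layer_sizes`. One way to complete the marked construction region (replacing it, including the `output_weight` line) is to chain one fully connected layer per hidden size. This is a sketch only; it assumes lfdnn's operator module exposes an elementwise activation named `operator.sigmoid`, which is not confirmed by the snippets here:

        # sketch of the marked region; operator.sigmoid is an assumed API
        in_dim = input_dim
        for i, size in enumerate(hidden_layer_sizes):
            w = lfdnn.tensor([in_dim, size], 'weight_%d' % i)
            b = lfdnn.tensor([1, size], 'bias_%d' % i)
            self.weight['weight_%d' % i] = w
            self.weight['bias_%d' % i] = b
            # affine transform followed by the nonlinearity
            h = operator.sigmoid(operator.add(operator.matmul(h, w), b))
            in_dim = size
        # final affine layer produces the class logits
        w = lfdnn.tensor([in_dim, output_dim], 'output_weight')

The bias tensors are named with the substring 'bias' so that the regularization loop above skips them.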
def test_log_softmax(self):
    a = tensor([1, 3], 'a')
    feed = {'a': np.array([[1, 2, 3]])}
    assert_array_almost_equal(
        operator.log_softmax(a).forward(feed),
        operator.log(operator.softmax(a)).forward(feed))
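
A dedicated log_softmax operator exists because composing log with softmax can underflow: when a logit is much smaller than the maximum, its softmax probability rounds to zero and its log becomes -inf. The standard fix is the log-sum-exp trick, log_softmax(x) = x - logsumexp(x); a plain-NumPy reference sketch:

    import numpy as np

    def log_softmax_ref(x):
        # logsumexp shifted by max(x) so the exponentials cannot overflow
        m = np.max(x)
        return x - (m + np.log(np.sum(np.exp(x - m))))

    x = np.array([1.0, 2.0, 3.0])
    assert np.allclose(log_softmax_ref(x), np.log(np.exp(x) / np.sum(np.exp(x))))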