def test_softmax_calculate_gradient(self):
    """Gradient of the softmax layer for a one-hot target is well-formed.

    NOTE(review): the original test computed ``grad`` but asserted nothing,
    so it could never fail. Without knowing the exact gradient formula used
    by ``Softmax.calculate_gradient`` (not visible here), we pin the safe
    structural properties: same shape as the target and all-finite values.
    """
    # Given
    pre_activation = np.array([[1, 2, 3, 6], [2, 4, 5, 6], [3, 8, 7, 6]])
    target = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])
    softmax = Softmax()

    # When
    activation = softmax.apply_activation(pre_activation)
    grad = softmax.calculate_gradient(activation, target)

    # Then
    self.assertEqual(grad.shape, target.shape)
    self.assertTrue(np.all(np.isfinite(grad)))
def test_softmax_apply_activation(self):
    """Activations over the 4 columns should sum to 4.0 overall.

    Presumably the softmax normalizes per column, so each of the four
    columns sums to 1 and the grand total is 4 — TODO confirm axis.
    """
    # Given
    pre_activation = np.array([[1, 2, 3, 6], [2, 4, 5, 6], [3, 8, 7, 6]])
    softmax = Softmax()

    # When
    activation = softmax.apply_activation(pre_activation)

    # Then
    expected_total = 4.0
    actual_total = np.sum(np.sum(activation, axis=0))
    self.assertTrue(np.isclose(expected_total, actual_total, 1e-3))