def test_reg_grad():
    """Finite-difference check of the regularized softmax cost gradient.

    Builds a tiny 3-sample, 2-feature design matrix (first column is an
    all-ones bias term), an all-ones weight matrix, and one-hot labels,
    then asks ``numerical_grad_check`` to compare the analytic gradient
    returned by ``soft_cost`` against numeric differences.
    """
    print('*' * 5, 'Testing Gradient')
    features = np.array([[1.0, 0.0],
                         [1.0, 1.0],
                         [1.0, -1.0]])
    weights = np.ones((2, 3))
    # One-hot encoding for 3 classes; int64 to match soft_cost's expectations
    # (presumably — TODO confirm against soft_cost's signature).
    onehot_labels = np.eye(3, dtype='int64')
    penalty = 1.0

    def objective(candidate):
        return soft_cost(features, onehot_labels, W=candidate, reg=penalty)

    numerical_grad_check(objective, weights)
    print('Test Success')
def test_grad():
    """Finite-difference check of ``SoftmaxClassifier.cost_grad``.

    Uses a 3-sample, 2-feature design matrix (first column is an
    all-ones bias term), integer class labels 0..2, and an all-ones
    weight matrix, and verifies the classifier's analytic gradient via
    ``numerical_grad_check``.
    """
    print('*' * 5, 'Testing Gradient')
    features = np.array([[1.0, 0.0],
                         [1.0, 1.0],
                         [1.0, -1.0]])
    weights = np.ones((2, 3))
    class_labels = np.array([0, 1, 2])
    model = SoftmaxClassifier(num_classes=3)

    def objective(candidate):
        return model.cost_grad(features, class_labels, W=candidate)

    numerical_grad_check(objective, weights)
    print('Test Success')
def test_reg_grad():
    """Finite-difference check of the regularized logistic cost gradient.

    Two samples, two features (first column is an all-ones bias term),
    a zero weight vector, and all-zero int64 labels; compares the
    analytic gradient of ``log_cost`` against numeric differences.

    NOTE(review): the file also defines a ``test_reg_grad`` for the
    softmax cost — in one module the later definition shadows the
    earlier; these likely belong in separate files. Verify.
    """
    print('*' * 5, 'Testing Gradient')
    features = np.array([[1.0, 0.0],
                         [1.0, 1.0]])
    weights = np.zeros(2)
    binary_labels = np.zeros(2, dtype='int64')
    penalty = 1.0

    def objective(candidate):
        return log_cost(features, binary_labels, w=candidate, reg=penalty)

    numerical_grad_check(objective, weights)
    print('Test Success')
def test_grad():
    """Finite-difference check of ``LogisticRegressionClassifier.cost_grad``.

    Three samples, two features (first column is an all-ones bias term
    for the first two rows), a zero weight vector, and binary int64
    labels; verifies the classifier's analytic gradient via
    ``numerical_grad_check``.

    NOTE(review): the file also defines a ``test_grad`` for the softmax
    classifier — in one module the later definition shadows the earlier;
    these likely belong in separate files. Verify.
    """
    print('*' * 5, 'Testing Gradient')
    features = np.array([[1.0, 0.0],
                         [1.0, 1.0],
                         [2.0, 3.0]])
    weights = np.zeros(2)
    binary_labels = np.array([0, 0, 1], dtype='int64')
    print('shapes', features.shape, weights.shape, binary_labels.shape)
    model = LogisticRegressionClassifier()

    def objective(candidate):
        return model.cost_grad(features, binary_labels, w=candidate)

    numerical_grad_check(objective, weights)
    print('Test Success')