def test_sd_p(self):
    """Run steepest descent on ``self.p`` from the origin and check convergence.

    Prints the computed and expected minimizers for visual inspection, then
    asserts the computed point is within 1e-3 (Euclidean norm) of ``self.x_opt``.
    """
    # Starting point: the origin in R^6 (int dtype, matching the original literal).
    x0 = np.zeros((6, 1), dtype=int)
    result = opt.steepest_descent(self.p, x0, tol=1e-6)
    # Diagnostic output: computed vs. expected minimizer.
    print('sd')
    print(result)
    print(self.x_opt)
    print()
    # The final iterate must land within 1e-3 of the known optimum.
    self.assertTrue(np.linalg.norm(result - self.x_opt) < 1e-3)
def test_sd(self):
    """Steepest descent minimizes a strictly convex quadratic exactly.

    Builds the problem v(x) = 0.5 * x^T P x with SPD diagonal P, whose unique
    minimizer is the zero vector, and checks the solver recovers it to 1e-6.
    """
    # SPD diagonal matrix -> unique global minimum at the origin.
    P = np.diag([1, 2, 4])

    # PEP 8 (E731): use def statements rather than assigning lambdas to names.
    def v(x):
        return 0.5 * x.T @ P @ x

    def del_v(x):
        # Gradient of the quadratic in the row-vector convention: x^T P.
        return x.T @ P

    p = opt.Problem(v, del_v)
    x0 = np.array([[1], [1], [1]])
    expected = np.zeros((3, 1))  # analytic minimizer of the quadratic
    x_sd = opt.steepest_descent(p, x0)
    # assertLess reports the offending norm on failure (clearer than assertTrue).
    self.assertLess(np.linalg.norm(x_sd - expected), 1e-6)
def test_sd(self):
    """Problem A: run steepest descent with iterate history and plot gradient norms.

    Saves a plot of ||grad v(x_k)|| per iteration to ./fig/sd-pA.eps and prints
    the final iterate and wall-clock time. No assertions; this is a diagnostic run.
    """
    # Starting point: the origin in R^6.
    x0 = np.array([[0], [0], [0], [0], [0], [0]])
    start = time.time()
    # hist=True makes the solver return the iterate at every step, not just the last.
    x_opt = opt.steepest_descent(self.p, x0, tol=1e-6, hist=True)
    end = time.time()
    # Gradient norm at each recorded iterate (iterate directly; no index juggling).
    g = np.array([np.linalg.norm(self.p.grad(xk)) for xk in x_opt])
    fig = plt.figure()
    try:
        plt.plot(np.arange(len(x_opt)), g)
        plt.xlabel('Iteration')
        plt.ylabel('Norm of Gradient')
        fig.savefig('./fig/sd-pA.eps', format='eps')
    finally:
        # Close the figure so repeated runs don't accumulate open figures
        # (matplotlib warns and leaks memory when figures are left open).
        plt.close(fig)
    print('\nProblem A, Steepest Descent')
    print('arg min v(x) =\n', x_opt[-1])
    print('time =\n', end - start, 's')
def test_sd(self):
    """Problem A (exact gradient): steepest descent with history, plot and stats.

    Saves the per-iteration gradient-norm plot to ./fig/sd-pA-grad.eps and prints
    the final iterate and elapsed time. No assertions; this is a diagnostic run.
    """
    # Pick initial value: the origin in R^6.
    x0 = np.array([[0], [0], [0], [0], [0], [0]])
    start = time.time()
    # Run steepest descent with history turned on (get x at each iteration).
    x_opt = opt.steepest_descent(self.p, x0, hist=True)
    end = time.time()
    # Norm of the gradient at each iterate (iterate directly; no range(len(...))).
    g = np.array([np.linalg.norm(self.p.grad(xk)) for xk in x_opt])
    # Plot the gradient norms and save the figure.
    fig = plt.figure()
    try:
        plt.plot(np.arange(len(x_opt)), g)
        plt.xlabel('Iteration')
        plt.ylabel('Norm of Gradient')
        fig.savefig('./fig/sd-pA-grad.eps', format='eps')
    finally:
        # Close the figure so repeated runs don't accumulate open figures
        # (matplotlib warns and leaks memory when figures are left open).
        plt.close(fig)
    # Print out stats.
    print('\nProblem A, Steepest Descent (Exact Gradient)')
    print('arg min v(x) =\n', x_opt[-1])
    print('time =\n', end - start, 's')
def test_sd(self):
    """Steepest descent from (10, 10) must reach the known optimum of ``self.p``.

    Asserts the returned point lies within 1e-3 of ``self.x_opt``.
    """
    start_point = np.array([[10], [10]])
    found = opt.steepest_descent(self.p, start_point, tol=1e-4)
    # Distance between the computed and the expected minimizer.
    error = np.linalg.norm(found - self.x_opt)
    self.assertTrue(error < 1e-3)