import time

import numpy as np
import matplotlib.pyplot as plt

# Problem data (A, y, la, n, methods) and the solver routines
# (soft_thresholding, forward_backward) are assumed to be defined earlier
# in the example.

# Lasso objective: minimize F(x) + G(x), with an l1 penalty F and a
# quadratic data-fidelity term G.
F = lambda x: la * np.linalg.norm(x, 1)
G = lambda x: 0.5 * np.linalg.norm(y - np.dot(A, x)) ** 2

prox_f = lambda x, tau: soft_thresholding(x, la * tau)
grad_g = lambda x: np.dot(A.T, np.dot(A, x) - y)
L = np.linalg.norm(A, 2) ** 2  # Lipschitz constant of grad_g

# Record the objective value at every iteration.
callback = lambda x: F(x) + G(x)
maxiter = 1000

res = np.zeros((maxiter, len(methods)))
for i, method in enumerate(methods):
    t1 = time.time()
    x, fx = forward_backward(prox_f, grad_g, np.zeros((n, 1)), L,
                             maxiter=maxiter, method=method,
                             full_output=1, retall=0, callback=callback)
    t2 = time.time()
    print("[" + method + "]: Performed 1000 iterations in "
          + str(t2 - t1) + " seconds.")
    res[:, i] = fx

# Plot the gap to the best objective value found, on a log-log scale.
e = np.min(res.flatten())
plt.loglog(res[:(maxiter // 10), :] - e)
plt.legend(methods)
plt.grid(True, which="both", ls="-")
plt.tight_layout()
plt.show()
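# For reference, a minimal sketch of the two building blocks exercised above,
# assuming the solver follows the standard forward-backward scheme; the names
# soft_thresholding_ref and ista_step are illustrative and not part of the
# solver package.
def soft_thresholding_ref(x, tau):
    # Proximal operator of tau * ||.||_1: shrink every entry towards 0 by tau.
    return np.sign(x) * np.maximum(np.abs(x) - tau, 0)

def ista_step(x, prox_f, grad_g, L):
    # One plain forward-backward (ISTA) iteration with step size 1/L:
    # explicit gradient step on G, then proximal (implicit) step on F.
    return prox_f(x - grad_g(x) / L, 1.0 / L)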