# Imports used by the experiment functions below (the oracles/optimization
# module names match the test code further down in this section).
import os
import logging
from itertools import accumulate

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_svmlight_file

from oracles import create_lasso_prox_oracle, create_lasso_nonsmooth_oracle
from optimization import (subgradient_method, proximal_gradient_method,
                          proximal_fast_gradient_method)


def experiment_3(parameter='n', method='subgradient', seed=31415):
    # Default problem sizes n, m and the regularization coefficient lambda.
    n, m, reg_coef = 500, 500, 1.0
    np.random.seed(seed)

    # Values over which the chosen parameter is varied.
    grids = dict()
    grids['n'] = [10, 100, 1000]
    grids['m'] = [10, 100, 1000]
    grids['reg_coef'] = [0.01, 0.1, 1.0, 10.0]

    fig1, ax1 = plt.subplots()
    fig2, ax2 = plt.subplots()
    ax1.set_xlabel('Iteration number')
    ax1.set_ylabel('Guaranteed accuracy (duality gap)')
    ax1.grid()
    ax1.set_yscale('log')
    ax2.set_xlabel('Time since the start of the experiment')
    ax2.set_ylabel('Guaranteed accuracy (duality gap)')
    ax2.grid()
    ax2.set_yscale('log')

    os.makedirs("report/pics/3", exist_ok=True)

    experiment_parameters = {'n': n, 'm': m, 'reg_coef': reg_coef}
    for value in grids[parameter]:
        experiment_parameters[parameter] = value

        # Random lasso problem of the requested size.
        A = np.random.randn(experiment_parameters['m'], experiment_parameters['n'])
        b = np.random.randn(experiment_parameters['m'])
        x_0 = np.ones(experiment_parameters['n'])
        reg_coef = experiment_parameters['reg_coef']

        if method == 'subgradient':
            oracle = create_lasso_nonsmooth_oracle(A, b, reg_coef)
            x_opt, message, history = subgradient_method(
                oracle, x_0, trace=True, max_iter=10000)
        elif method == 'proximal':
            oracle = create_lasso_prox_oracle(A, b, reg_coef)
            x_opt, message, history = proximal_gradient_method(
                oracle, x_0, trace=True, max_iter=10000)
        elif method == 'proximal_fast':
            oracle = create_lasso_prox_oracle(A, b, reg_coef)
            x_opt, message, history = proximal_fast_gradient_method(
                oracle, x_0, trace=True, max_iter=10000)

        ax1.plot(history['duality_gap'], label=f'{parameter}={value}')
        ax2.plot(history['time'], history['duality_gap'], label=f'{parameter}={value}')

    ax1.legend()
    ax2.legend()
    os.makedirs(f"report/pics/3/{method}", exist_ok=True)
    fig1.savefig(f"report/pics/3/{method}/lasso_gap_vs_iter_{parameter}.pdf",
                 bbox_inches='tight')
    fig2.savefig(f"report/pics/3/{method}/lasso_gap_vs_time_{parameter}.pdf",
                 bbox_inches='tight')
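# Illustrative usage sketch (the helper name and the sweep itself are
# assumptions, not part of the original code): regenerate every figure under
# report/pics/3/ by running experiment_3 for each method and grid parameter.
def run_all_experiment_3():
    for method in ('subgradient', 'proximal', 'proximal_fast'):
        for parameter in ('n', 'm', 'reg_coef'):
            experiment_3(parameter=parameter, method=method)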
def test_proximal_gm_nonsmooth():
    # Minimize ||x||_1.
    oracle = oracles.create_lasso_prox_oracle(np.zeros([2, 2]), np.zeros(2),
                                              regcoef=1.0)
    x_0 = np.array([2.0, -1.0])
    x_star, status, hist = optimization.proximal_gradient_method(oracle, x_0,
                                                                 trace=True)
    eq_(status, 'success')
    ok_(np.allclose(x_star, np.array([0.0, 0.0])))
    ok_(np.allclose(np.array(hist['func']), np.array([3.0, 1.0, 0.0])))
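# For reference, the expected 'func' history in the test above can be derived
# by hand: with A = 0 and b = 0 the smooth part and its gradient vanish, so
# each iteration reduces to soft-thresholding of the current point. The sketch
# below assumes the line search accepts an initial estimate L = 1.0 (a common
# default, not confirmed here), giving threshold regcoef / L = 1.0.
def _soft_threshold_check():
    import numpy as np

    def soft_threshold(x, t):
        # Proximal operator of t * ||.||_1: shrink each coordinate by t.
        return np.sign(x) * np.maximum(np.abs(x) - t, 0.0)

    x, values = np.array([2.0, -1.0]), []
    for _ in range(3):
        values.append(np.abs(x).sum())       # f(x) = ||x||_1 since A = 0, b = 0
        x = soft_threshold(x, 1.0)           # threshold regcoef / L = 1.0
    assert np.allclose(values, [3.0, 1.0, 0.0])   # matches hist['func'] above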
def test_proximal_gm_one_step():
    # Simple smooth quadratic task.
    A = np.eye(2)
    b = np.array([1.0, 0.0])
    oracle = oracles.create_lasso_prox_oracle(A, b, regcoef=0.0)
    x_0 = np.zeros(2)
    x_star, status, hist = optimization.proximal_gradient_method(oracle, x_0,
                                                                 trace=True)
    eq_(status, 'success')
    ok_(np.allclose(x_star, np.array([1.0, 0.0])))
    ok_(np.allclose(np.array(hist['func']), np.array([0.5, 0.0])))
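# Hand check of the expected history above: with regcoef = 0 the objective is
# f(x) = 0.5 * ||x - b||^2, so f(x_0) = 0.5 at x_0 = (0, 0). The gradient there
# is x_0 - b = (-1, 0); assuming the line search accepts L = 1.0 (not confirmed
# here), a single step of length 1/L lands exactly at the minimizer (1, 0),
# where f = 0.0, which gives hist['func'] == [0.5, 0.0].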
def experiment_2():
    np.random.seed(31415)
    data_path = "data"
    datasets = ["bodyfat", "housing"]
    for dataset in datasets:
        print("___________________________")
        logging.info(f"{dataset} is in process...")
        A, b = load_svmlight_file(os.path.join(data_path, dataset))

        fig, ax = plt.subplots(figsize=(12, 8))
        ax.set_xlabel('Iteration number')
        ax.set_ylabel('Cumulative number of L line-search steps')
        ax.grid()

        oracle = create_lasso_prox_oracle(A, b, 1.0)
        # Dataset shape, sparsity and rank, for reference.
        print(A.shape, 1 - A.size / (A.shape[0] * A.shape[1]),
              np.linalg.matrix_rank(A.toarray()))

        n = A.shape[1]
        x0 = np.random.randn(n)
        x_opt, message, history_usual = proximal_gradient_method(
            oracle, x0, trace=True, max_iter=10000)
        x_opt, message, history_fast = proximal_fast_gradient_method(
            oracle, x0, trace=True, max_iter=10000)

        # Running totals of inner line-search steps over the iterations.
        sum_int_steps_usual = list(accumulate(history_usual['int_steps']))
        sum_int_steps_fast = list(accumulate(history_fast['int_steps']))
        ax.plot(sum_int_steps_usual, label="Usual proximal method")
        ax.plot(sum_int_steps_fast, label="Fast proximal method")
        ax.legend()

        os.makedirs("report/pics/2", exist_ok=True)
        fig.savefig(f"report/pics/2/prox_methods_steps_{dataset}.pdf",
                    bbox_inches='tight')
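# The cumulative curves plotted above come from itertools.accumulate; the
# int_steps values below are made up purely to illustrate how per-iteration
# line-search counts turn into the running totals that get plotted.
#     >>> from itertools import accumulate
#     >>> list(accumulate([1, 3, 2]))
#     [1, 4, 6]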