def test_L1Ball(algorithm, lr):
    """Run `algorithm` on a least-squares problem constrained to the L1 ball,
    logging objective value, optimizer certificate, and iterate norm to a cox
    store under OUT_DIR.

    NOTE(review): relies on module-level fixtures `alpha`, `w`, `X`, `y`,
    `OUT_DIR`, `MAX_ITER` — presumably set up by the test harness.
    """
    # -- Problem setup -------------------------------------------------------
    constraint = chop.constraints.L1Ball(alpha)
    prox = constraint.prox
    lmo = constraint.lmo
    # Sanity check: the reference point `w` must already be feasible,
    # i.e. prox is the identity on it.
    assert (constraint.prox(w) == w).all()

    w_t = Variable(torch.zeros_like(w), requires_grad=True)

    # Each algorithm expects a different set of oracles; look them up by name.
    oracles_by_algorithm = {
        stochastic.PGD.name: {'prox': [prox]},
        stochastic.PGDMadry.name: {'prox': [prox], 'lmo': [lmo]},
        stochastic.FrankWolfe.name: {'lmo': [lmo]},
        stochastic.S3CM.name: {'prox1': [prox], 'prox2': [prox]},
    }
    oracles = oracles_by_algorithm[algorithm.name]
    optimizer = algorithm([w_t], **oracles, lr=lr)
    criterion = torch.nn.MSELoss(reduction='mean')

    # -- Logging setup -------------------------------------------------------
    store = Store(OUT_DIR)
    store.add_table('metadata', {'algorithm': str, 'lr': float})
    store['metadata'].append_row({'algorithm': optimizer.name, 'lr': lr})
    store.add_table(optimizer.name, {
        'func_val': float,
        'certificate': float,
        'norm(w_t)': float
    })

    # -- Optimization loop ---------------------------------------------------
    cert = torch.tensor(np.inf)
    for _ in range(MAX_ITER):
        optimizer.zero_grad()
        loss = criterion(X.mv(w_t), y)
        loss.backward()
        optimizer.step()

        try:
            cert = next(optimizer.certificate)  # only one parameter here
        except AttributeError:
            # This algorithm exposes no certificate; record NaN instead.
            cert = torch.tensor(np.nan)

        store.log_table_and_tb(optimizer.name, {
            'func_val': loss.item(),
            'certificate': cert.item(),
            'norm(w_t)': w_t.abs().sum().item()
        })
        store[optimizer.name].flush_row()

    store.close()
def test_L1Ball(algorithm, step_size):
    """Minimize a least-squares objective over the L1 ball with `algorithm`,
    logging function value, Frank-Wolfe gap, and iterate norm to a cox store.

    NOTE(review): relies on module-level fixtures `alpha`, `w`, `X`, `y`,
    `OUT_DIR`, `MAX_ITER` — presumably set up by the test harness.
    """
    # -- Problem setup -------------------------------------------------------
    constraint = constopt.constraints.L1Ball(alpha)
    # The reference point `w` must be feasible: prox acts as the identity.
    assert (constraint.prox(w) == w).all()

    w_t = Variable(torch.zeros_like(w), requires_grad=True)
    optimizer = algorithm([w_t], constraint)
    criterion = torch.nn.MSELoss(reduction='mean')

    # -- Logging setup -------------------------------------------------------
    store = Store(OUT_DIR)
    metadata_schema = {'algorithm': str, 'step-size': float}
    store.add_table('metadata', metadata_schema)
    store['metadata'].append_row({
        'algorithm': optimizer.name,
        'step-size': step_size
    })
    metrics_schema = {'func_val': float, 'FW gap': float, 'norm(w_t)': float}
    store.add_table(optimizer.name, metrics_schema)

    # -- Optimization loop ---------------------------------------------------
    gap = torch.tensor(np.inf)
    for _ in range(MAX_ITER):
        optimizer.zero_grad()
        loss = criterion(X.mv(w_t), y)
        loss.backward()

        # Frank-Wolfe gap at the current iterate, before taking the step.
        with torch.no_grad():
            gap = constraint.fw_gap(w_t.grad, w_t)

        optimizer.step(step_size)

        store.log_table_and_tb(optimizer.name, {
            'func_val': loss.item(),
            'FW gap': gap.item(),
            'norm(w_t)': w_t.abs().sum().item()
        })
        store[optimizer.name].flush_row()

    store.close()
def test_L1Ball(algorithm, lr):
    """Run `algorithm` on an L1-ball-constrained least-squares problem and log
    function value, optimizer certificate, and iterate norm to a cox store.

    NOTE(review): relies on module-level fixtures `alpha`, `w`, `X`, `y`,
    `OUT_DIR`, `MAX_ITER` — presumably set up by the test harness.
    """
    # Setup
    constraint = chop.constraints.L1Ball(alpha)
    # Sanity check: the reference point `w` must already be feasible.
    assert (constraint.prox(w) == w).all()

    w_t = Variable(torch.zeros_like(w), requires_grad=True)
    optimizer = algorithm([w_t], constraint, lr=lr)
    criterion = torch.nn.MSELoss(reduction='mean')

    # Logging
    store = Store(OUT_DIR)
    store.add_table('metadata', {'algorithm': str, 'lr': float})
    store['metadata'].append_row({'algorithm': optimizer.name, 'lr': lr})
    store.add_table(optimizer.name, {
        'func_val': float,
        'certificate': float,
        'norm(w_t)': float
    })

    cert = torch.tensor(np.inf)
    for ii in range(MAX_ITER):
        optimizer.zero_grad()
        loss = criterion(X.mv(w_t), y)
        loss.backward()
        optimizer.step()
        # FIX: guard the certificate lookup, matching the sibling variant of
        # this test — algorithms without a `certificate` attribute previously
        # crashed here with AttributeError; now they log NaN instead.
        try:
            cert = next(optimizer.certificate)  # only one parameter here
        except AttributeError:
            cert = torch.tensor(np.nan)

        store.log_table_and_tb(optimizer.name, {
            'func_val': loss.item(),
            'certificate': cert.item(),
            'norm(w_t)': sum(abs(w_t)).item()
        })
        store[optimizer.name].flush_row()

    store.close()
shutil.rmtree(OUT_DIR) except: pass os.mkdir(OUT_DIR) if __name__ == "__main__": for slope in range(5): store = Store(OUT_DIR) store.add_table('metadata', {'slope': int}) store.add_table('line_graphs', {'mx': int, 'mx^2': int}) store['metadata'].append_row({'slope': slope}) for x in range(100): store.log_table_and_tb('line_graphs', { 'mx': slope * x, 'mx^2': slope * (x**2) }) store['line_graphs'].flush_row() store.close() ### Collection reading print("Done experiments, printing results...") reader = CollectionReader(OUT_DIR) print(reader.df('line_graphs')) print("Starting tensorboard:") subprocess.run([ "python", "-m", "cox.tensorboard_view", "--logdir", OUT_DIR, "--format-str", "slope-{slope}", "--filter-param", "slope", "[1-3]", "--metadata-table", "metadata"