import sys

# optpolicy, objectives, net_lenet5, run_experiment and
# build_params_from_init are helpers defined elsewhere in the repository.

k = float(sys.argv[1]) if len(sys.argv) > 1 else 0
dataset = str(sys.argv[2]) if len(sys.argv) > 2 else 'mnist'
iparam = str(sys.argv[3]) if len(sys.argv) > 3 else None
print('k = ', k, 'dataset = ', dataset, 'params = ', iparam)

num_epochs, batch_size, verbose = 200, 100, 1
optpol = lambda epoch: optpolicy.lr_linear_to0(epoch, 1e-3)
arch = net_lenet5

# Build the network once without training (0 epochs) so that pretrained
# parameters, if a file was given, can be loaded into it before the real run.
net = run_experiment(dataset, 0, batch_size, arch, objectives.sgvlb,
                     False, optpol, optpolicy.rw_linear, optimizer='adam')
paramsv = build_params_from_init(net, iparam, lsinit=-10) if iparam else None
# The excerpt is cut off after the rw_linear argument; the trailing keyword
# arguments below are assumed by analogy with the call above.
net = run_experiment(dataset, num_epochs, batch_size, arch, objectives.sgvlb,
                     verbose, optpol, optpolicy.rw_linear,
                     params=paramsv, optimizer='adam')
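Assuming this script is saved as, say, lenet5_sgvlb.py (a hypothetical name), a typical invocation would be: python lenet5_sgvlb.py 0 mnist <saved-params-file>, where the optional third argument names a parameter file consumed by build_params_from_init. Note that k is parsed but never used in this particular excerpt.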
import theano.tensor as T
import lasagne.layers as ll
from lasagne import init
from lasagne import nonlinearities as nl
# ConvLayer is assumed to be Lasagne's standard Conv2DLayer; the repo may
# alias the cuDNN variant instead.
from lasagne.layers import Conv2DLayer as ConvLayer, MaxPool2DLayer


def net_lenet5(input_shape, nclass):  # signature assumed from how arch is used below
    input_x = T.tensor4("input")
    target_y = T.vector("target", dtype='int32')
    Winit = init.Normal()

    net = ll.InputLayer(input_shape, input_x)
    net = ConvLayer(net, 20, 5, W=Winit)
    net = MaxPool2DLayer(net, 2)
    net = ConvLayer(net, 50, 5, W=Winit)
    net = MaxPool2DLayer(net, 2)
    net = ll.DenseLayer(net, 500, W=Winit)
    net = ll.DenseLayer(net, nclass, W=Winit, nonlinearity=nl.softmax)
    return net, input_x, target_y, 1


num_epochs, batch_size, verbose, dataset = 200, 100, 1, 'mnist'
optp = lambda epoch: optpolicy.lr_linear(epoch, 1e-4)
arch = net_lenet5
# da is presumably a data-augmentation flag (name taken as-is from the source).
net = run_experiment(dataset, num_epochs, batch_size, arch, objectives.sgvlb,
                     verbose, optp, optpolicy.rw_linear,
                     optimizer='adam', da=True)
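Both scripts above pass a per-epoch learning-rate policy built from optpolicy. As a rough illustration only (not the repository's actual code, whose signatures may differ), a decay-to-zero policy like lr_linear_to0 could look like this:

# Illustrative sketch; the real optpolicy.lr_linear_to0 may differ.
def lr_linear_to0(epoch, lr_base, num_epochs=200):
    # Interpolate linearly from lr_base at epoch 0 down to 0 at num_epochs.
    return lr_base * max(0.0, 1.0 - float(epoch) / num_epochs)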
import os

# folder_name, filename, trainset_sizes, magn_vars, alpha, noise_type,
# noise_magnitude, ave_times and the arch/criterion/optpol_linear objects
# are defined earlier in the script.

if not os.path.exists('./experiments/logs'):
    os.mkdir('./experiments/logs')
if not os.path.exists('./experiments/logs/' + folder_name):
    os.mkdir('./experiments/logs/' + folder_name)

for trainset_size in trainset_sizes:
    # for alpha in alphas:
    for magn_var in magn_vars:
        log_fname = (folder_name + '/' + filename + '-' + str(trainset_size)
                     + '-' + str(alpha) + '-' + str(magn_var))
        run_experiment(
            dataset, num_epochs, batch_size, arch, criterion, verbose,
            optpol_linear, params=None, optimizer='adam',
            trainset_size=trainset_size, log_fname=log_fname,
            noise_type=noise_type, alpha=alpha,
            noise_magnitude=noise_magnitude, magn_var=magn_var,
            noise_ave_times=ave_times)
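With, for example, trainset_size=100, alpha=0.01 and magn_var=0.5 (illustrative values), log_fname comes out as '<folder_name>/<filename>-100-0.01-0.5', a path relative to ./experiments/logs/ that run_experiment presumably resolves when writing its log.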
    # ... the convolutional stack of net_vgglike is elided in this excerpt;
    # only the classifier head survives. The head of the call below is
    # assumed from its truncated tail.
    net = ll.DenseLayer(net, int(512 * k), W=init.Normal(),
                        nonlinearity=nl.rectify)
    net = BatchNormLayer(net, epsilon=1e-3)
    net = ll.NonlinearityLayer(net)
    net = ll.DropoutLayer(net, 0.5)
    net = ll.DenseLayer(net, nclass, W=init.Normal(), nonlinearity=nl.softmax)
    return net, input_x, target_y, k


k = float(sys.argv[1]) if len(sys.argv) > 1 else 1.0
dataset = str(sys.argv[2]) if len(sys.argv) > 2 else 'cifar10'
iparam = str(sys.argv[3]) if len(sys.argv) > 3 else None
averaging = int(sys.argv[4]) if len(sys.argv) > 4 else 0
print('k = ', k, 'dataset = ', dataset, 'params = ', iparam)

num_epochs, batch_size, verbose = 200, 100, 1
optpol = lambda epoch: optpolicy.lr_linear(epoch, 1e-5)
arch = lambda input_shape, s: net_vgglike(k, input_shape, s)
net = run_experiment(dataset, num_epochs, batch_size, arch, objectives.nll_l2,
                     verbose, optpol, optpolicy.rw_linear, optimizer='adam')
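Assuming the VGG-like script is stored as, say, vgglike_nll.py (hypothetical name), running python vgglike_nll.py 0.5 cifar10 would train a network with every layer width scaled by k=0.5. The fourth argument, averaging, is parsed but not used anywhere in this excerpt.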
# dataset, num_epochs, batch_size, arch, verbose and optpol_linear are
# defined earlier in the script.
folder_name = 'wn_tangent'
filename = 'wn_tang'
trainset_sizes = [100]
alphas = [0.01]
ave_times = 0

if not os.path.exists('./experiments/logs/' + folder_name):
    os.mkdir('./experiments/logs/' + folder_name)

for trainset_size in trainset_sizes:
    for alpha in alphas:
        log_fname = (folder_name + '/' + filename + '-'
                     + str(trainset_size) + '-' + str(alpha))
        net = run_experiment(
            dataset, num_epochs, batch_size, arch, objectives.nll, verbose,
            optpol_linear, optpolicy.rw_linear, params=None, optimizer='adam',
            train_clip=False, trainset_size=trainset_size,
            log_fname=log_fname, alpha=alpha, noise_ave_times=ave_times)
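This last sweep trains with objectives.nll rather than the variational bound. For reference, a categorical negative log-likelihood over softmax outputs is conventionally written in Theano as below (an illustrative sketch; the repository's objectives.nll may handle train/test phases and loss scaling differently):

import theano.tensor as T

# Mean categorical negative log-likelihood over softmax outputs
# (illustrative; not necessarily the repo's objectives.nll).
def nll(predictions, targets):
    return -T.mean(T.log(predictions)[T.arange(targets.shape[0]), targets])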