Example #1
def load(args):
    add_args(args, sub_args)  # pull in this experiment's extra default arguments
    net = LeNetCaffe()
    net.build_gate(VIB)  # attach a VIB (variational information bottleneck) gate to each layer
    train_loader, test_loader = get_MNIST(args.batch_size)

    # split parameters: gates train with a higher LR and no weight decay
    base_params = []
    gate_params = []
    for name, param in net.named_parameters():
        if 'gate' in name:
            gate_params.append(param)
        else:
            base_params.append(param)
    optimizer = optim.Adam([{
        'params': gate_params,
        'lr': 1e-2
    }, {
        'params': base_params,
        'lr': 1e-3,
        'weight_decay': 1e-4
    }])
    # decay the learning rate by 10x at 50% and 80% of training
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer,
        milestones=[int(r * args.num_epochs) for r in [.5, .8]],
        gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
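
All of these load functions share the same contract, so a caller can drive any of them identically. A minimal training-loop sketch, assuming an argparse-style args namespace with the attributes used above; net.loss is a hypothetical helper, since the real objective lives in the surrounding repository's training script:

import argparse

args = argparse.Namespace(batch_size=100, num_epochs=200)  # assumed values
net, train_loader, test_loader, optimizer, scheduler = load(args)
for epoch in range(args.num_epochs):
    for x, y in train_loader:
        optimizer.zero_grad()
        loss = net.loss(x, y)  # hypothetical helper; the real objective is defined elsewhere
        loss.backward()
        optimizer.step()
    scheduler.step()  # advance the MultiStepLR schedule once per epoch
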
Example #2
def load(args):
    add_args(args, sub_args)
    net = LeNetMLP()
    net.build_gate(BBDropout)  # per-layer beta-Bernoulli dropout gates
    net.build_gate_dep(DBBDropout, argdicts={'kl_scale': args.kl_scale})  # add dependent gates with the given KL scaling
    train_loader, test_loader = get_MNIST(args.batch_size)

    # only dependent-gate and base parameters are optimized here; parameters
    # matching neither name pattern receive no updates at all
    base_params = []
    dgate_params = []
    for name, param in net.named_parameters():
        if 'dgate' in name:
            dgate_params.append(param)
        elif 'base' in name:
            base_params.append(param)
    optimizer = optim.Adam([{
        'params': dgate_params,
        'lr': 1e-2
    }, {
        'params': base_params,
        'lr': 1e-3,
        'weight_decay': 1e-4
    }])

    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer,
        milestones=[int(r * args.num_epochs) for r in [.5, .8]],
        gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
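
The name-based split above relies on PyTorch's named_parameters(), which yields dotted module paths. A self-contained sketch of the same pattern on a toy module; the names here are illustrative, not the repository's actual module paths:

import torch
import torch.nn as nn

class Toy(nn.Module):
    def __init__(self):
        super().__init__()
        self.base = nn.Linear(4, 4)                     # picked up by the 'base' branch
        self.dgate = nn.Linear(4, 1)                    # picked up by the 'dgate' branch
        self.gate_logit = nn.Parameter(torch.zeros(4))  # matches neither branch

net = Toy()
dgate_params = [p for n, p in net.named_parameters() if 'dgate' in n]
base_params = [p for n, p in net.named_parameters() if 'base' in n]
print(len(dgate_params), len(base_params))  # 2 2 (weight and bias of each Linear)
# 'gate_logit' lands in neither list, so it would receive no gradient updates
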
Example #3
def load(args):
    add_args(args, sub_args)
    net = LeNetCaffe()  # unpruned baseline: no gates are built
    train_loader, test_loader = get_MNIST(args.batch_size)
    optimizer = optim.Adam(net.parameters(), lr=1e-3, weight_decay=1e-4)
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer,
        milestones=[int(r * args.num_epochs) for r in [0.5, 0.8]],
        gamma=0.1)
    return net, train_loader, test_loader, optimizer, scheduler
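
Every example uses the same schedule: the learning rate is cut by gamma=0.1 once 50% and 80% of the epochs have elapsed. A standalone check of what that produces, with num_epochs=200 as an assumed value:

import torch
import torch.optim as optim

param = torch.nn.Parameter(torch.zeros(1))
opt = optim.Adam([param], lr=1e-3)
sched = optim.lr_scheduler.MultiStepLR(
    opt, milestones=[int(r * 200) for r in [.5, .8]], gamma=0.1)
lrs = []
for epoch in range(200):
    lrs.append(opt.param_groups[0]['lr'])  # LR in effect for this epoch
    opt.step()   # optimizer step first, then scheduler (PyTorch >= 1.1 order)
    sched.step()
print(lrs[0], lrs[120], lrs[180])  # 1e-3, then ~1e-4, then ~1e-5
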
Example #4
def load(args):
    add_args(args, sub_args)
    # rescale the L0 penalty from whole-dataset to per-example scale
    # (MNIST has 60,000 training images)
    lamb = args.lambN / 60000.0
    net = LeNetMLP()
    net.build_gate(L0Reg, {'weight_decay': 1e-4, 'lamb': lamb})
    train_loader, test_loader = get_MNIST(args.batch_size)
    optimizer = optim.Adam(net.parameters(), lr=1e-3)  # note: no optimizer-level weight decay here
    scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
            milestones=[int(r*args.num_epochs) for r in [.5, .8]],
            gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
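
The lamb = args.lambN / 60000.0 line keeps the two loss terms on the same scale: the data term is a mean over examples, so the L0 penalty coefficient, specified for the whole training set, must be divided by N as well. A toy illustration; all names and values here are stand-ins, not the repository's:

N = 60000                  # MNIST training set size
lambN = 10.0               # assumed whole-dataset coefficient
lamb = lambN / N           # per-example coefficient, as in the snippet above
data_loss = 0.25           # stand-in for a mean cross-entropy
expected_l0 = 300.0        # stand-in for the expected number of active gates
total = data_loss + lamb * expected_l0
print(total)               # ~0.3
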
Example #5
def load(args):
    net = LeNetCaffe()
    # SBP gates with per-layer settings: one argdict per gated layer,
    # scaling the KL term for the first two layers only
    net.build_gate(SBP, [{'kl_scale': 40}, {'kl_scale': 16}, {}, {}])
    train_loader, test_loader = get_MNIST(100)  # batch size hard-coded to 100

    # same gate/base parameter split as in Example #1
    base_params = []
    gate_params = []
    for name, param in net.named_parameters():
        if 'gate' in name:
            gate_params.append(param)
        else:
            base_params.append(param)
    optimizer = optim.Adam([
        {'params':gate_params, 'lr':1e-2},
        {'params':base_params, 'lr':1e-3, 'weight_decay':1e-4}])
    scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
            milestones=[int(r*args.num_epochs) for r in [.5, .8]],
            gamma=0.1)

    args.pretrain_dir = '../results/lenet_caffe_mnist'  # start from a pretrained checkpoint
    gamma = 1. / 60000  # 1/N for the 60,000 MNIST training images

    return net, gamma, train_loader, test_loader, optimizer, scheduler
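
Unlike the other examples, this load returns a seventh value, gamma, and mutates args to point at a pretrained checkpoint, so the caller must unpack it differently. A minimal consumption sketch; num_epochs is the only attribute read from args, and its value here is assumed:

import argparse

args = argparse.Namespace(num_epochs=200)  # assumed; batch size is hard-coded inside load
net, gamma, train_loader, test_loader, optimizer, scheduler = load(args)
print(args.pretrain_dir)  # '../results/lenet_caffe_mnist', set inside load
print(gamma)              # 1.666...e-05, i.e. 1/60000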