def load(args):
    """Build a gated LeNetCaffe for MNIST with two Adam parameter groups.

    The net gets a BBDropout gate plus a dependent DBBDropout gate (scaled
    by ``args.kl_scale``). Dependent-gate parameters train at a higher
    learning rate without weight decay; base parameters use weight decay.

    Returns (net, train_loader, test_loader, optimizer, scheduler).
    """
    add_args(args, sub_args)
    net = LeNetCaffe()
    net.build_gate(BBDropout)
    net.build_gate(DBBDropout, argdicts={'kl_scale': args.kl_scale}, dep=True)
    train_loader, test_loader = get_MNIST(args.batch_size)

    # Partition parameters by name substring. NOTE(review): parameters
    # matching neither 'dgate' nor 'base' end up in no optimizer group —
    # presumably intentional (frozen), but confirm against the model.
    dgate_params = [p for n, p in net.named_parameters() if 'dgate' in n]
    base_params = [
        p for n, p in net.named_parameters()
        if 'dgate' not in n and 'base' in n
    ]
    optimizer = optim.Adam([
        {'params': dgate_params, 'lr': 1e-2},
        {'params': base_params, 'lr': 1e-3, 'weight_decay': 1e-4},
    ])

    # Decay the LR by 10x at 50% and 80% of the training run.
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer,
        milestones=[int(frac * args.num_epochs) for frac in (.5, .8)],
        gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
# Example 2
def load(args):
    """Build a VIB-gated VGG for CIFAR-10 with two Adam parameter groups.

    Gate parameters train at lr=1e-2 without weight decay; everything else
    trains at lr=1e-3 with weight decay 1e-4.

    Returns (net, train_loader, test_loader, optimizer, scheduler).
    """
    add_args(args, sub_args)
    net = VGG(10)
    net.build_gate(VIB)
    train_loader, test_loader = get_CIFAR10(args.batch_size)

    # Route each parameter to its group in a single pass over the net.
    gate_params, base_params = [], []
    for pname, param in net.named_parameters():
        target = gate_params if 'gate' in pname else base_params
        target.append(param)
    optimizer = optim.Adam([
        {'params': gate_params, 'lr': 1e-2},
        {'params': base_params, 'lr': 1e-3, 'weight_decay': 1e-4},
    ])

    # Decay the LR by 10x at 50% and 80% of the training run.
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer,
        milestones=[int(frac * args.num_epochs) for frac in (.5, .8)],
        gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
# Example 3
# File: kkanji.py — project: mlzxy/dac
def load(args):
    """Resolve default kKanji benchmark archives and build the model.

    Any user-supplied ``testfile``/``clusterfile`` is kept (joined under
    ``benchmarks_path``); otherwise the standard archives are used.
    """
    add_args(args, sub_args)
    testfile = args.testfile
    if testfile is None:
        testfile = 'kkanji_10_100_4.tar'
    args.testfile = os.path.join(benchmarks_path, testfile)
    clusterfile = args.clusterfile
    if clusterfile is None:
        clusterfile = 'kkanji_10_400_12.tar'
    args.clusterfile = os.path.join(benchmarks_path, clusterfile)
    return Model(args)
# Example 4
def load(args):
    """Resolve the default EMNIST benchmark archive and build the model.

    When no ``testfile`` is given, defaults to the phase-specific archive
    ``emnist_phase{args.phase}_10_1000_4.tar`` under ``benchmarks_path``.
    """
    add_args(args, sub_args)

    if args.testfile is None:
        # The original wrapped benchmarks_path in a redundant one-argument
        # os.path.join (a no-op); join directly instead.
        args.testfile = os.path.join(
            benchmarks_path,
            'emnist_phase{}_10_1000_4.tar'.format(args.phase))

    return Model(args)
# Example 5
def load(args):
    """Plain (ungated) VGG on CIFAR-100: Adam with weight decay and a
    step LR schedule at 50% and 80% of training.

    Returns (net, train_loader, test_loader, optimizer, scheduler).
    """
    add_args(args, sub_args)
    net = VGG(100)
    train_loader, test_loader = get_CIFAR100(args.batch_size)
    optimizer = optim.Adam(net.parameters(), lr=1e-3, weight_decay=1e-4)
    milestones = [int(frac * args.num_epochs) for frac in (0.5, 0.8)]
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer, milestones=milestones, gamma=0.1)
    return net, train_loader, test_loader, optimizer, scheduler
# Example 6
def load(args):
    """LeNet-style MLP with L0-regularization gates on MNIST.

    The L0 penalty ``lamb`` is ``args.lambN`` normalized by the MNIST
    training-set size (60,000 examples).

    Returns (net, train_loader, test_loader, optimizer, scheduler).
    """
    add_args(args, sub_args)
    # Normalize the per-example penalty by the dataset size.
    lamb = args.lambN / 60000.0
    net = LeNetMLP()
    net.build_gate(L0Reg, {'weight_decay': 1e-4, 'lamb': lamb})
    train_loader, test_loader = get_MNIST(args.batch_size)
    optimizer = optim.Adam(net.parameters(), lr=1e-3)
    milestones = [int(frac * args.num_epochs) for frac in (0.5, 0.8)]
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer, milestones=milestones, gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
# Example 7
def load(args):
    """Resolve the paired-EMNIST benchmark archive and build the model.

    ``args.novel`` selects the novel-class archive; an explicit
    ``args.testfile`` always wins over the default.
    """
    add_args(args, sub_args)

    # Choose the default archive once, then apply the usual
    # user-override-or-default join.
    default = 'emnist_paired_novel.tar' if args.novel else 'emnist_paired.tar'
    args.testfile = os.path.join(
        benchmarks_path,
        default if args.testfile is None else args.testfile)
    return Model(args)
# Example 8
def load(args):
    """Resolve miniImageNet benchmark archives and build the model.

    ``args.novel`` switches both defaults to the novel-class archives;
    explicit ``testfile``/``clusterfile`` values always win.
    """
    add_args(args, sub_args)

    # Select both default archive names in one branch.
    if args.novel:
        test_default = 'mini_imagenet_novel_10_200_4.tar'
        cluster_default = 'mini_imagenet_novel_10_600_12.tar'
    else:
        test_default = 'mini_imagenet_10_200_4.tar'
        cluster_default = 'mini_imagenet_10_600_12.tar'

    args.testfile = os.path.join(
        benchmarks_path,
        test_default if args.testfile is None else args.testfile)
    args.clusterfile = os.path.join(
        benchmarks_path,
        cluster_default if args.clusterfile is None else args.clusterfile)
    return Model(args)
# Example 9
def load(args):
    """VGG-100 with L0-regularization gates on CIFAR-100.

    The gate uses ``args.lamb`` directly (no dataset-size normalization)
    and a higher initial drop rate of 0.2.

    Returns (net, train_loader, test_loader, optimizer, scheduler).
    """
    add_args(args, sub_args)
    net = VGG(100)
    gate_config = {
        'weight_decay': 1e-4,
        'lamb': args.lamb,
        'droprate_init': 0.2,
    }
    net.build_gate(L0Reg, gate_config)
    train_loader, test_loader = get_CIFAR100(args.batch_size)
    optimizer = optim.Adam(net.parameters(), lr=1e-3)
    milestones = [int(frac * args.num_epochs) for frac in (0.5, 0.8)]
    scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer, milestones=milestones, gamma=0.1)

    return net, train_loader, test_loader, optimizer, scheduler
# Example 10
def load(args):
    """Merge this experiment's sub-argument defaults into *args* and
    construct the model."""
    add_args(args, sub_args)
    return Model(args)
# Example 11
def load(args):
    """Merge this experiment's sub-argument defaults into *args* and
    construct the anchored variant of the model."""
    add_args(args, sub_args)
    return AnchoredModel(args)