Example #1
0
# seed
# Enable CUDA when available and seed both the CPU and GPU RNGs so runs
# are reproducible for a given args.seed.
args.cuda = torch.cuda.is_available()
torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)

# data loader and model
# Only CIFAR-10/100 are accepted, yet get10()/cifar10() are called
# unconditionally -- presumably the CIFAR-100 variant is handled elsewhere
# in the full script; TODO confirm.
assert args.type in ['cifar10', 'cifar100'], args.type
train_loader, test_loader = dataset.get10(batch_size=args.batch_size,
                                          num_workers=1)
# NOTE(review): this rebinds the module name `model` to the model instance,
# shadowing the imported module from here on.
model = model.cifar10(args=args, logger=logger)
if args.cuda:
    model.cuda()

# lr is fixed at 1; the effective step size appears to be governed by
# grad_scale below instead of the optimizer lr -- TODO confirm against
# the training loop.
optimizer = optim.SGD(model.parameters(), lr=1)

# Comma-separated epoch indices at which the schedule decays grad_scale.
decreasing_lr = list(map(int, args.decreasing_lr.split(',')))
logger('decreasing_lr: ' + str(decreasing_lr))
best_acc, old_file = 0, None
t_begin = time.time()
grad_scale = args.grad_scale

try:
    # ready to go
    for epoch in range(args.epochs):
        model.train()

        # At each listed epoch, divide grad_scale by 8 (this acts as the
        # learning-rate decay, since the optimizer lr is a constant 1).
        if epoch in decreasing_lr:
            grad_scale = grad_scale / 8.0
# seed
args.cuda = torch.cuda.is_available()
torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)

# data loader and model
assert args.type in ['cifar10', 'cifar100'], args.type
train_loader, test_loader = dataset.get10(batch_size=args.batch_size,
                                          num_workers=1)
model = model.cifar10(args=args, logger=logger)
if args.cuda:
    model.cuda()

optimizer = optim.SGD(model.parameters(), lr=1)

decreasing_lr = list(map(int, args.decreasing_lr.split(',')))
logger('decreasing_lr: ' + str(decreasing_lr))
best_acc, old_file = 0, None
t_begin = time.time()
grad_scale = args.grad_scale

try:
    # ready to go
    if args.cellBit != args.wl_weight:
        print(
            "Warning: Weight precision should be the same as the cell precison !"
        )
    # add d2dVari
    paramALTP = {}