Example #1
import torch  # NOTE: get_network_fn and get_parameters_size are project helpers

def test():
    # Quick smoke test: push a random batch through the 'yuGabor' network.
    a = torch.randn(128, 1, 32, 32).cuda()
    model = get_network_fn('yuGabor')
    model = model.cuda()
    # model = get_network_fn('gaborCNN')
    print(get_parameters_size(model) / 1e6)  # parameter count in millions
    # print(model)
    b = model(a)
    print(b[0].size())
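
Here get_network_fn and get_parameters_size come from the surrounding project. As a rough sketch, assuming get_parameters_size simply counts the elements of every parameter tensor, an equivalent helper could look like:

def get_parameters_size(model):
    # Hypothetical stand-in for the project helper: total number of parameters.
    return sum(p.numel() for p in model.parameters())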
Example #2
try:
    visualize_graph(model, writer, input_size=(1, 1, 28, 28))
except Exception:
    print('\nNetwork Visualization Failed! But the training procedure continues.')
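
# visualize_graph is a project-specific helper; a rough sketch, assuming it just
# wraps TensorBoard's SummaryWriter.add_graph with a dummy input of the given size:
#
#   def visualize_graph(model, writer, input_size):
#       dummy = torch.zeros(input_size, device=next(model.parameters()).device)
#       writer.add_graph(model, dummy)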

# optimizer = optim.Adadelta(model.parameters(), lr=args.lr, rho=0.9, eps=1e-06, weight_decay=3e-05)
# optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=3e-05)
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=3e-05)
scheduler = StepLR(optimizer, step_size=10, gamma=0.5)
criterion = nn.CrossEntropyLoss()
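
# StepLR(step_size=10, gamma=0.5) halves the learning rate every 10 epochs.
# Sketch of the usual epoch loop (assuming an args.epochs argument and the
# train() function defined below):
#
#   for epoch in range(1, args.epochs + 1):
#       train(epoch)
#       scheduler.step()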

device = torch.device("cuda" if use_cuda else "cpu")
model = model.to(device)
criterion = criterion.to(device)

# Calculate the total parameters of the model
print('Model size: {:0.2f} million float parameters'.format(get_parameters_size(model)/1e6))

if args.pretrained:
    if os.path.isfile(args.pretrained):
        print("=> loading checkpoint '{}'".format(args.pretrained))
        checkpoint = torch.load(args.pretrained)
        model.load_state_dict(checkpoint['state_dict'])
    else:
        print("=> no checkpoint found at '{}'".format(args.pretrained))
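
# torch.load above expects a dict with a 'state_dict' key, i.e. a checkpoint
# saved roughly like this (the filename here is only illustrative):
#
#   torch.save({'state_dict': model.state_dict()}, 'checkpoint.pth.tar')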

def train(epoch):
    model.train()
    global iteration
    st = time.time()
    for batch_idx, (data, target) in enumerate(train_loader):
        iteration += 1

Example #3

# Load model
model = resnet18(M=args.M, method=args.method, stages=stages).to(device)
print(model)

# Try to visualize the model
try:
    visualize_graph(model, writer, input_size=(1, 3, 32, 32))
except Exception:
    print('\nNetwork Visualization Failed! But the training procedure continues.')

# Calculate the total parameters of the model
print('Model size: {:0.2f} million float parameters'.format(
    get_parameters_size(model) / 1e6))

# Parameters whose names end in 'MFilters' get their own optimizer group
# (no weight decay, smaller learning rate); everything else keeps the defaults.
MFilter_params = [
    param for name, param in model.named_parameters()
    if name.endswith('MFilters')
]
Other_params = [
    param for name, param in model.named_parameters()
    if not name.endswith('MFilters')
]
# optimizer = optim.Adadelta(model.parameters(), lr=args.lr, rho=0.9, eps=1e-06, weight_decay=3e-05)
# optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=3e-05)
optimizer = optim.SGD([{
    'params': MFilter_params,
    'weight_decay': 0,
    'lr': args.lr * 0.1,