'GRAM_PYRAMID'
'''
train_mode = 'GRAM_DIRECT'

if train_mode == 'WEIGHTS':
    if model_name[-3:] == '_bn':
        ensembleModel = VGGNetNthLayer(dataset, [13, 26, 39, 52], model_name)
    else:
        ensembleModel = VGGNetNthLayer(dataset, [9, 18, 27, 36], model_name)
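# the two index lists above are assumed to mark the same feature-map taps in the VGG
# backbone; the *_bn variants use larger indices because the interleaved BatchNorm
# layers shift the positions of those taps inside model.features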

print('Train mode=' + train_mode)

Trainloader, Testloader = LoadDataSet(dataset).data

#%%
model = LoadModel(model_name, dataset, train_mode).model
#if model_name=='SVGG14':
#    num_opt=1
#elif model_name=='SVGG11':
#    num_opt=2
#elif model_name=='SVGG8':
#    num_opt=3
#elif model_name=='SVGG5':
#    num_opt=4
print('loading big model...')
big_model_name, ensembleModel = Load_BigModel(model_name, train_mode, dataset)

model = torch.nn.DataParallel(model)  # wrap the model in DataParallel for multi-GPU training
if (ensembleModel is not None):
    ensembleModel = torch.nn.DataParallel(ensembleModel)
# (assumed reconstruction: the optimizer/scheduler setup and the start of this training call
#  are missing here; "Train" is a placeholder name inferred from the save_model arguments)
trainAcc_to_file,testAcc_to_file,trainloss_to_file,testloss_to_file,Parameters = Train(model,
                                                                                     optimizer,
                                                                                     Trainloader,
                                                                                     Testloader, 
                                                                                     epochs=10,
                                                                                     Train_mode=train_mode,
                                                                                     scheduler=scheduler,
                                                                                     big_model_name=big_model_name,
                                                                                     ensembleModel=ensembleModel)

save_model(model,trainAcc_to_file,testAcc_to_file,trainloss_to_file,testloss_to_file,Parameters,
               model_name,train_mode,dataset,plot=False)
'''


#%%
model=LoadModel(model_name,dataset,train_mode).model
print('loading big model...')
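# Load_BigModel presumably returns the teacher network's name together with a wrapper
# ("ensembleModel") exposing its intermediate activations for the Gram/FitNet losses;
# it is the project's own helper, so this description is an assumption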
big_model_name,ensembleModel=Load_BigModel(model_name,train_mode,dataset)

model = torch.nn.DataParallel(model)  # wrap the model in DataParallel for multi-GPU training
if (ensembleModel is not None):
    ensembleModel=torch.nn.DataParallel(ensembleModel)
    
print('model='+model_name)
print('big model='+big_model_name)
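# stage-1 optimizer setup: one base learning rate is defined below and, for the 'fitnet'
# mode (presumably the FitNets hint-training stage), Adam is built from per-module
# parameter groups so each part of the student (features, compressed/frozen features,
# regressor, classifier) can get its own learning rate; with the *1 multipliers they all
# start at lr_stage1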
lr_stage1=1e-4
if 'fitnet'==train_mode.lower():
    # model is wrapped in nn.DataParallel above, so its submodules are reached through .module
    optimizer = torch.optim.Adam([{'params': model.module.features.parameters()},
                                  {'params': model.module.compressed_features.parameters(), 'lr': lr_stage1*1},
                                  {'params': model.module.frozen_features.parameters(), 'lr': lr_stage1*1},
                                  {'params': model.module.regressor.parameters(), 'lr': lr_stage1*1},
                                  {'params': model.module.classifier.parameters(), 'lr': lr_stage1*1}], lr=lr_stage1,