Example #1
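Neither example shows its imports. At a minimum the code below relies on torch and DataLoader; Baseline, make_dataloader, train_collate, and the RandomSampler used here (which takes trains, batch_size, instance_num, so it is a project-specific sampler rather than torch.utils.data.RandomSampler) are project-local helpers whose real import paths are not shown, so the commented lines are placeholders only:

import torch
from torch.utils.data import DataLoader

# Project-local helpers (actual module paths are not shown in the examples):
# from model import Baseline
# from dataset import make_dataloader, train_collate, RandomSampler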
# model_name and model_path must be defined before this point (see Example #2)
model = Baseline(model='train', model_name=model_name, model_path=model_path)
#model.load_param('models/model_1_180000.pth')
model = model.cuda()

optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
#exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.1)

# data split, batch, and sampler settings used below
kd_id = 0
kd_num = 7
batch_size = 48
instance_num = 1
train_data, val_data, trains, vals = make_dataloader(kd_id, kd_num)
# shuffle stays False because the custom RandomSampler controls sample order
train_loader = DataLoader(dataset=train_data,
                          batch_size=batch_size,
                          sampler=RandomSampler(trains, batch_size, instance_num),
                          shuffle=False,
                          num_workers=2,
                          collate_fn=train_collate)
#train_loader = DataLoader(dataset=train_data, batch_size=48, shuffle=False, num_workers=2, collate_fn=train_collate)
val_loader = DataLoader(dataset=val_data,
                        batch_size=64,
                        shuffle=False,
                        num_workers=2,
                        collate_fn=train_collate)
train_length = len(train_loader)
val_length = len(val_loader)

if __name__ == '__main__':
    max_epoch = 50
    max_val_acc = 0
Example #2
model_name = 'MixNet'
model_path = ' '   # placeholder path

model = Baseline(model='train', model_name=model_name, model_path=model_path)
#model.load_param('models/model_1_180000.pth')
model = model.cuda()

optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
#exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.1)

# data split, batch, and sampler settings used below
kd_id = 0
kd_num = 7
batch_size = 48
instance_num = 1
train_data, val_data, trains, vals = make_dataloader(kd_id, kd_num)
# shuffle stays False because the custom RandomSampler controls sample order
train_loader = DataLoader(dataset=train_data,
                          batch_size=batch_size,
                          sampler=RandomSampler(trains, batch_size, instance_num),
                          shuffle=False,
                          num_workers=2,
                          collate_fn=train_collate)
#train_loader = DataLoader(dataset=train_data, batch_size=48, shuffle=False, num_workers=2, collate_fn=train_collate)
val_loader = DataLoader(dataset=val_data,
                        batch_size=64,
                        shuffle=False,
                        num_workers=2,
                        collate_fn=train_collate)
train_length = len(train_loader)
val_length = len(val_loader)

if __name__ == '__main__':
    max_epoch = 50
    max_val_acc = 0

    for epoch in range(max_epoch):
        adjust_lr(optimizer, epoch)          # adjust the optimizer learning rate (helper defined elsewhere)
        train_fuc(model, epoch)              # train for one epoch (helper defined elsewhere)
        val_acc = val_fuc(model, epoch)      # validate and return the accuracy (helper defined elsewhere)

        # save a checkpoint every epoch, tagged with kd_id, model name, validation accuracy, and epoch
        torch.save(model.state_dict(),
                   'models/' + str(kd_id) + '_' + model_name + '_' + '%.5f' % val_acc + '_' + str(epoch) + '.pth')
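adjust_lr, train_fuc, and val_fuc are project helpers that neither example defines. Purely as a sketch of the kind of function the adjust_lr call expects, assuming a simple step decay of the 1e-4 base learning rate (the decay interval and factor here are guesses, not the original implementation):

def adjust_lr(optimizer, epoch, base_lr=1e-4, decay_epochs=20, gamma=0.1):
    # Hypothetical step decay; multiply the base rate by gamma every decay_epochs epochs.
    lr = base_lr * (gamma ** (epoch // decay_epochs))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr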