def main():
    """Entry point: build the U-Net, then train / validate / test per ``args.phase``.

    Relies on module-level ``args`` (CLI phase selection) and ``opt``
    (hyper-parameters, checkpoint and save paths). Side effects: moves the
    model and loss to GPU 0, and writes checkpoints / learning-rate logs
    during training.
    """
    global args
    net = UNet(3, 1)            # 3 input channels (RGB), 1 output mask channel
    net.load(opt.ckpt_path)
    loss = Loss('soft_dice_loss')
    torch.cuda.set_device(0)
    net = net.cuda()
    loss = loss.cuda()

    if args.phase == 'train':
        dataset = NucleiDetector(opt, phase=args.phase)
        train_loader = DataLoader(dataset,
                                  batch_size=opt.batch_size,
                                  shuffle=True,
                                  num_workers=opt.num_workers,
                                  pin_memory=opt.pin_memory)
        lr = opt.lr
        optimizer = torch.optim.Adam(net.parameters(), lr=lr,
                                     weight_decay=opt.weight_decay)
        previous_loss = None  # None => first epoch, nothing to compare against
        # NOTE: range(opt.epoch + 1) runs opt.epoch + 1 epochs (inclusive count).
        for epoch in range(opt.epoch + 1):
            now_loss = train(train_loader, net, loss, epoch, optimizer,
                             opt.model_save_freq, opt.model_save_path)
            # Decay the learning rate whenever the epoch loss stops improving.
            if previous_loss is not None and now_loss > previous_loss:
                lr *= opt.lr_decay
                for param_group in optimizer.param_groups:
                    param_group['lr'] = lr
                save_lr(net.model_name, opt.lr_save_path, lr)
            previous_loss = now_loss
    elif args.phase == 'val':
        dataset = NucleiDetector(opt, phase='val')
        # shuffle=False: evaluation should be deterministic (was shuffle=True).
        val_loader = DataLoader(dataset,
                                batch_size=opt.batch_size,
                                shuffle=False,
                                num_workers=opt.num_workers,
                                pin_memory=opt.pin_memory)
        val(val_loader, net, loss)
    else:  # test phase
        dataset = NucleiDetector(opt, phase='test')
        # batch_size=1 and shuffle=False so per-sample outputs keep a stable,
        # reproducible order (was shuffle=True).
        test_loader = DataLoader(dataset,
                                 batch_size=1,
                                 shuffle=False,
                                 num_workers=opt.num_workers,
                                 pin_memory=opt.pin_memory)
        test(test_loader, net, opt)
def train():
    """Train a U-Net on the nucleus dataset using module-level CLI ``args``.

    Loads the dataset, resolves the compute device, optionally restores a
    checkpoint, and delegates the epoch loop to ``Trainer``. Writes model
    artifacts under the module-level ``output_dir``.
    """
    # Load the data sets (images and masks both rescaled to 256 and tensorised).
    train_dataset = NucleusDataset(
        "data",
        train=True,
        transform=Compose([Rescale(256), ToTensor()]),
        target_transform=Compose([Rescale(256), ToTensor()]))

    # Use cuda if available.
    device = "cuda" if torch.cuda.is_available() else "cpu"

    # Build (or restore) the model and move it to the resolved device.
    if args.from_checkpoint:
        model = UNet.load(args.from_checkpoint)
    else:
        model = UNet()
    model.to(device)

    # Initialize optimizer.
    optimizer = optim.Adam(model.parameters(), lr=args.learning_rate)

    # Initialize trainer.
    # Bug fix: pass the resolved `device` (the one the model was moved to),
    # not `args.device` — otherwise the trainer and model could disagree.
    trainer = Trainer(dataset=train_dataset,
                      model=model,
                      optimizer=optimizer,
                      batch_size=args.batch_size,
                      device=device,
                      output_dir=output_dir)

    # Run the training.
    trainer.run_train_loop(epochs=args.epochs)
-------------------------------------------------------------------------------------------- """ parser = argparse.ArgumentParser() parser.add_argument('--model_path', type=str, default='result.pth') parser.add_argument('--mixture_folder', type=str, default='inference/mixture') parser.add_argument('--tar', type=str, default='inference/split') args = parser.parse_args() if not os.path.exists(args.tar): os.mkdir(args.tar) # ========================================================================================= # 2. Separate the singing voice for the song # ========================================================================================= # Load the pre-trained model model = UNet() model.load(args.model_path) model.eval() # Seperate! with torch.no_grad(): bar = tqdm( [_ for _ in sorted(os.listdir(args.mixture_folder)) if 'spec' in _]) for idx, name in enumerate(bar): if idx > 5: break mix = np.load(os.path.join(args.mixture_folder, name)) spec_sum = None for i in range(mix.shape[-1] // 128): # Get the fixed size of segment seg = mix[1:, i * 128:i * 128 + 128, np.newaxis] seg = np.asarray(seg, dtype=np.float32)
# Remaining CLI options for the training entry point.
# NOTE(review): `parser`, `--load_path` and `--train_folder` are defined
# earlier in this file (outside this chunk) — TODO confirm.
parser.add_argument('--save_path', type=str, default='result.pth')
parser.add_argument('--epoch', type=int, default=2)
args = parser.parse_args()

# =========================================================================================
# 2. Training
# =========================================================================================
# One spectrogram pair per batch, shuffled each epoch.
loader = Data.DataLoader(
    dataset=SpectrogramDataset(args.train_folder),
    batch_size=1,
    num_workers=0,
    shuffle=True,
)

# Restore the pre-trained weights before fine-tuning.
model = UNet()
model.load(args.load_path)

# Training loop: one optimisation step per (mixture, vocal) pair.
for ep in range(args.epoch):
    bar = tqdm_table(loader)
    for i, (mix, voc) in enumerate(bar):
        mix, voc = mix.cuda(), voc.cuda()
        model.backward(mix, voc)
        # Only the final batch of the epoch asks for the normalised loss.
        info_dict = model.getLoss(normalize=(i == len(bar) - 1))
        info_dict.update({'Epoch': ep})
        bar.set_table_info(info_dict)
model.save(args.save_path)
print("Finish training!")