# Assumes module-level imports: os, time, torch.optim as optim,
# and a save_ckpt helper (see the sketch below).
def train(self, train_loader, save_path, finetune=False, iters=450000):
    # writer = SummaryWriter(log_dir="log_info")
    # self.G.train is assumed to accept a finetune flag that adjusts
    # which submodules stay trainable during fine-tuning.
    self.G.train(finetune=finetune)
    if finetune:
        # Rebuild the optimizer at a lower learning rate over only the
        # parameters that still require gradients.
        self.optm_G = optim.Adam(
            filter(lambda p: p.requires_grad, self.G.parameters()), lr=5e-5)
    print("Starting training from iteration:{:d}".format(self.iter))
    s_time = time.time()
    while self.iter < iters:
        for items in train_loader:
            gt_images, masks = self.__cuda__(*items)
            masked_images = gt_images * masks
            self.forward(masked_images, masks, gt_images)
            self.update_parameters()
            self.iter += 1

            # Report the averaged L1 loss every 50 iterations.
            if self.iter % 50 == 0:
                e_time = time.time()
                int_time = e_time - s_time
                print("Iteration:%d, l1_loss:%.4f, time_taken:%.2f"
                      % (self.iter, self.l1_loss_val / 50, int_time))
                s_time = time.time()
                self.l1_loss_val = 0.0

            # Periodically checkpoint the generator and its optimizer.
            if self.iter % 40000 == 0:
                os.makedirs(save_path, exist_ok=True)
                save_ckpt('{:s}/g_{:d}.pth'.format(save_path, self.iter),
                          [('generator', self.G)],
                          [('optimizer_G', self.optm_G)],
                          self.iter)

            if self.iter % 200 == 0:
                self.visualizer.display_current_results(
                    self.get_current_visuals(), self.iter, False)

            # Stop mid-epoch once the target iteration count is reached;
            # otherwise the loop can overshoot iters by up to one epoch.
            if self.iter >= iters:
                break

    # Save a final checkpoint once training completes.
    os.makedirs(save_path, exist_ok=True)
    save_ckpt('{:s}/g_{:s}.pth'.format(save_path, "final"),
              [('generator', self.G)],
              [('optimizer_G', self.optm_G)],
              self.iter)
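# Both training routines above and below call save_ckpt, which is not
# defined in this snippet. A minimal sketch of what it might look like,
# assuming it simply bundles the iteration counter and every state dict
# into one file (the layout in the original project may differ):
import torch

def save_ckpt(ckpt_name, models, optimizers, n_iter):
    # models and optimizers are lists of (name, object) pairs; store each
    # object's state_dict under its name, plus the iteration counter.
    ckpt_dict = {'n_iter': n_iter}
    for prefix, model in models:
        ckpt_dict[prefix] = model.state_dict()
    for prefix, optimizer in optimizers:
        ckpt_dict[prefix] = optimizer.state_dict()
    torch.save(ckpt_dict, ckpt_name)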
# Inner body of the training loop. Assumes i is the current iteration index,
# image, mask, and gt are batch tensors already on the device, writer is a
# TensorBoard SummaryWriter, and criterion returns a dict of individual
# loss terms keyed by name.
output, _ = model(image, mask)
loss_dict = criterion(image, mask, output, gt)

# Combine the individual loss terms with their weights, logging each
# weighted term at the configured interval.
loss = 0.0
for key, coef in opt.LAMBDA_DICT.items():
    value = coef * loss_dict[key]
    loss += value
    if (i + 1) % args.log_interval == 0:
        writer.add_scalar('loss_{:s}'.format(key), value.item(), i + 1)

optimizer.zero_grad()
loss.backward()
optimizer.step()

# Checkpoint at the configured interval and on the final iteration.
if (i + 1) % args.save_model_interval == 0 or (i + 1) == args.max_iter:
    save_ckpt('{:s}/ckpt/{:d}.pth'.format(args.save_dir, i + 1),
              [('model', model)], [('optimizer', optimizer)], i + 1)

# Periodically render validation results; the gamma/exposure/black/white
# arguments presumably control tone mapping of the saved image.
if (i + 1) % args.vis_interval == 0:
    model.eval()
    evaluate(model, dataset_val, device,
             '{:s}/images/test_{:d}.jpg'.format(args.save_dir, i + 1),
             gamma=args.gamma, exposure=args.exposure,
             black=args.black_level, white=args.white_level)
    model.train()  # restore training mode so BatchNorm/Dropout behave correctly

# After the loop finishes:
writer.close()
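# The loop above weights each loss term by opt.LAMBDA_DICT. A hypothetical
# configuration for a partial-convolution-style inpainting loss: the keys
# must match those returned by criterion, and the values here are
# illustrative, not taken from the original project.
LAMBDA_DICT = {
    'valid': 1.0,    # reconstruction loss on unmasked (valid) pixels
    'hole': 6.0,     # reconstruction loss on masked (hole) pixels
    'prc': 0.05,     # perceptual (feature) loss
    'style': 120.0,  # style (Gram matrix) loss
    'tv': 0.1,       # total-variation smoothness loss
}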