def train_wrapper(model):
    """Run the main training loop for `model`.

    Optionally restores pretrained weights, then iterates up to
    ``args.max_iterations`` batches with scheduled sampling, taking a
    numbered snapshot every ``snapshot_interval`` iterations and running
    evaluation every ``test_interval`` iterations.
    """
    if args.pretrained_model:
        model.load(args.pretrained_model)

    # Build the train/test input pipelines.
    handles = datasets_factory.data_provider(
        args.dataset_name,
        args.train_data_paths,
        args.valid_data_paths,
        args.batch_size,
        args.img_width,
        seq_length=args.total_length,
        is_training=True,
    )
    train_input_handle, test_input_handle = handles

    sampling_eta = args.sampling_start_value
    for itr in range(1, args.max_iterations + 1):
        print("Iter number:", itr)

        # Re-shuffle once the current epoch is exhausted.
        if train_input_handle.no_batch_left():
            train_input_handle.begin(do_shuffle=True)

        batch = train_input_handle.get_batch()
        batch = preprocess.reshape_patch(batch, args.patch_size)

        sampling_eta, mask = schedule_sampling(sampling_eta, itr)
        trainer.train(model, batch, mask, args, itr)

        if itr % args.snapshot_interval == 0:
            model.save(itr)
        if itr % args.test_interval == 0:
            trainer.test(model, test_input_handle, args, itr)

        train_input_handle.next()
def main(_):
    """Entry point: bind configs into factories and launch training."""
    model_config, train_config, input_config = get_configs_from_pipeline_file()

    # Pre-bind the static configs; the trainer invokes these factories later.
    build_model = functools.partial(
        build_man_model, model_config=model_config, is_training=True)
    read_input = functools.partial(input_reader.read, input_config)

    trainer.train(build_model, read_input, train_config,
                  FLAGS.train_dir, FLAGS.image_root)
def train_wrapper(model):
    """Train `model`, checkpointing the best SSIM / PSNR / val-loss snapshots.

    Restores pretrained weights if requested, then iterates the training
    set with (reverse) scheduled sampling.  Every ``test_interval``
    iterations the model is evaluated, and a separate checkpoint is saved
    for each metric that improved.

    Fix: the original chained the three metric checks with ``elif``, so at
    most one "best" checkpoint could be refreshed per evaluation — when
    SSIM improved, a simultaneous PSNR or validation-loss improvement was
    silently dropped.  The three trackers are now independent.
    """
    if args.pretrained_model:
        model.load(args.pretrained_model)

    # load data
    train_input_handle, test_input_handle = datasets_factory.data_provider(
        args.dataset_name,
        args.train_data_paths,
        args.valid_data_paths,
        args.batch_size,
        args.img_width,
        seq_length=args.total_length,
        is_training=True,
    )

    eta = args.sampling_start_value
    best_val_loss = math.inf
    best_ssim = -1
    best_psnr = -1

    for itr in range(1, args.max_iterations + 1):
        if train_input_handle.no_batch_left():
            train_input_handle.begin(do_shuffle=True)

        ims = train_input_handle.get_batch()
        ims = preprocess.reshape_patch(ims, args.patch_size)

        # Reverse scheduled sampling ignores eta; regular sampling decays it.
        if args.reverse_scheduled_sampling == 1:
            real_input_flag = reserve_schedule_sampling_exp(itr)
        else:
            eta, real_input_flag = schedule_sampling(eta, itr)

        trainer.train(model, ims, real_input_flag, args, itr)

        # Periodic numbered snapshot; otherwise keep a rolling "latest".
        if itr % args.snapshot_interval == 0:
            model.save(itr)
        else:
            model.save("latest")

        if itr % args.test_interval == 0:
            val_loss, ssim, psnr = trainer.test(model, test_input_handle, args, itr)
            # Independent checks: several metrics may improve in one eval.
            if ssim > best_ssim:
                best_ssim = ssim
                model.save("bestssim")
                print("Best SSIM found: {}".format(best_ssim))
            if psnr > best_psnr:
                best_psnr = psnr
                model.save("bestpsnr")
                print("Best PSNR found: {}".format(best_psnr))
            if val_loss < best_val_loss:
                best_val_loss = val_loss
                model.save("bestvalloss")
                print("Best ValLossMSE found: {}".format(best_val_loss))

        train_input_handle.next()
def wrapper_train(model):
    """CIKM training loop with early stopping on validation MSE.

    Trains with scheduled sampling; after `limit` consecutive
    non-improving validations it reloads the best checkpoint, reports
    its test MSE, and stops.
    """
    if args.pretrained_model:
        model.load(args.pretrained_model)

    eta = args.sampling_start_value
    best_mse = math.inf
    best_iter = None
    patience = 0  # consecutive validations without improvement
    limit = 3

    for itr in range(1, args.max_iterations + 1):
        # Draw a batch and bring it into normalised patch form.
        frames = sample(batch_size=batch_size)
        frames = padding_CIKM_data(frames)
        frames = preprocess.reshape_patch(frames, args.patch_size)
        frames = nor(frames)

        eta, real_input_flag = schedule_sampling(eta, itr)
        cost = trainer.train(model, frames, real_input_flag, args, itr)

        if itr % args.display_interval == 0:
            print('itr: ' + str(itr))
            print('training loss: ' + str(cost))

        if itr % args.test_interval == 0:
            print('validation one ')
            valid_mse = wrapper_valid(model)
            print('validation mse is:', str(valid_mse))

            if valid_mse < best_mse:
                best_mse, best_iter, patience = valid_mse, itr, 0
                model.save()
            else:
                patience = patience + 1

            if patience == limit:
                # Out of patience: restore best weights and report test MSE.
                model.load()
                test_mse = wrapper_test(model)
                print('the best valid mse is:', str(best_mse))
                print('the test mse is ', str(test_mse))
                break
def wrapper_train(model):
    """Train with early stopping: quit after 2 stagnant validations.

    Each ``test_interval`` iterations the validation MSE is measured;
    improvement saves a checkpoint and resets the stall counter, and two
    consecutive stalls trigger reload of the best model plus a final
    test-MSE report before breaking out of the loop.
    """
    if args.pretrained_model:
        model.load(args.pretrained_model)

    eta = args.sampling_start_value
    best_mse = math.inf
    best_iter = None
    stalls, limit = 0, 2

    for itr in range(1, args.max_iterations + 1):
        # Sample, pad, patch-reshape and normalise one training batch.
        batch = nor(
            preprocess.reshape_patch(
                padding_CIKM_data(sample(batch_size=batch_size)),
                args.patch_size,
            )
        )

        eta, real_input_flag = schedule_sampling(eta, itr)
        cost = trainer.train(model, batch, real_input_flag, args, itr)

        if itr % args.display_interval == 0:
            print('itr: ' + str(itr))
            print('training loss: ' + str(cost))

        if itr % args.test_interval != 0:
            continue

        print('validation one ')
        valid_mse = wrapper_valid(model)
        print('validation mse is:', str(valid_mse))

        if valid_mse < best_mse:
            best_mse = valid_mse
            best_iter = itr
            stalls = 0
            model.save()
        else:
            stalls = stalls + 1

        if stalls == limit:
            # Patience exhausted: restore best checkpoint, report, stop.
            model.load()
            test_mse = wrapper_test(model)
            print('the best valid mse is:', str(best_mse))
            print('the test mse is ', str(test_mse))
            break
# Script-level training setup: device placement, data loader, optimizer,
# optional checkpoint resume, LR scheduler, then hand-off to the trainer.
if args.cuda:
    fasterRCNN.cuda()

data_loader_builder = KITTIBEVDataLoaderBuilder(data_config, training=True)
data_loader = data_loader_builder.build()

# optimizer
optimizer_builder = OptimizerBuilder(fasterRCNN, train_config['optimizer_config'])
optimizer = optimizer_builder.build()

scheduler_config = train_config['scheduler_config']

if args.resume:
    # Checkpoint file name encodes epoch and iteration counters.
    checkpoint_name = 'faster_rcnn_{}_{}.pth'.format(args.checkepoch, args.checkpoint)
    # saver.load appears to fill 'start_epoch' in-place via this dict —
    # NOTE(review): confirm the saver contract mutates params_dict.
    params_dict = {
        'start_epoch': None,
        'model': fasterRCNN,
        'optimizer': optimizer,
    }
    saver.load(params_dict, checkpoint_name)
    train_config['start_epoch'] = params_dict['start_epoch']
    # last_epoch lets the scheduler resume its decay from where it left off.
    scheduler_config['last_epoch'] = params_dict['start_epoch'] - 1

# scheduler(after resume)
scheduler_builder = SchedulerBuilder(optimizer, scheduler_config)
scheduler = scheduler_builder.build()

trainer.train(train_config, data_loader, fasterRCNN, optimizer, scheduler, saver)