def main():
    # ===========================================================
    # Set train dataset & test dataset
    # ===========================================================
    print('===> Loading datasets')
    train_set = get_training_set(args.upscale_factor)
    test_set = get_test_set(args.upscale_factor)
    training_data_loader = DataLoader(dataset=train_set, batch_size=args.batchSize, shuffle=True)
    testing_data_loader = DataLoader(dataset=test_set, batch_size=args.testBatchSize, shuffle=False)

    if args.model == 'sub':
        model = SubPixelTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'srcnn':
        model = SRCNNTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'vdsr':
        model = VDSRTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'edsr':
        model = EDSRTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'fsrcnn':
        model = FSRCNNTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'drcn':
        model = DRCNTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'srgan':
        model = SRGANTrainer(args, training_data_loader, testing_data_loader)
    elif args.model == 'dbpn':
        model = DBPNTrainer(args, training_data_loader, testing_data_loader)
    else:
        raise Exception("the model does not exist")

    model.run()
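# A hypothetical invocation of the dispatcher above, assuming this main() lives in a
# script named main.py (the script name and the flag values are assumptions, not taken
# from the original):
#
#   python main.py --model fsrcnn --upscale_factor 4 --batchSize 64 --testBatchSize 10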
def main():
    # ===========================================================
    # Set train dataset & test dataset
    # ===========================================================
    print('===> Loading datasets')
    train_set = get_training_set(args.upscale_factor)
    test_set = get_test_set(args.upscale_factor)
    training_data_loader = DataLoader(dataset=train_set, batch_size=args.batchSize, shuffle=True)
    testing_data_loader = DataLoader(dataset=test_set, batch_size=args.testBatchSize, shuffle=False)

    model = FSRCNNTrainer(args, training_data_loader, testing_data_loader)
    print("USE", model.device)
    model.run()
def main():
    # ===========================================================
    # Set train dataset & test dataset
    # ===========================================================
    print('===> Loading datasets')
    train_set = get_training_set(args.upscale_factor, args.image_dir)
    test_set = get_test_set(args.upscale_factor, args.image_dir)
    training_data_loader = DataLoader(dataset=train_set, batch_size=args.batchSize, shuffle=True)
    testing_data_loader = DataLoader(dataset=test_set, batch_size=args.testBatchSize, shuffle=False)

    # ===========================================================
    # Generate Model from training data set
    # ===========================================================
    model = SRGANTrainer(args, training_data_loader, testing_data_loader)
    model.run()
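# All three main() variants read their hyperparameters from a module-level `args` object.
# Below is a minimal sketch of how such an object might be built with argparse; the flag
# names mirror the attributes used above, but the defaults, help strings, and the
# __main__ guard are assumptions rather than the original code.
import argparse

parser = argparse.ArgumentParser(description='PyTorch super-resolution training')
parser.add_argument('--model', type=str, default='srcnn',
                    help='sub | srcnn | vdsr | edsr | fsrcnn | drcn | srgan | dbpn')
parser.add_argument('--upscale_factor', type=int, default=4,
                    help='super-resolution upscale factor')
parser.add_argument('--batchSize', type=int, default=64,
                    help='training batch size')
parser.add_argument('--testBatchSize', type=int, default=10,
                    help='testing batch size')
parser.add_argument('--image_dir', type=str, default='./dataset',
                    help='image directory passed to the dataset helpers in the SRGAN variant')
args = parser.parse_args()

if __name__ == '__main__':
    main()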