def main():
    # load data
    options = get_parser().parse_args()
    dataset = get_standard_dataset('lodopab')
    test_data = dataset.get_data_pairs('validation')
    ray_trafo = dataset.ray_trafo
    reduced_dataset = RandomSampleDataset(dataset, size_part=0.1,
                                          seed=options.seed)

    reconstructor = LearnedPDReconstructor(ray_trafo=ray_trafo, num_workers=8)
    reconstructor.load_hyper_params('params')
    reconstructor.save_best_learned_params_path = 'best-model-{}'.format(
        options.seed)
    reconstructor.log_dir = options.log_dir

    # create a Dival task table and run it
    task_table = TaskTable()
    task_table.append(reconstructor=reconstructor, measures=[PSNR, SSIM],
                      test_data=test_data, dataset=reduced_dataset,
                      hyper_param_choices=[reconstructor.hyper_params])
    results = task_table.run()

    # save report
    full_name = 'lodopab_learnedpd_{}'.format(options.seed)
    save_results_table(results, full_name)
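
# The script above relies on a RandomSampleDataset wrapper that is not part
# of dival itself. Below is a minimal sketch of such a wrapper, assuming
# dival's Dataset interface and a randomly accessible wrapped dataset; it is
# an illustration, not the project's actual implementation. Only the 'train'
# and 'validation' parts are subsampled.
import numpy as np
from dival.datasets.dataset import Dataset


class RandomSampleDataset(Dataset):
    """Expose a fixed random fraction ``size_part`` of another dataset."""
    def __init__(self, dataset, size_part=0.1, seed=42):
        super().__init__(space=dataset.space)
        self.dataset = dataset
        self.num_elements_per_sample = dataset.get_num_elements_per_sample()
        rng = np.random.RandomState(seed)
        self.indices = {}
        for part in ('train', 'validation'):
            part_len = int(size_part * dataset.get_len(part))
            setattr(self, part + '_len', part_len)
            # draw the subset of indices once, so every epoch sees the same
            # reduced dataset
            self.indices[part] = rng.choice(dataset.get_len(part),
                                            size=part_len, replace=False)

    def get_sample(self, index, part='train', out=None):
        # delegate to the wrapped dataset under the subsampled index
        return self.dataset.get_sample(self.indices[part][index],
                                       part=part, out=out)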
def main():
    # load data
    options = get_parser().parse_args()
    dataset = load_standard_dataset(options.dataset, ordered=True)
    ray_trafo = dataset.ray_trafo

    X = ray_trafo.range
    lodopab_cache_file_names = {
        'train': [None, None],
        'validation': [None, None]
    }
    if options.method == 'fbpunet':
        # the U-Net post-processes FBP reconstructions, so the first dataset
        # component lives in image space and is read from an FBP cache
        X = ray_trafo.domain
        train_path = 'cache_train_{}_fbp.npy'.format(options.dataset)
        validation_path = 'cache_validation_{}_fbp.npy'.format(
            options.dataset)
        lodopab_cache_file_names = {
            'train': [train_path, None],
            'validation': [validation_path, None]
        }

    cached_dataset = CachedDataset(dataset, space=(X, ray_trafo.domain),
                                   cache_files=lodopab_cache_file_names,
                                   size_part=options.size_part)
    test_data = dataset.get_data_pairs('validation',
                                       cached_dataset.validation_len)
    print('validation size: %d' % len(test_data))

    reconstructor = get_reconstructor(options.method,
                                      dataset=options.dataset,
                                      size_part=options.size_part,
                                      pretrained=False)
    print(reconstructor.hyper_params)

    full_name = '{}_{}_{}'.format(options.dataset, options.method,
                                  options.size_part)
    print(full_name)
    reconstructor.save_best_learned_params_path = get_weights_path(full_name)
    reconstructor.log_dir = options.log_dir
    reconstructor.num_data_loader_workers = 16

    # create a Dival task table and run it
    task_table = TaskTable()
    task_table.append(reconstructor=reconstructor, measures=[PSNR, SSIM],
                      test_data=test_data, dataset=cached_dataset,
                      hyper_param_choices=[reconstructor.hyper_params])
    results = task_table.run()

    # save report
    save_results_table(results, full_name)
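
# The 'fbpunet' branch above reads precomputed FBP reconstructions from
# ``cache_<part>_<dataset>_fbp.npy``. Below is a sketch of the one-time cache
# generation, assuming dival's FBPDataset and generate_cache_files helpers;
# the file names must match the ones constructed in main() above, and
# generate_fbp_caches itself is a hypothetical helper, not project code.
from dival.datasets.fbp_dataset import FBPDataset
from dival.datasets.cached_dataset import generate_cache_files


def generate_fbp_caches(dataset, ray_trafo, dataset_name):
    # pairs (fbp(y), x), with the FBP computed on the fly from observations
    fbp_dataset = FBPDataset(dataset, ray_trafo)
    cache_files = {
        'train': ('cache_train_{}_fbp.npy'.format(dataset_name), None),
        'validation': ('cache_validation_{}_fbp.npy'.format(dataset_name),
                       None)
    }
    # caches only the first component (the FBP); ground truths stay uncached
    generate_cache_files(fbp_dataset, cache_files)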
def main(): """Main function""" options = get_parser().parse_args() # options.dataset = 'lodopab' | 'ellipses' | 'lodopab-sparse' dataset = load_standard_dataset(options.dataset) test_data = dataset.get_data_pairs('test', 100) obs = list(y for y, x in test_data) gt = list(x for y, x in test_data) start = options.start count = options.count if count is None: count = len(test_data) test_data = DataPairs(obs[start:start + count], gt[start:start + count], name='test') # load reconstructor reconstructor = get_reconstructor(options.method, options.dataset, options.size_part) # eval on the test-set print('Reconstructor: %s' % options.method) print('Dataset: %s' % options.dataset) print('Offset: %d' % start) print('Count: %d' % count) task_table = TaskTable() task_table.append(reconstructor=reconstructor, measures=[PSNR, SSIM], test_data=test_data, options={'skip_training': True}) task_table.run() print(task_table.results.to_string(show_columns=['misc'])) if options.size_part is not None: save_path = '{}_{}_{}_eval'.format(options.dataset, options.method, options.size_part) else: save_path = '{}_{}_eval'.format(options.dataset, options.method) save_path += '_offset_%d' % start save_results_table(task_table.results, save_path)
def main():
    # load data
    options = get_parser().parse_args()
    dataset = load_standard_dataset('lodopab', ordered=True)
    ray_trafo = dataset.ray_trafo

    global_results = []

    sizes = [0.001, 0.01, 0.1, 1.00]
    # sizes = [0.0001]
    # sizes = [0.001]
    # sizes = [0.01]
    # sizes = [0.1]

    for size_part in sizes:
        cached_dataset = CachedDataset(dataset,
                                       space=(ray_trafo.range,
                                              ray_trafo.domain),
                                       cache_files={
                                           'train': [None, None],
                                           'validation': [None, None]
                                       },
                                       size_part=size_part)
        test_data = dataset.get_data_pairs('validation',
                                           cached_dataset.validation_len)
        print('validation size: %d' % len(test_data))

        full_size_epochs = 10 if size_part >= 0.10 else 5
        lr = 0.0001 if size_part >= 0.10 else 0.001
        # scale number of epochs by 1/size_part, but maximum 1000 times as
        # many epochs as for full size dataset
        epochs = min(1000 * full_size_epochs,
                     int(1. / size_part * full_size_epochs))

        reconstructor = LearnedPDReconstructor(
            ray_trafo,
            log_dir='lodopab_learnedpd_{}'.format(size_part),
            save_best_learned_params_path=get_weights_path(
                'lodopab_learnedpd_{}'.format(size_part)))

        # create a Dival task table and run it
        task_table = TaskTable()
        task_table.append(reconstructor=reconstructor,
                          measures=[PSNR, SSIM],
                          test_data=test_data,
                          dataset=cached_dataset,
                          hyper_param_choices={
                              'batch_size': [1],
                              'epochs': [epochs],
                              'niter': [10],
                              'internal_ch': [64 if size_part >= 0.10
                                              else 32],
                              'lr': [lr],
                              'lr_min': [lr],
                              'init_fbp': [True],
                              'init_frequency_scaling': [0.7]
                          })
        results = task_table.run()

        # save report
        save_results_table(results, 'lodopab_learnedpd_{}'.format(size_part))

        # select best parameters and save them
        best_choice, best_error = select_hyper_best_parameters(results)
        params = Params(best_choice)
        params.save('lodopab_learnedpd_{}'.format(size_part))
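
# The epoch schedule above scales the full-size epoch budget by 1/size_part,
# capped at 1000x. A small self-check (hypothetical helper name) reproducing
# the values the loop above uses for the four dataset sizes:
def scheduled_epochs(size_part):
    full_size_epochs = 10 if size_part >= 0.10 else 5
    return min(1000 * full_size_epochs,
               int(1. / size_part * full_size_epochs))


assert [scheduled_epochs(s) for s in (0.001, 0.01, 0.1, 1.00)] == \
    [5000, 500, 100, 10]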
        reconstructor = LearnedGDReconstructor(
            dataset.ray_trafo,
            save_best_learned_params_path=get_weights_path(
                'ellipses_learnedgd_{}'.format(size_part)))

        # create a Dival task table and run it
        task_table = TaskTable()
        task_table.append(reconstructor=reconstructor,
                          measures=[PSNR, SSIM],
                          test_data=test_data,
                          dataset=cached_dataset,
                          hyper_param_choices={
                              'batch_size': [32],
                              'epochs': [epochs],
                              'niter': [10],
                              'lr': [0.001],
                              'lr_time_decay_rate': [3.2 * size_part]
                          })
        results = task_table.run()

        # save report
        save_results_table(results,
                           'ellipses_learnedgd_{}'.format(size_part))

        # select best parameters and save them
        best_choice, best_error = select_hyper_best_parameters(results)
        params = Params(best_choice)
        params.save('ellipses_learnedgd_{}'.format(size_part))

        # retraining with the optimal parameters is disabled here; the
        # weights left in the reconstructor after the search run are saved
        # directly
        # reconstructor = LearnedGDReconstructor(dataset.ray_trafo,
        #                                        hyper_params=params.dict)
        # reconstructor.train(cached_dataset)
        save_weights(reconstructor, 'ellipses_learnedgd_{}'.format(size_part))
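
# ``Params``, ``save_weights`` and ``get_weights_path`` are small project
# utilities. Below is a hypothetical minimal version of Params, persisting
# the selected hyper-parameters as JSON so that later runs can reload them;
# the actual project implementation may differ.
import json


class Params:
    def __init__(self, d):
        self.dict = dict(d)

    def save(self, name):
        # store the hyper-parameter choice next to the saved weights
        with open(name + '.json', 'w') as f:
            json.dump(self.dict, f, indent=2)

    @classmethod
    def load(cls, name):
        with open(name + '.json') as f:
            return cls(json.load(f))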