def test_partly_pretrained_train(self):
    path_to_pretrained_json = os.path.join(get_project_root(),
                                           'res_experiments',
                                           'hps_common_mlp.json')
    path_to_pretrained_model = os.path.join(get_project_root(),
                                            'res_experiments',
                                            'trained_models', 'test.pt')
    hps_pretrained = HyperParams.from_file(
        path_to_json=path_to_pretrained_json)
    model_common = Model(hps_pretrained)
    model_common.load_model(path_to_pretrained_model)

    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_partly_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model_part = Model(hps)
    model_part.load_pretrained_bottom(path_to_pretrained_model,
                                      path_to_pretrained_json)

    filename = '../data/small_parameters_base.fits'
    history = model_part.train(
        filename=filename,
        path_to_save='../res_experiments/trained_models/partly_test.pt',
        pretrained_bottom=True,
        logdir='../res_experiments/')

    # The pretrained bottom is frozen during training, so its first parameter
    # tensor should still match the common model's.
    par_0_com = list(zip(*model_common.net.bottom.mlp.named_parameters())
                     )[1][0].detach().numpy()
    par_0_par = list(zip(*model_part.net.bottom.mlp.named_parameters())
                     )[1][0].detach().numpy()
    assert par_0_com == pytest.approx(par_0_par)
def test_partly_pretrained_init(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_common_mlp.json')
    path_to_model = os.path.join(get_project_root(), 'res_experiments',
                                 'trained_models', 'test.pt')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model_common = Model(hps)
    model_common.load_model(path_to_model)

    # Keep only the 'bottom' part of the pretrained weights and copy it into
    # the partly independent model.
    pretrained_dict = model_common.net.state_dict()
    pretrained_dict = {
        k: v for k, v in pretrained_dict.items() if 'bottom' in k
    }

    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_partly_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model_part = Model(hps)
    model_dict = model_part.net.state_dict()
    model_dict.update(pretrained_dict)
    model_part.net.load_state_dict(model_dict)

    # After the copy the bottom parameters of both models should coincide.
    par_0_part = list(
        zip(*model_part.net.bottom.mlp.named_parameters()))[1][0]
    par_0_com = list(
        zip(*model_common.net.bottom.mlp.named_parameters()))[1][0]
    assert par_0_com.detach().numpy() == pytest.approx(
        par_0_part.detach().numpy())
def test_partly_pretrained_fit_step(self):
    path_to_pretrained_json = os.path.join(get_project_root(),
                                           'res_experiments',
                                           'hps_common_mlp.json')
    path_to_pretrained_model = os.path.join(get_project_root(),
                                            'res_experiments',
                                            'trained_models', 'test.pt')
    hps_pretrained = HyperParams.from_file(
        path_to_json=path_to_pretrained_json)
    model_common = Model(hps_pretrained)
    model_common.load_model(path_to_pretrained_model)

    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_partly_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model_part = Model(hps)
    model_part.load_pretrained_bottom(path_to_pretrained_model,
                                      path_to_pretrained_json)

    train_loader, val_loader = model_part.make_loader(
        filename='../data/small_parameters_base.fits')
    loss = model_part.fit_step(train_loader, pretrained_bottom=True)

    # A single fit step with a frozen bottom must not change its parameters.
    par_0_com = list(zip(*model_common.net.bottom.mlp.named_parameters())
                     )[1][0].detach().numpy()
    par_0_par = list(zip(*model_part.net.bottom.mlp.named_parameters())
                     )[1][0].detach().numpy()
    assert par_0_com == pytest.approx(par_0_par)
def test_resnet_train(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_base_resnet.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    hps.trainset = 5
    model = Model(hps)
    history = model.train()
    assert history[0][0] > 0
def test_model_conv_train(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_base_conv.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model = Model(hps)
    # Smoke-check that the loaders can be constructed and iterated before
    # training.
    x = model.make_loader()
    x_ = next(iter(x))
    history = model.train()
    assert history[0][0] > 0
def test_model_partly_ind_train(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_partly_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model = Model(hps)
    filename = '../data/small_parameters_base.fits'
    history = model.train(
        filename=filename,
        path_to_save='../res_experiments/trained_models/partly_test.pt',
        logdir='../res_experiments/')
    # model.save_model(path_to_save='../res_experiments/trained_models/test.pt')
    assert True
def test_load_pretrained_bottom(self):
    path_to_pretrained_json = os.path.join(get_project_root(),
                                           'res_experiments',
                                           'hps_common_mlp.json')
    path_to_pretrained_model = os.path.join(get_project_root(),
                                            'res_experiments',
                                            'trained_models', 'test.pt')
    hps_pretrained = HyperParams.from_file(
        path_to_json=path_to_pretrained_json)
    model_common = Model(hps_pretrained)
    model_common.load_model(path_to_pretrained_model)

    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_partly_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model_part = Model(hps)
    model_part.load_pretrained_bottom(path_to_pretrained_model,
                                      path_to_pretrained_json)

    # After loading, the bottom parameters should match the pretrained model.
    par_0_part = list(
        zip(*model_part.net.bottom.mlp.named_parameters()))[1][0]
    par_0_com = list(
        zip(*model_common.net.bottom.mlp.named_parameters()))[1][0]
    assert par_0_com.detach().numpy() == pytest.approx(
        par_0_part.detach().numpy())
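# test_partly_pretrained_train, test_partly_pretrained_fit_step and
# test_load_pretrained_bottom all rebuild the same pretrained "common" model.
# A fixture along these lines could remove that duplication. This is only a
# sketch assembled from the calls used above, not part of the tested API.
@pytest.fixture
def pretrained_common_model(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_common_mlp.json')
    path_to_model = os.path.join(get_project_root(), 'res_experiments',
                                 'trained_models', 'test.pt')
    # Build the common model and load the pretrained weights from disk.
    model = Model(HyperParams.from_file(path_to_json=path_to_json))
    model.load_model(path_to_model)
    return model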
def test_common_model_train(self, common_mlp_rescale_hps):
    # Hyper-parameters come from the common_mlp_rescale_hps fixture defined
    # below, which loads hps_common_mlp.json.
    hps = common_mlp_rescale_hps
    model = Model(hps)
    filename = '../data/small_parameters_base.fits'
    history = model.train(
        filename=filename,
        pregen=True,
        path_to_save='../res_experiments/trained_models/test.pt',
        logdir='../res_experiments/')
    model.save_model(path='../res_experiments/trained_models/common.pt')
    assert True
def test_predict_one_pixel_conv(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_base_conv.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    hps.trainset = 1
    hps.valset = 1
    hps.n_epochs = 1
    hps.batch_size = 1
    model = Model(hps)
    history = model.train()

    filename = Path(os.getcwd()).parent / 'data' / "20170905_030404.fits"
    ref = fits.open(filename)
    predicted, y, x, _ = model.predict_one_pixel(ref, 3, 4)
    assert predicted[0].shape == torch.Size([1, 3])
def test_predict_full_image_conv(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_base_conv.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    hps.trainset = 1
    hps.valset = 1
    hps.n_epochs = 1
    hps.batch_size = 1
    model = Model(hps)
    history = model.train()

    filename = Path(os.getcwd()).parent / 'data' / "20170905_030404.fits"
    ref = fits.open(filename)
    predicted, params, lines, cont = model.predict_full_image(ref, cnn=True)
    # One 3-component prediction per pixel of the reference image.
    assert predicted.shape == (ref[1].data.shape + (3, ))
def test_model_ind_train(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_independent_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    model = Model(hps)
    filename = '../data/small_parameters_base.fits'
    history = model.train(
        filename=filename,
        path_to_save='../res_experiments/trained_models/test.pt',
        logdir='../res_experiments/')
    # model.save_model(path_to_save='../res_experiments/trained_models/test.pt')
    # list(zip(*self.net.top.task_layers[0].named_parameters()))[1][0].grad

    # Smoke test: the per-task parameter groups of the independent top must be
    # reachable after training.
    params_groups_0 = list(
        zip(*model.net.top.task_layers[0].named_parameters()))
    params_groups_3 = list(
        zip(*model.net.top.task_layers[3].named_parameters()))
    assert True
@pytest.fixture
def base_mlp_standard_hps(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_base_mlp_standard.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    return hps
@pytest.fixture
def common_mlp_rescale_hps(self):
    path_to_json = os.path.join(get_project_root(), 'res_experiments',
                                'hps_common_mlp.json')
    hps = HyperParams.from_file(path_to_json=path_to_json)
    return hps
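# The comparisons above repeatedly use
# list(zip(*module.named_parameters()))[1][0] to pick out the first parameter
# tensor of the bottom MLP. A helper along these lines could make that intent
# explicit. It is only a sketch and assumes the net.bottom.mlp layout used in
# the tests above.
def _first_bottom_param(self, model):
    """Return the first parameter of ``model.net.bottom.mlp`` as a numpy array."""
    # named_parameters() yields (name, tensor) pairs; unzip and take the first
    # tensor, detached from the autograd graph.
    _, params = zip(*model.net.bottom.mlp.named_parameters())
    return params[0].detach().numpy()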