def load_model(self, model_path, load_ae=False, map_location='cuda:1'):
    """Restore a trained model from a checkpoint file.

    The key step is re-initializing the hypersphere center ``self.c``,
    which is stored alongside the network weights.

    Args:
        model_path: Path to the checkpoint produced by ``torch.save``.
        load_ae: If True, also restore the autoencoder weights
            (building ``self.ae_net`` first when it does not exist yet).
        map_location: Device remapping passed through to ``torch.load``.
    """
    # NOTE(review): torch.load unpickles arbitrary objects -- only load
    # checkpoints from trusted sources.
    checkpoint = torch.load(model_path, map_location=map_location)

    # Hypersphere center learned during training.
    self.c = checkpoint['c']
    self.net.load_state_dict(checkpoint['net_dict'])

    # Optionally restore the pretraining autoencoder as well.
    if load_ae:
        if self.ae_net is None:
            self.ae_net = build_autoencoder(self.net_name)
        self.ae_net.load_state_dict(checkpoint['ae_net_dict'])
def pretrain(self, dataset, optimizer_name: str = 'adam', lr: float = 0.001,
             n_epochs: int = 100, lr_milestones: tuple = (50, 100, 150, 200),
             batch_size: int = 128, weight_decay: float = 1e-6,
             device: str = 'cuda:1', n_jobs_dataloader: int = 0,
             split_train: bool = False, split_test: bool = False):
    """Pretrain ``self.net`` weights by training an autoencoder on *dataset*.

    Builds the autoencoder, trains and evaluates it with an ``AETrainer``,
    records train/test wall-clock times in ``self.ae_results``, and finally
    copies the encoder weights into ``self.net``.

    Args:
        dataset: Dataset object consumed by the trainer's data loaders.
        optimizer_name: Optimizer used for autoencoder training.
        lr: Initial learning rate.
        n_epochs: Number of training epochs.
        lr_milestones: Epochs at which the learning rate is decayed.
        batch_size: Mini-batch size.
        weight_decay: L2 regularization strength.
        device: Torch device string to train on.
        n_jobs_dataloader: Number of data-loader worker processes.
        split_train: Forwarded to ``AETrainer.train``.
        split_test: Forwarded to ``AETrainer.test``.
    """
    self.ae_optimizer_name = optimizer_name

    # <build_autoencoder> differs from <build_network> but takes the
    # same network-name parameter.
    self.ae_net = build_autoencoder(self.net_name)

    # Train the autoencoder and keep the fitted instance.
    self.ae_trainer = AETrainer(optimizer_name, lr=lr, n_epochs=n_epochs,
                                lr_milestones=lr_milestones,
                                batch_size=batch_size,
                                weight_decay=weight_decay, device=device,
                                n_jobs_dataloader=n_jobs_dataloader)
    self.ae_net = self.ae_trainer.train(dataset, self.ae_net, split_train)
    self.ae_results['train_time'] = self.ae_trainer.train_time

    # Evaluate the autoencoder and record the elapsed time.
    self.ae_trainer.test(dataset, self.ae_net, split_test)
    self.ae_results['test_time'] = self.ae_trainer.test_time

    # Seed the main network with the pre-trained encoder weights.
    self.init_network_weights_from_pretraining()