Example #1
    def test_save_load(self):
        # Requires: os, tempfile, torch, torch.nn as nn, MultitaskModel, and the
        # test suite's create_task helper.
        fd, checkpoint_path = tempfile.mkstemp()

        task1 = create_task("task1")
        task2 = create_task("task2")
        # Make task2's second linear layer have different weights
        task2.module_pool["linear2"] = nn.Linear(2, 2)

        # A model built from task1 shares task1's weights.
        model = MultitaskModel([task1])
        self.assertTrue(
            torch.eq(
                task1.module_pool["linear2"].weight,
                model.module_pool["linear2"].module.weight,
            ).all()
        )
        model.save(checkpoint_path)

        # A model built from task2 has different weights than task1.
        model = MultitaskModel([task2])
        self.assertFalse(
            torch.eq(
                task1.module_pool["linear2"].weight,
                model.module_pool["linear2"].module.weight,
            ).all()
        )

        # Loading the checkpoint restores task1's weights.
        model.load(checkpoint_path)
        self.assertTrue(
            torch.eq(
                task1.module_pool["linear2"].weight,
                model.module_pool["linear2"].module.weight,
            ).all()
        )

        os.close(fd)
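
The test above relies on MultitaskModel.save/load and the test suite's create_task helper, neither of which is shown here. The minimal sketch below illustrates the same round-trip pattern with plain PyTorch only, under the assumption that saving and loading a state_dict through a temporary file should leave the weights unchanged; all names in the sketch are local to the example, not part of the library's API.

import os
import tempfile

import torch
import torch.nn as nn

# Two layers with independently initialized (hence different) weights.
source = nn.Linear(2, 2)
target = nn.Linear(2, 2)
assert not torch.eq(source.weight, target.weight).all()

# Save the source weights to a temporary checkpoint file.
fd, checkpoint_path = tempfile.mkstemp(suffix=".pth")
torch.save(source.state_dict(), checkpoint_path)

# Load the checkpoint into the target layer; its weights now match the source.
target.load_state_dict(torch.load(checkpoint_path))
assert torch.eq(source.weight, target.weight).all()

os.close(fd)
os.remove(checkpoint_path)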
Example #2
    def load_best_model(self, model: MultitaskModel) -> MultitaskModel:
        """Load the best model from the checkpoint."""
        metric = list(self.checkpoint_metric.keys())[0]
        if metric not in self.best_metric_dict:  # pragma: no cover
            logging.info("No best model found, use the original model.")
        else:
            # Load the best model of checkpoint_metric
            best_model_path = (
                f"{self.checkpoint_dir}/best_model_{metric.replace('/', '_')}.pth"
            )
            logging.info(f"Loading the best model from {best_model_path}.")
            model.load(best_model_path)

        return model
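
For reference, the f-string above flattens the metric name into a filesystem-safe checkpoint filename. A small illustration of that path construction, using a made-up metric name and checkpoint directory:

# Hypothetical values; only the replace('/', '_') pattern comes from the snippet above.
metric = "task1/valid/accuracy"
checkpoint_dir = "checkpoints"
best_model_path = f"{checkpoint_dir}/best_model_{metric.replace('/', '_')}.pth"
print(best_model_path)  # checkpoints/best_model_task1_valid_accuracy.pth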