def plot_model(
    model: ClassyModel,
    size: Tuple[int, ...] = (3, 224, 224),
    input_key: Optional[Union[str, List[str]]] = None,
    writer: Optional["SummaryWriter"] = None,
    folder: str = "",
    train: bool = True,
) -> None:
    """Visualizes a model in TensorBoard.

    The TensorBoard writer can either be specified directly via `writer` or
    created from a `folder`.

    The model can be run in training or evaluation mode via the `train` argument.

    Example usage on devserver:
    - Install TensorBoard using: `sudo feature install tensorboard`
    - Start TensorBoard using: `tensorboard --port=8098 --logdir <folder>`
    """
    assert (
        writer is not None or folder != ""
    ), "must specify SummaryWriter or folder to create SummaryWriter in"
    input = get_model_dummy_input(model, size, input_key)
    if writer is None:
        writer = SummaryWriter(log_dir=folder, comment="Model graph")
    with writer:
        orig_train = model.training
        model.train(train)  # visualize model in desired mode
        writer.add_graph(model, input_to_model=(input,))
        model.train(orig_train)
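# A minimal usage sketch for plot_model (illustrative, not part of the library):
# it wraps a torchvision ResNet in a ClassyModel and writes its graph to
# TensorBoard. The folder path and function name are assumptions; plot_model
# creates the SummaryWriter itself when only a folder is given.
def example_plot_model_usage():
    from torchvision import models

    model = ClassyModel.from_model(models.resnet18(pretrained=False))
    plot_model(
        model, size=(3, 224, 224), folder="/tmp/classy_model_graph", train=False
    )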
def test_from_checkpoint(self):
    config = get_test_task_config()
    for use_head in [True, False]:
        config["model"] = self.get_model_config(use_head)
        task = build_task(config)
        task.prepare()
        checkpoint_folder = f"{self.base_dir}/{use_head}/"
        input_args = {"config": config}

        # Simulate training by setting the model parameters to zero
        for param in task.model.parameters():
            param.data.zero_()

        checkpoint_hook = CheckpointHook(
            checkpoint_folder, input_args, phase_types=["train"]
        )

        # Create checkpoint dir, save checkpoint
        os.mkdir(checkpoint_folder)
        checkpoint_hook.on_start(task)
        task.train = True
        checkpoint_hook.on_phase_end(task)

        # Model should be checkpointed. Load and compare
        checkpoint = load_checkpoint(checkpoint_folder)
        model = ClassyModel.from_checkpoint(checkpoint)
        self.assertTrue(isinstance(model, MyTestModel))

        # All parameters must be zero
        for param in model.parameters():
            self.assertTrue(torch.all(param.data == 0))
def test_classy_model_adapter(self):
    model = TestModel()
    classy_model = ClassyModel.from_model(model)
    # test that the returned object is an instance of ClassyModel
    self.assertIsInstance(classy_model, ClassyModel)
    # test that the returned object is also an instance of _ClassyModelAdapter
    self.assertIsInstance(classy_model, _ClassyModelAdapter)

    # test that forward works correctly
    input = torch.zeros((100, 10))
    output = classy_model(input)
    self.assertEqual(output.shape, (100, 5))

    # test that extract_features works correctly
    input = torch.zeros((100, 10))
    output = classy_model.extract_features(input)
    self.assertEqual(output.shape, (100, 20))

    # test that get_classy_state and set_classy_state work
    nn.init.constant_(classy_model.model.linear.weight, 1)
    weights = copy.deepcopy(classy_model.model.linear.weight.data)
    state_dict = classy_model.get_classy_state(deep_copy=True)
    nn.init.constant_(classy_model.model.linear.weight, 0)
    classy_model.set_classy_state(state_dict)
    self.assertTrue(torch.allclose(weights, classy_model.model.linear.weight.data))
def test_classy_model_adapter_properties(self):
    # test that the properties work correctly when passed to the adapter
    model = TestModel()
    input_shape = (10,)
    model_depth = 1
    classy_model = ClassyModel.from_model(
        model, input_shape=input_shape, model_depth=model_depth
    )
    self.assertEqual(classy_model.input_shape, input_shape)
def test_heads(self):
    model = models.resnet50(pretrained=False)
    classy_model = ClassyModel.from_model(model)

    num_classes = 5
    head = FullyConnectedHead(
        unique_id="default", in_plane=2048, num_classes=num_classes
    )
    classy_model.set_heads({"layer4": [head]})

    input = torch.ones((1, 3, 224, 224))
    self.assertEqual(classy_model(input).shape, (1, num_classes))
def test_train_step(self):
    # test that the model can be run in a train step
    model = models.resnet34(pretrained=False)
    classy_model = ClassyModel.from_model(model)

    config = get_fast_test_task_config()
    task = build_task(config)
    task.set_model(classy_model)
    trainer = LocalTrainer()
    trainer.train(task)
def from_model(cls, model: Union[nn.Module, ClassyModel]) -> "ClassyHubInterface":
    """Instantiates the ClassyHubInterface from a model.

    This function returns a hub interface based on a ClassyModel. If a plain
    nn.Module is passed, it is wrapped in a ClassyModel first.

    Args:
        model: model to wrap, e.g. a torch.hub model
    """
    if not isinstance(model, ClassyModel):
        model = ClassyModel.from_model(model)
    return cls(model=model)
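# Hedged usage sketch for ClassyHubInterface.from_model (function and variable
# names are assumptions): a plain torchvision model is accepted and wrapped in a
# ClassyModel before the hub interface is constructed.
def example_hub_interface_from_model():
    from torchvision import models

    plain_model = models.resnet18(pretrained=False)
    hub_interface = ClassyHubInterface.from_model(plain_model)
    return hub_interface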
def _validate_and_get_optimizer_params(self, model: ClassyModel) -> Dict[str, Any]:
    """
    Validate and return the optimizer params.

    The optimizer params are fetched from
    :func:`models.ClassyModel.get_optimizer_params`.

    Args:
        model: The model to get the params from.

    Returns:
        A dict containing "regularized_params" and "unregularized_params".
        Weight decay will only be applied to "regularized_params".
    """
    if isinstance(model, torch.nn.parallel.DistributedDataParallel):
        optimizer_params = model.module.get_optimizer_params()
    else:
        optimizer_params = model.get_optimizer_params()

    assert isinstance(optimizer_params, dict) and set(optimizer_params.keys()) == {
        "regularized_params",
        "unregularized_params",
    }, (
        "get_optimizer_params() of {0} should return a dict with exactly two keys: "
        "'regularized_params', 'unregularized_params'".format(type(model).__name__)
    )

    trainable_params = [
        params for params in model.parameters() if params.requires_grad
    ]
    assert len(trainable_params) == len(
        optimizer_params["regularized_params"]
    ) + len(optimizer_params["unregularized_params"]), (
        "get_optimizer_params() of {0} should return params that cover all "
        "trainable params of the model".format(type(model).__name__)
    )

    return optimizer_params
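# Illustrative sketch (assumed helper name) of the contract that
# _validate_and_get_optimizer_params checks: get_optimizer_params() must return
# exactly the keys "regularized_params" and "unregularized_params", and together
# they must cover every trainable parameter. Here batch-norm parameters are
# excluded from weight decay as an example policy.
def example_get_optimizer_params(model: torch.nn.Module) -> Dict[str, Any]:
    regularized_params, unregularized_params = [], []
    for module in model.modules():
        # recurse=False so each parameter is counted exactly once
        params = [p for p in module.parameters(recurse=False) if p.requires_grad]
        if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
            unregularized_params.extend(params)
        else:
            regularized_params.extend(params)
    return {
        "regularized_params": regularized_params,
        "unregularized_params": unregularized_params,
    }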
def _get_mock_classy_vision_model(self, trainable_params=True):
    mock_classy_vision_model = ClassyModel()
    if trainable_params:
        mock_classy_vision_model.get_optimizer_params = MagicMock(
            return_value=self._get_optimizer_params()
        )
        mock_classy_vision_model.parameters = MagicMock(
            return_value=self._get_optimizer_params()["regularized_params"]
            + self._get_optimizer_params()["unregularized_params"]
        )
    else:
        mock_classy_vision_model.get_optimizer_params = MagicMock(
            return_value={"regularized_params": [], "unregularized_params": []}
        )
        mock_classy_vision_model.parameters = MagicMock(
            return_value=[
                param.detach()
                for param in self._get_optimizer_params()["regularized_params"]
                + self._get_optimizer_params()["unregularized_params"]
            ]
        )
    return mock_classy_vision_model
def __init__(self):
    super().__init__()
    self.resnet = ClassyModel.from_model(resnet50())
    self.relu = nn.ReLU()
    self.linear = nn.Linear(1000, 8)