def build_nn_module(self, nn_module_meta, nn_module_params):
    """Instantiate the nn_module and swap its classifier head.

    The final fully connected layer is replaced so the output size
    matches ``self.params['num_classes']``.
    """
    if nn_module_meta is None:
        raise ValueError("nn_module is required attribute for argus.Model")

    module_cls, params = choose_attribute_from_dict(nn_module_meta,
                                                    nn_module_params)
    module_cls = cast_nn_module(module_cls)
    module = module_cls(**params)

    # Replace the last fully connected layer with one sized for the
    # configured number of target classes.
    # NOTE(review): assumes the module exposes a single `classifier`
    # Linear layer — confirm against the nn_module definitions used.
    num_classes = self.params['num_classes']
    module.classifier = torch.nn.Linear(
        in_features=module.classifier.in_features,
        out_features=num_classes)
    return module
def test_not_dict_choose(self, linear_net_class):
    """A non-dict meta is returned untouched together with its params."""
    meta = linear_net_class
    params = {'in_features': 16, 'out_features': 1}

    chosen, chosen_params = choose_attribute_from_dict(meta, params)
    assert chosen is meta
    assert chosen_params == params

    # Tuple-style or scalar params are invalid when the meta is not a dict.
    for bad_params in (('qwerty', dict()), ('LinearNet', pytest), 42):
        with pytest.raises(TypeError):
            choose_attribute_from_dict(meta, bad_params)
def test_dict_choose(self, linear_net_class, vision_net_class):
    """A dict meta is resolved by the name given in the params tuple."""
    meta = {
        'LinearNet': linear_net_class,
        'VisionNet': vision_net_class,
    }
    net_params = {'in_features': 16, 'out_features': 1}

    chosen, chosen_params = choose_attribute_from_dict(
        meta, ('LinearNet', net_params))
    assert chosen is linear_net_class
    assert chosen_params == net_params

    # Unknown key -> ValueError; malformed params -> TypeError.
    with pytest.raises(ValueError):
        choose_attribute_from_dict(meta, ('qwerty', dict()))
    with pytest.raises(TypeError):
        choose_attribute_from_dict(meta, ('LinearNet', pytest))
    with pytest.raises(TypeError):
        choose_attribute_from_dict(meta, 42)
def build_optimizer(self, optimizer_meta, optim_params):
    """Build the optimizer with a reduced LR for pretrained layers.

    Parameters from the pretrained feature extractor get ``lr * 0.01``;
    all remaining parameters use the configured ``lr``.

    Fixes vs. original: membership test on pretrained parameter ids now
    uses a set (was an O(n) list lookup per parameter), and the incoming
    ``optim_params`` dict is copied before ``'lr'`` is deleted so the
    caller's config dict is no longer mutated.
    """
    optimizer_cls, optim_params = choose_attribute_from_dict(
        optimizer_meta, optim_params)
    optimizer_cls = cast_optimizer(optimizer_cls)

    # Collect the parameters of the pretrained feature-extractor modules.
    pretrain_modules = [self.nn_module.features]
    pretrain_params = []
    for module in pretrain_modules:
        pretrain_params.extend(module.parameters())

    # Set of ids gives O(1) membership tests when splitting parameters.
    pretrain_ids = {id(p) for p in pretrain_params}
    other_params = [p for p in self.nn_module.parameters()
                    if id(p) not in pretrain_ids]

    base_lr = optim_params['lr']
    grad_params = [
        {"params": pretrain_params, "lr": base_lr * 0.01},
        {"params": other_params, "lr": base_lr},
    ]

    # Copy before dropping 'lr' so the caller's dict is left intact;
    # 'lr' is supplied per parameter group instead.
    optim_params = dict(optim_params)
    del optim_params['lr']
    return optimizer_cls(params=grad_params, **optim_params)