def test_add_adapter_fusion_different_config(self):
    """Fusion of adapters sharing an architecture must succeed; fusing
    architecturally incompatible adapters must raise ``ValueError``.

    Adapters "a" and "b" are both Pfeiffer (differing only in
    reduction_factor), so their fusion is valid; "c" is Houlsby, so
    fusing it with "a" must fail.
    """
    model = AutoModel.from_config(self.config())
    model.eval()

    # fusion between a and b should be possible whereas fusion between a and c should fail
    model.add_adapter("a", config=PfeifferConfig(reduction_factor=16))
    model.add_adapter("b", config=PfeifferConfig(reduction_factor=2))
    model.add_adapter("c", config="houlsby")

    # correct fusion
    model.add_fusion(["a", "b"])
    self.assertIn("a,b", model.config.adapter_fusion_models)

    # failing fusion — context-manager form of assertRaises gives a
    # clearer traceback than wrapping the call in a lambda
    with self.assertRaises(ValueError):
        model.add_fusion(["a", "c"])
def test_add_adapter(self):
    """Adding an adapter registers it in ``model.config.adapters`` and
    makes the forward-pass output differ from the adapter-free base model.
    """
    model = AutoModel.from_config(self.config())
    model.eval()

    for adapter_config in [PfeifferConfig(), HoulsbyConfig()]:
        with self.subTest(model_class=model.__class__.__name__, config=adapter_config.__class__.__name__):
            name = adapter_config.__class__.__name__
            model.add_adapter(name, config=adapter_config)
            model.set_active_adapters([name])

            # adapter is correctly added to config — assertIn reports the
            # container contents on failure, unlike assertTrue(name in ...)
            self.assertIn(name, model.config.adapters)
            self.assertEqual(adapter_config, model.config.adapters.get(name))

            # check forward pass: active adapter must change the output
            input_ids = self.get_input_samples((1, 128), config=model.config)
            input_data = {"input_ids": input_ids}
            adapter_output = model(**input_data)
            model.set_active_adapters(None)
            base_output = model(**input_data)
            self.assertEqual(len(adapter_output), len(base_output))
            self.assertFalse(torch.equal(adapter_output[0], base_output[0]))
def test_adapter_forward(self):
    """Activating an adapter via ``set_active_adapters`` and via the
    forward-pass ``adapter_names`` argument must produce identical outputs.
    """
    model = self.get_model()
    model.eval()

    for adapter_config in [PfeifferConfig(), HoulsbyConfig()]:
        with self.subTest(model_class=model.__class__.__name__, config=adapter_config.__class__.__name__):
            name = adapter_config.__class__.__name__
            model.add_adapter(name, config=adapter_config)
            input_data = self.get_input_samples((1, 128), config=model.config)

            # set via property
            model.set_active_adapters([name])
            output_1 = model(**input_data)

            # unset and make sure it's unset — assertIsNone gives a more
            # specific failure message than assertEqual(None, ...)
            model.set_active_adapters(None)
            self.assertIsNone(model.active_adapters)

            # check forward pass with the explicit adapter_names argument
            output_2 = model(**input_data, adapter_names=[name])
            self.assertEqual(len(output_1), len(output_2))
            self.assertTrue(torch.equal(output_1[0], output_2[0]))
def test_add_adapter_multiple_reduction_factors(self):
    """Per-layer reduction factors are honored: layer "1" uses its own
    factor, all other layers fall back to the "default" entry.

    The reduction factor is observable as the ratio
    ``in_features / out_features`` of the adapter's down-projection.
    """
    model = self.get_model()
    model.eval()
    reduction_factor = {"1": 1, "default": 2}

    for adapter_config in [
        PfeifferConfig(reduction_factor=reduction_factor),
        HoulsbyConfig(reduction_factor=reduction_factor),
    ]:
        with self.subTest(model_class=model.__class__.__name__, config=adapter_config.__class__.__name__):
            name = adapter_config.__class__.__name__
            model.add_adapter(name, config=adapter_config)
            model.set_active_adapters([name])

            # adapter is correctly added to config — assertIn reports the
            # container contents on failure, unlike assertTrue(name in ...)
            self.assertIn(name, model.config.adapters)
            self.assertEqual(adapter_config, model.config.adapters.get(name))

            adapter = model.get_adapter(name)
            # layer 0 has no explicit entry → uses the "default" factor
            self.assertEqual(
                adapter[0]["output"].adapter_down[0].in_features
                / adapter[0]["output"].adapter_down[0].out_features,
                reduction_factor["default"],
            )
            # layer 1 uses the explicit "1" entry
            self.assertEqual(
                adapter[1]["output"].adapter_down[0].in_features
                / adapter[1]["output"].adapter_down[0].out_features,
                reduction_factor["1"],
            )
def test_add_adapter_fusion_different_config(self):
    """(Legacy ``AdapterType`` API) Fusing same-architecture adapters
    succeeds; fusing a Pfeiffer adapter with a Houlsby adapter must raise
    ``ValueError``.
    """
    for model_class in self.model_classes:
        model_config = model_class.config_class
        model = model_class(model_config())

        # fusion between a and b should be possible whereas fusion between a and c should fail
        model.add_adapter("a", AdapterType.text_task, config=PfeifferConfig(reduction_factor=16))
        model.add_adapter("b", AdapterType.text_task, config=PfeifferConfig(reduction_factor=2))
        model.add_adapter("c", AdapterType.text_task, config="houlsby")

        # correct fusion
        model.add_fusion(["a", "b"])
        self.assertIn("a,b", model.config.adapter_fusion_models)

        # failing fusion — context-manager form of assertRaises gives a
        # clearer traceback than wrapping the call in a lambda
        with self.assertRaises(ValueError):
            model.add_fusion(["a", "c"])
def test_reduction_factor_no_default(self):
    """A per-layer ``reduction_factor`` mapping without a "default" entry
    must be rejected with ``KeyError`` when the adapter is added.
    """
    model = AutoModel.from_config(self.config())
    model.eval()

    # neither key covers every layer and there is no "default" fallback
    layer_factors = {"2": 8, "4": 32}
    configs = (
        PfeifferConfig(reduction_factor=layer_factors),
        HoulsbyConfig(reduction_factor=layer_factors),
    )
    for cfg in configs:
        with self.subTest(model_class=model.__class__.__name__, config=cfg.__class__.__name__):
            adapter_name = cfg.__class__.__name__
            with self.assertRaises(KeyError):
                model.add_adapter(adapter_name, config=cfg)
def test_add_adapter_multiple_reduction_factors(self):
    # Verifies per-layer reduction factors: layer "1" uses its own factor (1)
    # while other layers fall back to the "default" factor (2). The factor is
    # observable as in_features / out_features of the adapter down-projection.
    model = AutoModel.from_config(self.config())
    model.eval()
    reduction_factor = {"1": 1, "default": 2}
    for adapter_config in [
        PfeifferConfig(reduction_factor=reduction_factor),
        HoulsbyConfig(reduction_factor=reduction_factor),
    ]:
        with self.subTest(model_class=model.__class__.__name__, config=adapter_config.__class__.__name__):
            name = adapter_config.__class__.__name__
            model.add_adapter(name, config=adapter_config)
            model.set_active_adapters([name])

            # adapter is correctly added to config
            self.assertTrue(name in model.config.adapters)
            self.assertEqual(adapter_config, model.config.adapters.get(name))

            # TODO: Add this method to model classes.
            def get_adapter_layer(idx):
                # Model-architecture-specific path to the adapter modules of
                # layer `idx`; the final else branch is the BERT-style default.
                if isinstance(model, RobertaModel):
                    adapter = model.encoder.layer[idx].output.adapters
                elif isinstance(model, DistilBertModel):
                    adapter = model.transformer.layer[
                        idx].output_adapters.adapters
                elif isinstance(model, BartModel) or isinstance(
                        model, MBartModel):
                    adapter = model.encoder.layers[
                        idx].output_adapters.adapters
                elif isinstance(model, GPT2Model):
                    adapter = model.h[idx].output_adapters.adapters
                else:
                    adapter = model.encoder.layer[idx].output.adapters
                # adapters are stored keyed by their config-class name
                return (adapter.PfeifferConfig if isinstance(
                    adapter_config, PfeifferConfig) else adapter.HoulsbyConfig)

            # layer 0 has no explicit entry -> "default" factor applies
            self.assertEqual(
                get_adapter_layer(0).adapter_down[0].in_features /
                get_adapter_layer(0).adapter_down[0].out_features,
                reduction_factor["default"],
            )
            # layer 1 uses the explicit "1" entry
            self.assertEqual(
                get_adapter_layer(1).adapter_down[0].in_features /
                get_adapter_layer(1).adapter_down[0].out_features,
                reduction_factor["1"],
            )