def test_load_adapter_fusion(self):
    """Saving a fused adapter setup and reloading it into a deep copy must
    reproduce both the fusion config and the model outputs exactly."""
    for fusion_config_name in ADAPTERFUSION_CONFIG_MAP:
        model1 = AutoModel.from_config(self.config())
        model1.eval()

        with self.subTest(model_class=model1.__class__.__name__):
            name1, name2 = "name1", "name2"
            model1.add_adapter(name1)
            model1.add_adapter(name2)

            # model2 is an identical twin of model1 before the fusion is added
            model2 = copy.deepcopy(model1)
            model2.eval()

            model1.add_fusion([name1, name2], fusion_config_name)
            with tempfile.TemporaryDirectory() as temp_dir:
                model1.save_adapter_fusion(temp_dir, ",".join([name1, name2]))
                model2.load_adapter_fusion(temp_dir)

            # check if adapter was correctly loaded
            self.assertTrue(model1.config.adapter_fusion_models == model2.config.adapter_fusion_models)

            # check equal output
            in_data = self.get_input_samples((1, 128), config=model1.config)
            model1.set_active_adapters([[name1, name2]])
            model2.set_active_adapters([[name1, name2]])
            output1 = model1(in_data)
            output2 = model2(in_data)
            self.assertEqual(len(output1), len(output2))
            self.assertTrue(torch.equal(output1[0], output2[0]))
def test_add_adapter_fusion(self):
    """Adding two adapters plus a fusion layer must register the adapters in the
    model config and make the fused forward pass differ from the base model's."""
    config_name = "pfeiffer"
    expected_config = AdapterConfig.load(config_name)

    for fusion_config_name in ADAPTERFUSION_CONFIG_MAP:
        model = AutoModel.from_config(self.config())
        model.eval()

        with self.subTest(model_class=model.__class__.__name__, config=config_name):
            name1 = f"{config_name}-1"
            name2 = f"{config_name}-2"
            model.add_adapter(name1, config=config_name)
            model.add_adapter(name2, config=config_name)

            # adapter is correctly added to config
            self.assertTrue(name1 in model.config.adapters)
            self.assertTrue(name2 in model.config.adapters)
            self.assertEqual(asdict(expected_config), asdict(model.config.adapters.get(name1)))
            self.assertEqual(asdict(expected_config), asdict(model.config.adapters.get(name2)))

            model.add_fusion([name1, name2], fusion_config_name)

            # check forward pass
            input_ids = self.get_input_samples((1, 128), config=model.config)
            input_data = {"input_ids": input_ids}
            model.set_active_adapters([[name1, name2]])
            adapter_output = model(**input_data)
            model.set_active_adapters(None)
            base_output = model(**input_data)
            self.assertEqual(len(adapter_output), len(base_output))
            self.assertFalse(torch.equal(adapter_output[0], base_output[0]))
def test_model_config_serialization_fusion(self):
    """PretrainedConfigurations should not raise an Exception when serializing the config dict

    See, e.g., PretrainedConfig.to_json_string()
    """
    for fusion_config in ADAPTERFUSION_CONFIG_MAP.values():
        model = self.get_model()
        model.add_adapter("test1")
        model.add_adapter("test2")
        model.add_adapter_fusion(["test1", "test2"], config=fusion_config)
        # should not raise an exception
        model.config.to_json_string()
def test_model_config_serialization(self):
    """PretrainedConfigurations should not raise an Exception when serializing the config dict

    See, e.g., PretrainedConfig.to_json_string()
    """
    for model_class in self.model_classes:
        for fusion_config in ADAPTERFUSION_CONFIG_MAP.values():
            model = model_class(model_class.config_class())
            model.add_adapter("test1", AdapterType.text_task)
            model.add_adapter("test2", AdapterType.text_task)
            model.add_fusion(["test1", "test2"], adapter_fusion_config=fusion_config)
            # should not raise an exception
            model.config.to_json_string()
def test_add_adapter_fusion(self):
    """Adding two adapters of every config/type combination and fusing them must
    register the adapters in the model config and produce a forward pass whose
    output differs from the base model's.

    Fix: the model is now put into eval mode before the forward passes. Without
    it, active dropout makes both outputs nondeterministic, so the
    ``assertFalse(torch.equal(...))`` check would pass even if the fusion layer
    had no effect at all. This also matches the other fusion tests in this file,
    which all call ``eval()`` before comparing outputs.
    """
    for fusion_config_name in ADAPTERFUSION_CONFIG_MAP:
        for config_name, adapter_config in ADAPTER_CONFIG_MAP.items():
            for type_name, adapter_type in AdapterType.__members__.items():
                # skip configs without invertible language adapters
                # (hoisted before model construction so skipped combinations
                # don't pay for building a model)
                if adapter_type == AdapterType.text_lang and not adapter_config.invertible_adapter:
                    continue
                for model_class in self.model_classes:
                    model = model_class(model_class.config_class())
                    # disable dropout so the output comparison is deterministic
                    model.eval()

                    with self.subTest(model_class=model_class, config=config_name, adapter_type=type_name):
                        name1 = f"{type_name}-{config_name}-1"
                        name2 = f"{type_name}-{config_name}-2"
                        model.add_adapter(name1, adapter_type, config=adapter_config)
                        model.add_adapter(name2, adapter_type, config=adapter_config)

                        # adapter is correctly added to config
                        self.assertTrue(name1 in model.config.adapters.adapter_list(adapter_type))
                        self.assertTrue(name2 in model.config.adapters.adapter_list(adapter_type))
                        self.assertEqual(adapter_config, model.config.adapters.get(name1))
                        self.assertEqual(adapter_config, model.config.adapters.get(name2))

                        model.add_fusion([name1, name2], fusion_config_name)

                        # check forward pass
                        input_ids = ids_tensor((1, 128), 1000)
                        input_data = {"input_ids": input_ids}
                        if adapter_type in (AdapterType.text_task, AdapterType.text_lang):
                            input_data["adapter_names"] = [[name1, name2]]
                        adapter_output = model(**input_data)
                        base_output = model(input_ids)
                        self.assertEqual(len(adapter_output), len(base_output))
                        self.assertFalse(torch.equal(adapter_output[0], base_output[0]))
def test_load_adapter_fusion(self):
    """Saving a fusion layer and loading it into a twin model must reproduce
    both the fusion config and the model outputs exactly."""
    for fusion_config_name in ADAPTERFUSION_CONFIG_MAP:
        for type_name, adapter_type in AdapterType.__members__.items():
            for model_class in self.model_classes:
                with self.subTest(model_class=model_class, adapter_type=type_name):
                    model1 = model_class(model_class.config_class())
                    name1, name2 = "name1", "name2"
                    model1.add_adapter(name1, adapter_type)
                    model1.add_adapter(name2, adapter_type)

                    # twin models share identical adapter weights
                    model1, model2 = create_twin_models(model1)

                    model1.add_fusion([name1, name2], fusion_config_name)
                    with tempfile.TemporaryDirectory() as temp_dir:
                        model1.save_adapter_fusion(temp_dir, ",".join([name1, name2]))
                        model2.load_adapter_fusion(temp_dir)
                    model1.eval()
                    model2.eval()

                    # check if adapter was correctly loaded
                    self.assertTrue(model1.config.adapter_fusion_models == model2.config.adapter_fusion_models)

                    # check equal output
                    in_data = ids_tensor((1, 128), 1000)
                    output1 = model1(in_data, adapter_names=[[name1, name2]])
                    output2 = model2(in_data, adapter_names=[[name1, name2]])
                    self.assertEqual(len(output1), len(output2))
                    self.assertTrue(torch.equal(output1[0], output2[0]))