DON'T MERGE This is for debugging only #1135

Closed
1 change: 1 addition & 0 deletions src/peft/tuners/ia3/model.py
@@ -283,6 +283,7 @@ def set_adapter(self, adapter_name):
                    warnings.warn("Adapter cannot be set when the model is merged. Unmerging the model first.")
                    module.unmerge()
                module.set_adapter(adapter_name)
        self.active_adapter = adapter_name

    def _prepare_adapter_config(self, peft_config, model_config):
        if peft_config.target_modules is None:
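A minimal sketch of the behaviour this one-line change is meant to guarantee (the checkpoint and adapter names below are illustrative, not taken from this PR): after set_adapter, the IA³ tuner's active_adapter should point at the newly selected adapter, so a later merge_and_unload merges that adapter rather than the previously active one.

# Minimal sketch, assuming an OPT checkpoint whose IA³ layers support merging;
# the model id and adapter names are illustrative.
from transformers import AutoModelForCausalLM
from peft import IA3Config, get_peft_model

base = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
config = IA3Config(task_type="CAUSAL_LM", init_ia3_weights=False)

model = get_peft_model(base, config, "adapter-1")
model.add_adapter("adapter-2", config)
model.set_adapter("adapter-2")

# Without the added line, IA3Model.set_adapter switched the individual layers
# but left self.active_adapter pointing at the previously active adapter.
print(model.active_adapters)  # expected: ["adapter-2"]

merged = model.merge_and_unload()  # should now merge adapter-2, the active adapter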
26 changes: 26 additions & 0 deletions tests/test_decoder_models.py
@@ -157,6 +157,32 @@ def test_from_pretrained_config_construction(self, test_name, model_id, config_c
    def test_merge_layers(self, test_name, model_id, config_cls, config_kwargs):
        self._test_merge_layers(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "CAUSAL_LM",
            },
        )
    )
    def test_merge_layers_multi_1(self, test_name, model_id, config_cls, config_kwargs):
        self._test_merge_layers_multi_1(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
                "model_ids": PEFT_DECODER_MODELS_TO_TEST,
                "lora_kwargs": {"init_lora_weights": [False]},
                "ia3_kwargs": {"init_ia3_weights": [False]},
                "task_type": "CAUSAL_LM",
            },
        )
    )
    def test_merge_layers_multi_2(self, test_name, model_id, config_cls, config_kwargs):
        self._test_merge_layers_multi_2(model_id, config_cls, config_kwargs)

    @parameterized.expand(
        PeftTestConfigManager.get_grid_parameters(
            {
83 changes: 83 additions & 0 deletions tests/testing_common.py
@@ -561,6 +561,89 @@ def _test_merge_layers(self, model_id, config_cls, config_kwargs):
        logits_merged_from_pretrained = model_from_pretrained(**dummy_input)[0]
        self.assertTrue(torch.allclose(logits_merged, logits_merged_from_pretrained, atol=atol, rtol=rtol))

    def _test_merge_layers_multi_1(self, model_id, config_cls, config_kwargs):
        # Checking that merging the first adapter works as expected
        supported_peft_types = [PeftType.LORA, PeftType.LOHA, PeftType.LOKR, PeftType.IA3]

        if ("gpt2" in model_id.lower()) and (config_cls == IA3Config):
            self.skipTest("Merging GPT2 adapters not supported for IA³ (yet)")

        config = config_cls(
            base_model_name_or_path=model_id,
            **config_kwargs,
        )

        if config.peft_type not in supported_peft_types:
            self.skipTest("Merging not supported for this PEFT type")

        torch.manual_seed(0)
        model = self.transformers_class.from_pretrained(model_id)
        model = get_peft_model(model, config, "adapter-1")
        model = model.to(self.torch_device)
        model.eval()
        assert model.active_adapters == ["adapter-1"]

        dummy_input = self.prepare_inputs_for_testing()
        with torch.no_grad():
            logits_adapter_1 = model(**dummy_input)[0]

        model.add_adapter("adapter-2", config)
        model.set_adapter("adapter-2")
        assert model.active_adapters == ["adapter-2"]

        with torch.no_grad():
            logits_adapter_2 = model(**dummy_input)[0]

        self.assertFalse(torch.allclose(logits_adapter_1, logits_adapter_2, atol=1e-3, rtol=1e-3))

        model.set_adapter("adapter-1")
        model_unloaded = model.merge_and_unload()  # should merge adapter-1, the active adapter
        with torch.no_grad():
            logits_merged = model_unloaded(**dummy_input)[0]

        self.assertTrue(torch.allclose(logits_adapter_1, logits_merged, atol=1e-3, rtol=1e-3))

    def _test_merge_layers_multi_2(self, model_id, config_cls, config_kwargs):
        # same as _test_merge_layers_multi_1, but this time checking if merging the second adapter works
        supported_peft_types = [PeftType.LORA, PeftType.LOHA, PeftType.LOKR, PeftType.IA3]

        if ("gpt2" in model_id.lower()) and (config_cls == IA3Config):
            self.skipTest("Merging GPT2 adapters not supported for IA³ (yet)")

        config = config_cls(
            base_model_name_or_path=model_id,
            **config_kwargs,
        )

        if config.peft_type not in supported_peft_types:
            self.skipTest("Merging not supported for this PEFT type")

        torch.manual_seed(0)
        model = self.transformers_class.from_pretrained(model_id)
        model = get_peft_model(model, config, "adapter-1")
        model = model.to(self.torch_device)
        model.eval()
        assert model.active_adapters == ["adapter-1"]

        dummy_input = self.prepare_inputs_for_testing()
        with torch.no_grad():
            logits_adapter_1 = model(**dummy_input)[0]

        model.add_adapter("adapter-2", config)
        model.set_adapter("adapter-2")
        assert model.active_adapters == ["adapter-2"]

        with torch.no_grad():
            logits_adapter_2 = model(**dummy_input)[0]

        self.assertFalse(torch.allclose(logits_adapter_1, logits_adapter_2, atol=1e-3, rtol=1e-3))

        model_unloaded = model.merge_and_unload()  # should merge adapter-2
        with torch.no_grad():
            logits_merged = model_unloaded(**dummy_input)[0]

        self.assertTrue(torch.allclose(logits_adapter_2, logits_merged, atol=1e-3, rtol=1e-3))

    def _test_generate(self, model_id, config_cls, config_kwargs):
        model = self.transformers_class.from_pretrained(model_id)
        config = config_cls(
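Both new helpers follow the same pattern: create two randomly initialised adapters, check that they produce different logits, merge one of them, and check that the merged model reproduces that adapter's logits. A condensed sketch of that pattern outside the test harness, with an illustrative checkpoint, adapter names, and tolerances:

# Condensed sketch of the multi-adapter merge check; the model id, adapter names,
# and tolerances are illustrative, not taken from this PR.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import LoraConfig, get_peft_model

model_id = "hf-internal-testing/tiny-random-OPTForCausalLM"
tokenizer = AutoTokenizer.from_pretrained(model_id)
inputs = tokenizer("debugging multi-adapter merging", return_tensors="pt")

config = LoraConfig(task_type="CAUSAL_LM", init_lora_weights=False)

torch.manual_seed(0)
base = AutoModelForCausalLM.from_pretrained(model_id)
model = get_peft_model(base, config, "adapter-1").eval()
with torch.no_grad():
    logits_1 = model(**inputs)[0]

model.add_adapter("adapter-2", config)
model.set_adapter("adapter-2")
with torch.no_grad():
    logits_2 = model(**inputs)[0]

# Randomly initialised adapters should produce different outputs.
assert not torch.allclose(logits_1, logits_2, atol=1e-3, rtol=1e-3)

# merge_and_unload should bake the active adapter (adapter-2 here) into the base weights.
merged = model.merge_and_unload()
with torch.no_grad():
    logits_merged = merged(**inputs)[0]
assert torch.allclose(logits_2, logits_merged, atol=1e-3, rtol=1e-3)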