From 5edfd4d3e2909dd40c68a0314b11631489c2dc9f Mon Sep 17 00:00:00 2001
From: calpt
Date: Mon, 25 Nov 2024 21:48:09 +0100
Subject: [PATCH] remove parallel grad checkpointing test

---
 tests/methods/base.py | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/tests/methods/base.py b/tests/methods/base.py
index 86eb3e08c..0c508c310 100644
--- a/tests/methods/base.py
+++ b/tests/methods/base.py
@@ -422,16 +422,3 @@ def adapter_setup_fn(model):
             model.adapter_to("adapter1", torch_device)
 
         self._run_gradient_checkpointing_test_helper(adapter_setup_fn)
-
-    def run_gradient_checkpointing_test_parallel_adapters(self, adapter_config):
-        def adapter_setup_fn(model):
-            model.add_adapter("adapter1", config=adapter_config)
-            model.add_adapter("adapter2", config=adapter_config)
-            self.add_head(model, "adapter1")
-            self.add_head(model, "adapter2")
-            model.active_adapters = ac.Parallel("adapter1", "adapter2")
-            model.train_adapter(ac.Parallel("adapter1", "adapter2"))
-            model.adapter_to("adapter1", torch_device)
-            model.adapter_to("adapter2", torch_device)
-
-        self._run_gradient_checkpointing_test_helper(adapter_setup_fn)