Remove deprecated functionality (#714)
Changes in this PR:
- Remove the deprecated add_fusion() & train_fusion() methods (see the migration sketch below)
- Remove deprecated support for passing lists to adapter activation
- Add an "adapters." prefix to the version identifier when saving adapters, to prevent confusion with adapter-transformers versions
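A rough migration sketch for the removed APIs, based on the call patterns in the diffs below. The model checkpoint and adapter names are placeholders, not part of this PR:

```python
import adapters
from adapters import Fuse
from adapters.composition import Stack
from transformers import AutoModelForSequenceClassification

# Any transformers model works once adapter support is initialized.
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased")
adapters.init(model)

model.add_adapter("sst-2")
model.add_adapter("mnli")

# Removed: model.add_fusion(...) / model.train_fusion(...)
# Use the *_adapter_fusion methods with a Fuse composition block instead.
fusion_setup = Fuse("sst-2", "mnli")
model.add_adapter_fusion(fusion_setup, "dynamic")
model.train_adapter_fusion(fusion_setup)

# Removed: passing plain lists to activation, e.g. set_active_adapters(["sst-2", "mnli"])
# Pass a single adapter name or an explicit composition block instead.
model.set_active_adapters("sst-2")
model.set_active_adapters(Stack("sst-2", "mnli"))
```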
calpt authored Jul 5, 2024
1 parent d3e7784 commit eb93f9a
Showing 21 changed files with 2,661 additions and 2,671 deletions.
40 changes: 19 additions & 21 deletions examples/pytorch/adapterfusion/run_fusion_glue.py
@@ -27,7 +27,7 @@
 import numpy as np
 
 import adapters
-from adapters import AdapterArguments, AdapterTrainer
+from adapters import AdapterArguments, AdapterTrainer, Fuse
 from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, EvalPrediction, GlueDataset
 from transformers import GlueDataTrainingArguments as DataTrainingArguments
 from transformers import (
@@ -162,28 +162,26 @@ def main():
     model.load_adapter("qa/boolq@ukp", config=SeqBnConfig(), with_head=False)
     model.load_adapter("sentiment/imdb@ukp", config=SeqBnConfig(), with_head=False)
 
-    adapter_setup = [
-        [
-            "sst-2",
-            "mnli",
-            "rte",
-            "mrpc",
-            "qqp",
-            "cosmosqa",
-            "csqa",
-            "hellaswag",
-            "socialiqa",
-            "winogrande",
-            "cb",
-            "sick",
-            "scitail",
-            "boolq",
-            "imdb",
-        ]
-    ]
+    adapter_setup = Fuse(
+        "sst-2",
+        "mnli",
+        "rte",
+        "mrpc",
+        "qqp",
+        "cosmosqa",
+        "csqa",
+        "hellaswag",
+        "socialiqa",
+        "winogrande",
+        "cb",
+        "sick",
+        "scitail",
+        "boolq",
+        "imdb",
+    )
 
     # Add a fusion layer and tell the model to train fusion
-    model.add_adapter_fusion(adapter_setup[0], "dynamic")
+    model.add_adapter_fusion(adapter_setup, "dynamic")
     model.train_adapter_fusion(adapter_setup)
 
     # ~~~~~ Rest is again same as in standard training setup ~~~~~
2 changes: 1 addition & 1 deletion examples/pytorch/text-generation/run_generation.py
@@ -225,7 +225,7 @@ def main():
     # Setup adapters
     if args.load_adapter:
         model.load_adapter(args.load_adapter, load_as="generation")
-        model.set_active_adapters(["generation"])
+        model.set_active_adapters("generation")
 
     if args.fp16:
         model.half()
2 changes: 1 addition & 1 deletion notebooks/04_Cross_Lingual_Transfer.ipynb
@@ -761,7 +761,7 @@
   },
   "outputs": [],
   "source": [
-    "model.train_adapter([\"copa\"])"
+    "model.train_adapter(\"copa\")"
   ]
  },
  {
4 changes: 3 additions & 1 deletion notebooks/08_NER_Wikiann.ipynb
@@ -136,6 +136,8 @@
   "outputs": [],
   "source": [
    "from adapters import AdapterConfig\n",
+   "from adapters.composition import Stack\n",
+   "\n",
    "target_language = \"gn\" # Choose any language that a bert-base-multilingual-cased language adapter is available for\n",
    "source_language = \"en\" # We support \"en\", \"ja\", \"zh\", and \"ar\"\n",
@@ -156,7 +158,7 @@
    " leave_out=[11],\n",
    ")\n",
    "# Set the adapters to be used in every forward pass\n",
-   "model.set_active_adapters([lang_adapter_name, \"wikiann\"])"
+   "model.set_active_adapters(Stack(lang_adapter_name, \"wikiann\"))"
   ]
  },
  {