From 58b0d4b0d8dbd65da02622ef39f416801d5b5c97 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Wed, 6 Mar 2024 10:08:54 -0500
Subject: [PATCH] update flash attention for gemma support: (#1368)

---
 requirements.txt | 2 +-
 setup.py         | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index cd5171ebd1..718896783b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,7 +12,7 @@ fire
 PyYAML>=6.0
 requests
 datasets>=2.15.0
-flash-attn==2.3.3
+flash-attn==2.5.5
 sentencepiece
 wandb
 einops
diff --git a/setup.py b/setup.py
index 4a949a60d2..40dd0a6686 100644
--- a/setup.py
+++ b/setup.py
@@ -68,7 +68,7 @@ def parse_requirements():
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.0",
+            "flash-attn==2.5.5",
         ],
         "fused-dense-lib": [
             "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
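
Note: since this patch only bumps the flash-attn pin from 2.3.3 (requirements.txt) and 2.5.0 (setup.py extra) to 2.5.5, a quick sanity check in an existing environment can confirm the installed build actually meets the new pin before training a Gemma model. The sketch below is illustrative only and not part of the patch; it assumes `flash_attn` exposes `__version__` and that the `packaging` library is available.

```python
# Illustrative sanity check (not part of the patch): verify the installed
# flash-attn meets the new pin required for Gemma support.
from packaging.version import Version

import flash_attn  # assumes flash_attn publishes flash_attn.__version__

REQUIRED = Version("2.5.5")  # version pinned by this patch

installed = Version(flash_attn.__version__)
if installed < REQUIRED:
    raise RuntimeError(
        f"flash-attn {installed} is older than {REQUIRED}; "
        "upgrade with: pip install flash-attn==2.5.5"
    )
```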