
Commit

fix typo in auto wrap policy (#793)
wukaixingxp authored Nov 20, 2024
2 parents 7579b61 + a62aff3 commit de3e32c
Showing 1 changed file with 2 additions and 2 deletions.
src/llama_recipes/finetuning.py: 4 changes (2 additions, 2 deletions)
@@ -246,13 +246,13 @@ def main(**kwargs):
     print_frozen_model_status(model, train_config, rank if train_config.enable_fsdp else 0)
 
     mixed_precision_policy, wrapping_policy = get_policies(fsdp_config, rank)
-    # Create the FSDP wrapper for MllamaSelfAttentionDecoderLayer,MllamaSelfAttentionDecoderLayer,MllamaVisionEncoderLayer in vision models
+    # Create the FSDP wrapper for MllamaSelfAttentionDecoderLayer,MllamaCrossAttentionDecoderLayer,MllamaVisionEncoderLayer in vision models
     if is_vision:
         my_auto_wrapping_policy = fsdp_auto_wrap_policy(
             model,
             [
                 MllamaSelfAttentionDecoderLayer,
-                MllamaSelfAttentionDecoderLayer,
+                MllamaCrossAttentionDecoderLayer,
                 MllamaVisionEncoderLayer,
             ],
         )
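Context for the fix: PyTorch's transformer auto-wrap policy takes a set of transformer layer classes, so the duplicated MllamaSelfAttentionDecoderLayer entry collapsed to a single class and, presumably, left MllamaCrossAttentionDecoderLayer without its own FSDP wrapping unit. Below is a minimal sketch of how such a policy is commonly assembled with torch.distributed.fsdp.wrap.transformer_auto_wrap_policy; the helper name build_mllama_wrapping_policy is hypothetical and is not the repository's fsdp_auto_wrap_policy implementation.

import functools

from torch.distributed.fsdp.wrap import transformer_auto_wrap_policy
from transformers.models.mllama.modeling_mllama import (
    MllamaCrossAttentionDecoderLayer,
    MllamaSelfAttentionDecoderLayer,
    MllamaVisionEncoderLayer,
)

def build_mllama_wrapping_policy():
    # transformer_layer_cls is a set of module classes, so a duplicated
    # entry collapses silently; with the pre-fix list, the cross-attention
    # decoder layers were never registered for wrapping.
    return functools.partial(
        transformer_auto_wrap_policy,
        transformer_layer_cls={
            MllamaSelfAttentionDecoderLayer,
            MllamaCrossAttentionDecoderLayer,
            MllamaVisionEncoderLayer,
        },
    )

The resulting partial would then be passed as auto_wrap_policy when constructing the FSDP-wrapped model, e.g. FSDP(model, auto_wrap_policy=build_mllama_wrapping_policy(), ...).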
