
Commit

correctly setting opt in bwd in test
SalmanMohammadi committed Feb 13, 2025
1 parent d2a3e65 commit 41ac1c9
Showing 1 changed file with 16 additions and 5 deletions.
21 changes: 16 additions & 5 deletions tests/recipes/test_full_finetune_distributed.py
@@ -309,10 +309,17 @@ def test_training_state_on_resume(
             checkpointer.model_type={model_type.upper()} \
             tokenizer.path='{tokenizer_path}' \
             tokenizer.prompt_template=null \
-            clip_grad_norm=f"{'100' if not optim_in_bwd else 'null'}" \
-            optimizer_in_bwd={optim_in_bwd} \
         """.split()

+        # "optimizer_in_bwd=True" would free gradient info before clip_grad, causing
+        # wrong grad_norm, so we only test one of them each time. But loss values
+        # should be the same.
+        if not optim_in_bwd:
+            cmd_1.append("clip_grad_norm=100")
+            cmd_1.append("optimizer_in_bwd=False")
+        else:
+            cmd_1.append("optimizer_in_bwd=True")
+
         model_config = MODEL_TEST_CONFIGS[model_type]
         cmd_1 = cmd_1 + self._get_test_config_overrides() + model_config
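The comment in the hunk above is the crux of the change: when the optimizer step runs inside the backward pass, each parameter's gradient is applied and cleared as soon as it is accumulated, so a grad-norm clip performed after backward() has nothing left to act on. The following is an illustrative PyTorch sketch of that interaction only — it is not torchtune's implementation, and the per-parameter SGD optimizers plus post-accumulate-grad hooks are stand-ins for the recipe's optimizer_in_bwd machinery.

import torch

# Illustrative sketch only: emulate "optimizer in backward" with one tiny
# optimizer per parameter, stepped and cleared from a backward-time hook.
model = torch.nn.Linear(4, 4)
optim_dict = {p: torch.optim.SGD([p], lr=0.1) for p in model.parameters()}

def step_and_free(param: torch.nn.Parameter) -> None:
    # Runs during backward, right after this parameter's grad is accumulated.
    optim_dict[param].step()
    optim_dict[param].zero_grad()  # grad is cleared before backward() returns

for p in model.parameters():
    p.register_post_accumulate_grad_hook(step_and_free)

loss = model(torch.randn(2, 4)).sum()
loss.backward()

# By now every .grad is already gone, so clipping sees an empty gradient set
# and the reported norm is meaningless (0.0 here) -- which is why the test
# enables either clip_grad_norm or optimizer_in_bwd, never both at once.
total_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=100.0)
print(total_norm)  # tensor(0.)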

@@ -341,13 +348,17 @@ def test_training_state_on_resume(
             tokenizer.path='{tokenizer_path}' \
             tokenizer.prompt_template=null \
             resume_from_checkpoint=True \
-            metric_logger.filename={log_file} \
-            clip_grad_norm=f"{'100' if not optim_in_bwd else 'null'}" \
-            optimizer_in_bwd={optim_in_bwd} \
+            metric_logger.filename={log_file}
         """.split()

         cmd_2 = cmd_2 + self._get_test_config_overrides() + model_config

+        if not optim_in_bwd:
+            cmd_2.append("clip_grad_norm=100")
+            cmd_2.append("optimizer_in_bwd=False")
+        else:
+            cmd_2.append("optimizer_in_bwd=True")
+
         monkeypatch.setattr(sys, "argv", cmd_2)
         runpy.run_path(TUNE_PATH, run_name="__main__")

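Worth noting: the removed inline overrides also did not render as intended. The command template around them is an f-string (other overrides such as {optim_in_bwd} are interpolated), so the nested f prefix survives as literal text while only the braced expression is substituted, and the recipe would receive a token like clip_grad_norm=f"100" rather than clip_grad_norm=100. A minimal reproduction, with the real tune run invocation abbreviated and the variable names assumed from the test:

# Sketch of how the removed line renders inside an f-string command template.
optim_in_bwd = False

cmd = f"""
tune run full_finetune_distributed \
    clip_grad_norm=f"{'100' if not optim_in_bwd else 'null'}" \
    optimizer_in_bwd={optim_in_bwd} \
""".split()

print(cmd)
# ['tune', 'run', 'full_finetune_distributed',
#  'clip_grad_norm=f"100"', 'optimizer_in_bwd=False']

Appending plain "key=value" strings to the command list, as the commit now does, sidesteps that quoting problem entirely.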
