Remove accumulation scheduler assert
evanbro authored Jun 27, 2023
1 parent 3805c24 commit d1f57d0
Showing 1 changed file with 0 additions and 6 deletions.
src/lightning_horovod/strategy.py: 0 additions & 6 deletions

@@ -147,12 +147,6 @@ def _unpack_lightning_optimizer(opt: Optimizer) -> Optimizer:
         for optimizer in optimizers:
             hvd.broadcast_optimizer_state(optimizer, root_rank=0)
 
-        accumulation_scheduler = trainer.accumulation_scheduler
-        if accumulation_scheduler.epochs != [0]:
-            raise MisconfigurationException(
-                "Horovod currently does not support different `accumulate_grad_batches` at different epochs."
-            )
-
         self.optimizers = self._wrap_optimizers(optimizers, trainer.accumulate_grad_batches)
         for optimizer in self.optimizers:
             # Synchronization will be performed explicitly following backward()
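
With the check gone, an accumulation schedule whose `epochs` list differs from `[0]` should no longer be rejected during Horovod strategy setup. Below is a minimal sketch of a configuration the deleted code would previously have refused; it assumes the plugin registers its strategy under the name "horovod" and that `MyModel` is a user-defined LightningModule (both are illustrative assumptions, not taken from the commit):

import pytorch_lightning as pl
from pytorch_lightning.callbacks import GradientAccumulationScheduler

# Per-epoch schedule: accumulate 4 batches for epochs 0-3, then 8 from
# epoch 4 on. Its epochs list is [0, 4] != [0], so the deleted check
# would previously have raised MisconfigurationException under Horovod.
accumulator = GradientAccumulationScheduler(scheduling={0: 4, 4: 8})

trainer = pl.Trainer(
    strategy="horovod",  # assumed strategy name registered by lightning-horovod
    callbacks=[accumulator],
)
trainer.fit(MyModel())  # MyModel: assumed user-defined LightningModule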
