Skip to content

Commit

Permalink
freeze torch<2.3 until AMP compatibility is resolved (#51)
Browse files Browse the repository at this point in the history
* freeze `torch<2.3` until AMP compatibility is resolved
* Apply suggestions from code review
Loading branch information
Borda committed Jul 3, 2024
1 parent 00438d0 commit a8565eb
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 1 deletion.
2 changes: 2 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
numpy <2.0 # needed for older Torch
torch <2.3 # fixme: freeze until AMP compatibility is resolved
lightning >=2.0.0
hivemind >=1.1.0, <=1.1.5; sys_platform == 'linux'

Expand Down
2 changes: 1 addition & 1 deletion src/lightning_hivemind/strategy.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ def _wrap_schedulers(self, opt: "hivemind.Optimizer") -> None:
raise ValueError(
f"The `ReduceLROnPlateau` scheduler is not currently supported with `{self.__class__.__name__}`."
)
scheduler_config.scheduler = HiveMindScheduler(optimizer=opt, scheduler=scheduler)
scheduler_config.scheduler = HiveMindScheduler(optimizer=opt, scheduler=scheduler) # type: ignore[assignment]

def on_train_batch_start(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> None:
if not self._hivemind_initialized:
Expand Down

0 comments on commit a8565eb

Please sign in to comment.