From a8565eb96cc50b1f12cbbf27938eed858cd1594b Mon Sep 17 00:00:00 2001
From: Jirka Borovec <6035284+Borda@users.noreply.github.com>
Date: Wed, 3 Jul 2024 23:51:37 +0200
Subject: [PATCH] freeze `torch<2.3` until AMP compatibility is resolved (#51)

* freeze `torch<2.3` until AMP compatibility is resolved

* Apply suggestions from code review

---
 requirements.txt                   | 2 ++
 src/lightning_hivemind/strategy.py | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 865c6b7..9e0392c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,5 @@
+numpy <2.0  # needed for older Torch
+torch <2.3  # fixme: freeze until AMP compatibility is resolved
 lightning >=2.0.0
 hivemind >=1.1.0, <=1.1.5; sys_platform == 'linux'

diff --git a/src/lightning_hivemind/strategy.py b/src/lightning_hivemind/strategy.py
index fe6b809..07c1317 100644
--- a/src/lightning_hivemind/strategy.py
+++ b/src/lightning_hivemind/strategy.py
@@ -277,7 +277,7 @@ def _wrap_schedulers(self, opt: "hivemind.Optimizer") -> None:
                 raise ValueError(
                     f"The `ReduceLROnPlateau` scheduler is not currently supported with `{self.__class__.__name__}`."
                 )
-            scheduler_config.scheduler = HiveMindScheduler(optimizer=opt, scheduler=scheduler)
+            scheduler_config.scheduler = HiveMindScheduler(optimizer=opt, scheduler=scheduler)  # type: ignore[assignment]

     def on_train_batch_start(self, batch: Any, batch_idx: int, dataloader_idx: int = 0) -> None:
         if not self._hivemind_initialized: