Переглянути джерело

DEBUG do not preschedule state averaging

justheuristic 3 роки тому
батько
коміт
095b386777
1 змінених файлів з 1 додано та 0 видалено
  1. 1 0
      hivemind/optim/experimental/optimizer.py

+ 1 - 0
hivemind/optim/experimental/optimizer.py

@@ -545,6 +545,7 @@ class Optimizer(torch.optim.Optimizer):
 
 
     def _maybe_schedule_state_averaging(self) -> None:
         """If next epoch is coming soon, schedule the next state averaging at estimated parameter averaging start"""
+        return
         next_epoch = max(self.local_epoch + 1, self.tracker.global_epoch)
         if next_epoch % self.average_state_every != 0:
             return  # averaging is not performed at this epoch