Przeglądaj źródła

DEBUG do not preschedule state averaging

justheuristic 3 lat temu
rodzic
commit
095b386777
1 zmienionych plików z 1 dodań i 0 usunięć
  1. 1 0
      hivemind/optim/experimental/optimizer.py

+ 1 - 0
hivemind/optim/experimental/optimizer.py

@@ -545,6 +545,7 @@ class Optimizer(torch.optim.Optimizer):
 
 
    def _maybe_schedule_state_averaging(self) -> None:
         """If next epoch is coming soon, schedule the next state averaging at estimated parameter averaging start"""
         """If next epoch is coming soon, schedule the next state averaging at estimated parameter averaging start"""
+        return
        next_epoch = max(self.local_epoch + 1, self.tracker.global_epoch)
        if next_epoch % self.average_state_every != 0:
            return  # averaging is not performed at this epoch