浏览代码

DEBUG do not preschedule state averaging

justheuristic 4 年之前
父节点
当前提交
095b386777
共有 1 个文件被更改,包括 1 次插入、0 次删除
  1. 1 0
      hivemind/optim/experimental/optimizer.py

+ 1 - 0
hivemind/optim/experimental/optimizer.py

@@ -545,6 +545,7 @@ class Optimizer(torch.optim.Optimizer):
 
 
     def _maybe_schedule_state_averaging(self) -> None:
         """If next epoch is coming soon, schedule the next state averaging at estimated parameter averaging start"""
+        return
         next_epoch = max(self.local_epoch + 1, self.tracker.global_epoch)
         if next_epoch % self.average_state_every != 0:
             return  # averaging is not performed at this epoch