@@ -524,8 +524,12 @@ class Optimizer(torch.optim.Optimizer):
                 logger.exception(e)

         if not began_averaging_gradients and self.scheduled_grads is not None and not self.scheduled_grads.done():
-            logger.log(self.status_loglevel, f"Tagging along for a pre-scheduled gradient averaging round")
-            self._tag_along_with_zero_weight(self.scheduled_grads)
+            if self.tracker.global_progress.num_peers > 1:
+                logger.log(self.status_loglevel, f"Tagging along for a pre-scheduled gradient averaging round")
+                self._tag_along_with_zero_weight(self.scheduled_grads)
+            else:
+                logger.log(self.status_loglevel, f"Skipping pre-scheduled averaging round: there are no other peers")
+                self.scheduled_grads.cancel()
             self.scheduled_grads = None
         return began_averaging_gradients

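Note: the sketch below is a standalone illustration of the control flow this hunk introduces, using hypothetical stub classes rather than hivemind's real StepControl/Optimizer. The idea, as the new log message suggests, is that a lone peer should not tag along with a pre-scheduled averaging round that has no other participants; instead, the round is cancelled.

# Minimal standalone sketch of the patched branch (hypothetical stubs, not
# hivemind's API): tag along only when other peers exist, otherwise cancel.

from typing import Optional


class ScheduledRound:
    """Stand-in for a pre-scheduled averaging round (hivemind's StepControl)."""

    def __init__(self) -> None:
        self.cancelled = False

    def done(self) -> bool:
        return self.cancelled

    def cancel(self) -> None:
        self.cancelled = True


def resolve_scheduled_round(
    scheduled: Optional[ScheduledRound], num_peers: int, began_averaging: bool
) -> Optional[ScheduledRound]:
    """Mirrors the patched branch; returns the new value of self.scheduled_grads."""
    if not began_averaging and scheduled is not None and not scheduled.done():
        if num_peers > 1:
            print("Tagging along for a pre-scheduled gradient averaging round")
            # in hivemind: self._tag_along_with_zero_weight(scheduled)
        else:
            print("Skipping pre-scheduled averaging round: there are no other peers")
            scheduled.cancel()
    return None  # the round reference is dropped in both branches


round_ = ScheduledRound()
assert resolve_scheduled_round(round_, num_peers=1, began_averaging=False) is None
assert round_.cancelled  # lone peer: the round was cancelled, not joined

Resetting self.scheduled_grads to None on both paths (as the unchanged context line does) keeps the caller's state machine simple: after this block, there is never a stale round reference to reason about.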