Skip to content

Commit ccca6b6

Browse files
author
SeanNaren
committed
debug
1 parent 072c272 commit ccca6b6

File tree

2 files changed

+2
-11
lines changed

2 files changed

+2
-11
lines changed

pytorch_lightning/core/optimizer.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -206,8 +206,7 @@ def dis_closure():
206206
else:
207207
# make sure to call optimizer_closure when accumulating
208208
with trainer.profiler.profile("closure"):
209-
with trainer.train_loop.block_ddp_sync_behaviour():
210-
closure()
209+
closure()
211210

212211
def __repr__(self):
213212
groups = [

pytorch_lightning/trainer/training_loop.py

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -752,15 +752,7 @@ def train_step_and_backward_closure():
752752

753753
@contextmanager
754754
def block_ddp_sync_behaviour(self):
755-
"""
756-
Blocks ddp sync gradients behaviour on backwards pass.
757-
This is useful for skipping sync when accumulating gradients, reducing communication overhead
758-
Returns: context manager with sync behaviour off
759-
"""
760-
if self.trainer.accelerator_backend is not None:
761-
yield self.trainer.accelerator_backend.block_ddp_plugin_sync_behaviour()
762-
else:
763-
yield
755+
yield
764756

765757
def _process_closure_result(
766758
self, batch_outputs: list, opt_idx: int

0 commit comments

Comments (0)