Merged
52 commits
772ebff  Move progress bar disabling out of the Trainer (daniellepintz, Jan 9, 2022)
9e85570  fix test (daniellepintz, Jan 9, 2022)
c5b4c49  add test and update changelog (daniellepintz, Jan 10, 2022)
578ea1a  Merge branch 'master' into pbar (carmocca, Jan 12, 2022)
7ff1dc8  fix docstring (daniellepintz, Jan 12, 2022)
79c3696  Merge branch 'master' of https://github.com/PyTorchLightning/pytorch-… (daniellepintz, Jan 14, 2022)
31a6e8f  test debug (daniellepintz, Jan 15, 2022)
1de162a  debug 2 (daniellepintz, Jan 15, 2022)
e3cb7de  debug rich (daniellepintz, Jan 15, 2022)
70ef542  debug rich 2 (daniellepintz, Jan 15, 2022)
2ead1a1  debug tqdm (daniellepintz, Jan 15, 2022)
a95c7dd  Merge branch 'master' into pbar (awaelchli, Jan 16, 2022)
03cd465  add missing import (awaelchli, Jan 16, 2022)
68fc6e9  update submodule (awaelchli, Jan 16, 2022)
0f89745  revert after debug (daniellepintz, Jan 16, 2022)
daadd4b  Merge branch 'pbar' of github.com:daniellepintz/pytorch-lightning int… (daniellepintz, Jan 16, 2022)
b35b741  fix import (daniellepintz, Jan 16, 2022)
d4d61d5  remove mock patch (daniellepintz, Jan 16, 2022)
ea1ee85  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jan 16, 2022)
670a2f4  debug change mock (daniellepintz, Jan 16, 2022)
14ce0b9  Merge branch 'pbar' of github.com:daniellepintz/pytorch-lightning int… (daniellepintz, Jan 16, 2022)
3e87df1  comment out test (daniellepintz, Jan 16, 2022)
36ecb92  test debug (daniellepintz, Jan 16, 2022)
36a8bdf  move test (daniellepintz, Jan 16, 2022)
2ccb27a  reset mock (daniellepintz, Jan 16, 2022)
e60e2b9  add standalone (daniellepintz, Jan 17, 2022)
2f48e6c  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jan 17, 2022)
bef1a3e  Merge branch 'master' into pbar (Borda, Jan 18, 2022)
de98fd8  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jan 18, 2022)
fbe95c6  Merge branch 'master' of https://github.com/PyTorchLightning/pytorch-… (daniellepintz, Jan 20, 2022)
39453a4  remove standalone (daniellepintz, Jan 20, 2022)
fe9f29d  Merge branch 'pbar' of github.com:daniellepintz/pytorch-lightning int… (daniellepintz, Jan 20, 2022)
72c9ea2  action-tmate debug (daniellepintz, Jan 20, 2022)
0b08185  remove tmpdir (daniellepintz, Jan 22, 2022)
d262677  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jan 22, 2022)
d4303d1  comment out test contents (daniellepintz, Jan 22, 2022)
76b8c9e  Merge branch 'pbar' of github.com:daniellepintz/pytorch-lightning int… (daniellepintz, Jan 22, 2022)
f0b975e  debug (daniellepintz, Jan 22, 2022)
35c2eed  debug 2 (daniellepintz, Jan 22, 2022)
470b4c3  debug 3 (daniellepintz, Jan 22, 2022)
7278971  debug 4 (daniellepintz, Jan 23, 2022)
0d1d82f  add limit batches (daniellepintz, Jan 23, 2022)
92dd0f1  debug (daniellepintz, Jan 23, 2022)
b644de8  enable_checkpointing=False (daniellepintz, Jan 23, 2022)
cf618d4  debug (daniellepintz, Jan 23, 2022)
bfaaefe  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Jan 23, 2022)
bbfaf2f  debug (daniellepintz, Jan 23, 2022)
4af0ee2  Merge branch 'pbar' of github.com:daniellepintz/pytorch-lightning int… (daniellepintz, Jan 23, 2022)
7ecce52  Merge branch 'master' of https://github.com/PyTorchLightning/pytorch-… (daniellepintz, Feb 2, 2022)
22e5f3e  fix (daniellepintz, Feb 2, 2022)
3c7ea8c  mock global rank (daniellepintz, Feb 2, 2022)
0dab6b9  change order (daniellepintz, Feb 3, 2022)
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -457,6 +457,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed the lr-scheduler state not being dumped to checkpoint when using the deepspeed strategy ([#11307](https://github.com/PyTorchLightning/pytorch-lightning/pull/11307))
 
 
+- Fixed bug where progress bar was not being disabled when not in rank zero during predict ([#11377](https://github.com/PyTorchLightning/pytorch-lightning/pull/11377))
+
+
 - Fixed `SimpleProfiler` summary ([#11414](https://github.com/PyTorchLightning/pytorch-lightning/pull/11414))
 
 
8 changes: 3 additions & 5 deletions pytorch_lightning/callbacks/progress/base.py
@@ -131,11 +131,7 @@ def total_predict_batches(self) -> Union[int, float]:
         return sum(self.trainer.num_predict_batches)
 
     def disable(self) -> None:
-        """You should provide a way to disable the progress bar.
-
-        The :class:`~pytorch_lightning.trainer.trainer.Trainer` will call this to disable the
-        output on processes that have a rank different from 0, e.g., in multi-node training.
-        """
+        """You should provide a way to disable the progress bar."""
        raise NotImplementedError
 
     def enable(self) -> None:
@@ -153,6 +149,8 @@ def print(self, *args: Any, **kwargs: Any) -> None:
 
     def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
         self._trainer = trainer
+        if not trainer.is_global_zero:
+            self.disable()
 
     def get_metrics(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> Dict[str, Union[int, str]]:
         r"""
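With this change, the rank-zero check lives in `ProgressBarBase.setup`, so any progress bar callback is silenced on non-zero-rank processes for fit, validate, test, and predict without the Trainer's help. Below is a minimal sketch of a custom bar that inherits that behavior; the class name, the printing logic, and the `is_disabled` property are illustrative additions, not part of this PR.

```python
import sys

from pytorch_lightning.callbacks.progress.base import ProgressBarBase


class PrintingProgressBar(ProgressBarBase):
    """Illustrative subclass: the base setup() now calls disable() on processes
    whose global rank is not zero, so this bar stays silent on those ranks."""

    def __init__(self) -> None:
        super().__init__()
        self._enabled = True

    @property
    def is_disabled(self) -> bool:
        return not self._enabled

    def disable(self) -> None:
        self._enabled = False

    def enable(self) -> None:
        self._enabled = True

    def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
        super().on_train_batch_end(trainer, pl_module, outputs, batch, batch_idx)
        if self._enabled:
            sys.stdout.write(f"batch {batch_idx + 1}/{self.total_train_batches}\r")
            sys.stdout.flush()
```

Passing `PrintingProgressBar()` through `Trainer(callbacks=[...])` is then enough; user code does not need its own rank check.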
6 changes: 0 additions & 6 deletions pytorch_lightning/trainer/trainer.py
@@ -1280,9 +1280,6 @@ def _pre_training_routine(self):
     def _run_train(self) -> None:
         self._pre_training_routine()
 
-        if not self.is_global_zero and self.progress_bar_callback is not None:
-            self.progress_bar_callback.disable()
-
         self._run_sanity_check()
 
         # enable train mode
@@ -1294,9 +1291,6 @@ def _run_train(self) -> None:
         self.fit_loop.run()
 
     def _run_evaluate(self) -> _EVALUATE_OUTPUT:
-        if not self.is_global_zero and self.progress_bar_callback is not None:
-            self.progress_bar_callback.disable()
-
         assert self.evaluating
 
         # reload dataloaders
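Since the Trainer no longer disables the bar in `_run_train`/`_run_evaluate`, the disabling now happens only in the callback's `setup` hook. A hedged sketch of what that implies for a subclass that overrides `setup` (the subclass itself is hypothetical):

```python
from typing import Optional

import pytorch_lightning as pl
from pytorch_lightning.callbacks import TQDMProgressBar


class MyProgressBar(TQDMProgressBar):
    # Hypothetical override: keep the super().setup() call, otherwise the
    # non-rank-zero disabling moved into base.py by this PR never runs.
    def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
        super().setup(trainer, pl_module, stage)
        # custom per-run initialization would go here
```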
29 changes: 28 additions & 1 deletion tests/callbacks/test_tqdm_progress_bar.py
@@ -17,7 +17,7 @@
 from collections import defaultdict
 from typing import Union
 from unittest import mock
-from unittest.mock import ANY, call
+from unittest.mock import ANY, call, PropertyMock
 
 import pytest
 import torch
@@ -618,3 +618,30 @@ def test_step(self, batch, batch_idx):
 
     trainer.test(model, verbose=False)
     assert pbar.calls["test"] == []
+
+
+@mock.patch("pytorch_lightning.trainer.trainer.Trainer.is_global_zero", new_callable=PropertyMock, return_value=False)
+def test_tqdm_progress_bar_disabled_when_not_rank_zero(is_global_zero):
+    """Test that the progress bar is disabled when not in global rank zero."""
+    progress_bar = TQDMProgressBar()
+    model = BoringModel()
+    trainer = Trainer(
+        callbacks=[progress_bar],
+        fast_dev_run=True,
+    )
+
+    progress_bar.enable()
+    trainer.fit(model)
+    assert progress_bar.is_disabled
+
+    progress_bar.enable()
+    trainer.predict(model)
+    assert progress_bar.is_disabled
+
+    progress_bar.enable()
+    trainer.validate(model)
+    assert progress_bar.is_disabled
+
+    progress_bar.enable()
+    trainer.test(model)
+    assert progress_bar.is_disabled
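The new test patches a read-only property with `PropertyMock`. A standalone sketch of that mocking technique outside the test suite (the `fast_dev_run`/`enable_checkpointing` Trainer arguments here are only for illustration):

```python
from unittest import mock
from unittest.mock import PropertyMock

from pytorch_lightning import Trainer

# Patch the class-level property so every Trainer created inside the context
# manager reports that it is not the global-rank-zero process.
with mock.patch(
    "pytorch_lightning.trainer.trainer.Trainer.is_global_zero",
    new_callable=PropertyMock,
    return_value=False,
):
    trainer = Trainer(fast_dev_run=True, enable_checkpointing=False)
    assert trainer.is_global_zero is False
```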