From b22ef1ba36d9905d47e7ee6ab6205cc8fef49868 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Tue, 1 Jun 2021 07:43:50 +0200
Subject: [PATCH 1/4] Update pre-commit and add new hooks (#7781)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* update precommit

* Update .pre-commit-config.yaml

Co-authored-by: Carlos Mocholí

* Apply suggestions from code review

Co-authored-by: Carlos Mocholí

Co-authored-by: Carlos Mocholí
---
 .pre-commit-config.yaml | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 45eca43de93ac..8665960842dc7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,17 +19,28 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v2.3.0
     hooks:
-      - id: trailing-whitespace
       - id: end-of-file-fixer
+      - id: trailing-whitespace
+      - id: check-yaml
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: check-toml
 
   - repo: https://github.com/PyCQA/isort
     rev: 5.7.0
     hooks:
       - id: isort
-        args: [--settings-path, ./pyproject.toml]
+        name: Format imports
 
   - repo: https://github.com/pre-commit/mirrors-yapf
     rev: v0.30.0
     hooks:
      - id: yapf
-        args: [--parallel, --in-place]
+        name: Format code
+        language: python
+
+  - repo: https://github.com/PyCQA/flake8
+    rev: 3.9.2
+    hooks:
+      - id: flake8
+        name: Check PEP8

From 0e1781ca49b96f1224d1d17d8177832a5c621c0c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Mon, 31 May 2021 14:50:16 +0200
Subject: [PATCH 2/4] fix info message when max training time reached (#7780)

* call time_elapsed

* elapsed formatting

* format

* update test

* changelog
---
 pytorch_lightning/callbacks/timer.py | 3 ++-
 tests/callbacks/test_timer.py        | 7 +++++--
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/pytorch_lightning/callbacks/timer.py b/pytorch_lightning/callbacks/timer.py
index 9b93499c82ea1..ba42419141253 100644
--- a/pytorch_lightning/callbacks/timer.py
+++ b/pytorch_lightning/callbacks/timer.py
@@ -170,4 +170,5 @@ def _check_time_remaining(self, trainer: 'pl.Trainer') -> None:
         should_stop = trainer.accelerator.broadcast(should_stop)
         trainer.should_stop = trainer.should_stop or should_stop
         if should_stop and self._verbose:
-            rank_zero_info(f"Time limit reached. Elapsed time is {self.time_elapsed}. Signaling Trainer to stop.")
+            elapsed = timedelta(seconds=int(self.time_elapsed(RunningStage.TRAINING)))
+            rank_zero_info(f"Time limit reached. Elapsed time is {elapsed}. Signaling Trainer to stop.")
diff --git a/tests/callbacks/test_timer.py b/tests/callbacks/test_timer.py
index c27eebbeb7805..16e01a6adcaf4 100644
--- a/tests/callbacks/test_timer.py
+++ b/tests/callbacks/test_timer.py
@@ -95,7 +95,7 @@ def test_timer_time_remaining(time_mock):
     assert round(timer.time_elapsed()) == 3
 
 
-def test_timer_stops_training(tmpdir):
+def test_timer_stops_training(tmpdir, caplog):
     """ Test that the timer stops training before reaching max_epochs """
     model = BoringModel()
     duration = timedelta(milliseconds=100)
@@ -106,9 +106,12 @@ def test_timer_stops_training(tmpdir):
         max_epochs=1000,
         callbacks=[timer],
     )
-    trainer.fit(model)
+    with caplog.at_level(logging.INFO):
+        trainer.fit(model)
     assert trainer.global_step > 1
     assert trainer.current_epoch < 999
+    assert "Time limit reached." in caplog.text
+    assert "Signaling Trainer to stop." in caplog.text
 
 
 @pytest.mark.parametrize("interval", ["step", "epoch"])
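A minimal sketch of how the Timer callback touched by the patch above is driven
(assuming the pytorch_lightning 1.3.x public API; the duration and Trainer
arguments are illustrative, not part of the patch). The old message
interpolated the bound method `self.time_elapsed` itself, printing a method
repr; the fix calls it for the training stage and formats the result as a
timedelta:

    from datetime import timedelta

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import Timer

    # Stop training once 5 minutes have elapsed; verbose=True emits the
    # info message whose formatting the patch fixes.
    timer = Timer(duration=timedelta(minutes=5), verbose=True)
    trainer = Trainer(max_epochs=1000, callbacks=[timer])
    # trainer.fit(model)  # with a LightningModule, logs on expiry:
    # "Time limit reached. Elapsed time is 0:05:00. Signaling Trainer to stop."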
From 26503945d6e821cb5edfd0e159dc8fde9edf9916 Mon Sep 17 00:00:00 2001
From: Kaushik B <45285388+kaushikb11@users.noreply.github.com>
Date: Wed, 26 May 2021 21:50:13 +0530
Subject: [PATCH 3/4] Add `__len__` method to IndexBatchSamplerWrapper (#7681)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 pytorch_lightning/overrides/distributed.py |  3 +++
 tests/overrides/test_distributed.py        | 13 +++++++++++++
 2 files changed, 16 insertions(+)

diff --git a/pytorch_lightning/overrides/distributed.py b/pytorch_lightning/overrides/distributed.py
index d064040d8e019..559e1161ce676 100644
--- a/pytorch_lightning/overrides/distributed.py
+++ b/pytorch_lightning/overrides/distributed.py
@@ -132,6 +132,9 @@ def __iter__(self) -> Iterator[List[int]]:
             self.batch_indices = batch
             yield batch
 
+    def __len__(self) -> int:
+        return len(self._sampler)
+
     @property
     def drop_last(self) -> bool:
         return self._sampler.drop_last
diff --git a/tests/overrides/test_distributed.py b/tests/overrides/test_distributed.py
index d09ac9c8bad06..e742eb6ecccd9 100644
--- a/tests/overrides/test_distributed.py
+++ b/tests/overrides/test_distributed.py
@@ -11,11 +11,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from collections.abc import Iterable
+
 import pytest
 from torch.utils.data import BatchSampler, SequentialSampler
 
 from pytorch_lightning import seed_everything
 from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper, UnrepeatedDistributedSampler
+from pytorch_lightning.utilities.data import has_len
 
 
 @pytest.mark.parametrize("shuffle", [False, True])
@@ -54,3 +57,13 @@ def test_index_batch_sampler(tmpdir):
 
     for batch in index_batch_sampler:
         assert index_batch_sampler.batch_indices == batch
+
+
+def test_index_batch_sampler_methods():
+    dataset = range(15)
+    sampler = SequentialSampler(dataset)
+    batch_sampler = BatchSampler(sampler, 3, False)
+    index_batch_sampler = IndexBatchSamplerWrapper(batch_sampler)
+
+    assert isinstance(index_batch_sampler, Iterable)
+    assert has_len(index_batch_sampler)
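A minimal sketch of what the new `__len__` enables, mirroring the imports the
test above uses (the dataset size and batch size are illustrative):

    from torch.utils.data import BatchSampler, SequentialSampler

    from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper

    batch_sampler = BatchSampler(SequentialSampler(range(15)), batch_size=3, drop_last=False)
    wrapped = IndexBatchSamplerWrapper(batch_sampler)

    # __len__ delegates to the wrapped BatchSampler, so len() now works
    # (previously it raised TypeError on the wrapper):
    assert len(wrapped) == len(batch_sampler) == 5  # ceil(15 / 3) batches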
From f7027c1199ad71e39452ba5d06c30539ab46e2b2 Mon Sep 17 00:00:00 2001
From: tchaton
Date: Tue, 1 Jun 2021 12:48:54 +0100
Subject: [PATCH 4/4] update changelog + increment version

---
 CHANGELOG.md                   | 7 +++++++
 pytorch_lightning/__about__.py | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f4159606cddbf..9e1517ebc2b36 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 
+## [1.3.4] - 2021-06-01
+
+### Fixed
+
+- Fixed info message when max training time reached ([#7780](https://github.com/PyTorchLightning/pytorch-lightning/pull/7780))
+- Fixed missing `__len__` method to `IndexBatchSamplerWrapper` ([#7681](https://github.com/PyTorchLightning/pytorch-lightning/pull/7681))
+
 ## [1.3.3] - 2021-05-27
 
 ### Changed
diff --git a/pytorch_lightning/__about__.py b/pytorch_lightning/__about__.py
index f62cef02182c1..d8a5e153425a2 100644
--- a/pytorch_lightning/__about__.py
+++ b/pytorch_lightning/__about__.py
@@ -1,7 +1,7 @@
 import time
 
 _this_year = time.strftime("%Y")
-__version__ = '1.3.3'
+__version__ = '1.3.4'
 __author__ = 'William Falcon et al.'
 __author_email__ = 'waf2107@columbia.edu'
 __license__ = 'Apache-2.0'
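A quick sanity check after applying the series (assuming pytorch-lightning is
installed from this source tree), confirming the version bump in the last
patch is visible at runtime:

    import pytorch_lightning as pl

    assert pl.__version__ == '1.3.4'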