diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 45eca43de93ac..8665960842dc7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,17 +19,28 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v2.3.0
     hooks:
-      - id: trailing-whitespace
       - id: end-of-file-fixer
+      - id: trailing-whitespace
+      - id: check-yaml
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: check-toml

   - repo: https://github.com/PyCQA/isort
     rev: 5.7.0
     hooks:
       - id: isort
-        args: [--settings-path, ./pyproject.toml]
+        name: Format imports

   - repo: https://github.com/pre-commit/mirrors-yapf
     rev: v0.30.0
     hooks:
       - id: yapf
-        args: [--parallel, --in-place]
+        name: Format code
+        language: python
+
+  - repo: https://github.com/PyCQA/flake8
+    rev: 3.9.2
+    hooks:
+      - id: flake8
+        name: Check PEP8
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f4159606cddbf..9e1517ebc2b36 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

+## [1.3.4] - 2021-06-01
+
+### Fixed
+
+- Fixed info message when max training time reached ([#7780](https://github.com/PyTorchLightning/pytorch-lightning/pull/7780))
+- Fixed missing `__len__` method in `IndexBatchSamplerWrapper` ([#7681](https://github.com/PyTorchLightning/pytorch-lightning/pull/7681))
+
 ## [1.3.3] - 2021-05-27

 ### Changed
diff --git a/pytorch_lightning/__about__.py b/pytorch_lightning/__about__.py
index f62cef02182c1..d8a5e153425a2 100644
--- a/pytorch_lightning/__about__.py
+++ b/pytorch_lightning/__about__.py
@@ -1,7 +1,7 @@
 import time

 _this_year = time.strftime("%Y")
-__version__ = '1.3.3'
+__version__ = '1.3.4'
 __author__ = 'William Falcon et al.'
 __author_email__ = 'waf2107@columbia.edu'
 __license__ = 'Apache-2.0'
diff --git a/pytorch_lightning/callbacks/timer.py b/pytorch_lightning/callbacks/timer.py
index 9b93499c82ea1..ba42419141253 100644
--- a/pytorch_lightning/callbacks/timer.py
+++ b/pytorch_lightning/callbacks/timer.py
@@ -170,4 +170,5 @@ def _check_time_remaining(self, trainer: 'pl.Trainer') -> None:
         should_stop = trainer.accelerator.broadcast(should_stop)
         trainer.should_stop = trainer.should_stop or should_stop
         if should_stop and self._verbose:
-            rank_zero_info(f"Time limit reached. Elapsed time is {self.time_elapsed}. Signaling Trainer to stop.")
+            elapsed = timedelta(seconds=int(self.time_elapsed(RunningStage.TRAINING)))
+            rank_zero_info(f"Time limit reached. Elapsed time is {elapsed}. Signaling Trainer to stop.")
diff --git a/pytorch_lightning/overrides/distributed.py b/pytorch_lightning/overrides/distributed.py
index d064040d8e019..559e1161ce676 100644
--- a/pytorch_lightning/overrides/distributed.py
+++ b/pytorch_lightning/overrides/distributed.py
@@ -132,6 +132,9 @@ def __iter__(self) -> Iterator[List[int]]:
             self.batch_indices = batch
             yield batch

+    def __len__(self) -> int:
+        return len(self._sampler)
+
     @property
     def drop_last(self) -> bool:
         return self._sampler.drop_last
diff --git a/tests/callbacks/test_timer.py b/tests/callbacks/test_timer.py
index c27eebbeb7805..16e01a6adcaf4 100644
--- a/tests/callbacks/test_timer.py
+++ b/tests/callbacks/test_timer.py
@@ -95,7 +95,7 @@ def test_timer_time_remaining(time_mock):
     assert round(timer.time_elapsed()) == 3


-def test_timer_stops_training(tmpdir):
+def test_timer_stops_training(tmpdir, caplog):
     """ Test that the timer stops training before reaching max_epochs """
     model = BoringModel()
     duration = timedelta(milliseconds=100)
@@ -106,9 +106,12 @@ def test_timer_stops_training(tmpdir):
         max_epochs=1000,
         callbacks=[timer],
     )
-    trainer.fit(model)
+    with caplog.at_level(logging.INFO):
+        trainer.fit(model)
     assert trainer.global_step > 1
     assert trainer.current_epoch < 999
+    assert "Time limit reached." in caplog.text
+    assert "Signaling Trainer to stop." in caplog.text


 @pytest.mark.parametrize("interval", ["step", "epoch"])
diff --git a/tests/overrides/test_distributed.py b/tests/overrides/test_distributed.py
index d09ac9c8bad06..e742eb6ecccd9 100644
--- a/tests/overrides/test_distributed.py
+++ b/tests/overrides/test_distributed.py
@@ -11,11 +11,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from collections.abc import Iterable
+
 import pytest
 from torch.utils.data import BatchSampler, SequentialSampler

 from pytorch_lightning import seed_everything
 from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper, UnrepeatedDistributedSampler
+from pytorch_lightning.utilities.data import has_len


 @pytest.mark.parametrize("shuffle", [False, True])
@@ -54,3 +57,13 @@ def test_index_batch_sampler(tmpdir):

     for batch in index_batch_sampler:
         assert index_batch_sampler.batch_indices == batch
+
+
+def test_index_batch_sampler_methods():
+    dataset = range(15)
+    sampler = SequentialSampler(dataset)
+    batch_sampler = BatchSampler(sampler, 3, False)
+    index_batch_sampler = IndexBatchSamplerWrapper(batch_sampler)
+
+    assert isinstance(index_batch_sampler, Iterable)
+    assert has_len(index_batch_sampler)
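
Aside on the timer.py hunk above: the removed line interpolated the bound method `self.time_elapsed` into the f-string without calling it, so the info message printed a `<bound method ...>` repr instead of a duration. The replacement calls the method for the training stage and wraps the result in `timedelta` for readable output. A minimal standalone sketch of that formatting step, using a hypothetical elapsed value (not taken from the patch):

from datetime import timedelta

# Hypothetical elapsed seconds, standing in for
# self.time_elapsed(RunningStage.TRAINING) in the patch.
elapsed_seconds = 3725.8

# int() drops the sub-second fraction; timedelta renders as "1:02:05".
elapsed = timedelta(seconds=int(elapsed_seconds))
print(f"Time limit reached. Elapsed time is {elapsed}. Signaling Trainer to stop.")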
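
Aside on the distributed.py hunk: the new `__len__` simply delegates to the wrapped batch sampler, which is what makes `has_len(index_batch_sampler)` in the new test pass. A minimal sketch of the same delegation pattern outside Lightning, with a hypothetical wrapper name standing in for `IndexBatchSamplerWrapper`:

from torch.utils.data import BatchSampler, SequentialSampler

class SizedWrapper:
    # Hypothetical stand-in for IndexBatchSamplerWrapper: wraps a
    # batch sampler and forwards len() to it, as the patch does.
    def __init__(self, sampler: BatchSampler) -> None:
        self._sampler = sampler

    def __iter__(self):
        yield from self._sampler

    def __len__(self) -> int:
        return len(self._sampler)

wrapped = SizedWrapper(BatchSampler(SequentialSampler(range(15)), 3, False))
assert len(wrapped) == 5  # 15 samples / batch_size 3, drop_last=False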