17 changes: 14 additions & 3 deletions .pre-commit-config.yaml
@@ -19,17 +19,28 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v2.3.0
     hooks:
-      - id: trailing-whitespace
       - id: end-of-file-fixer
+      - id: trailing-whitespace
       - id: check-yaml
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: check-toml

   - repo: https://github.com/PyCQA/isort
     rev: 5.7.0
     hooks:
       - id: isort
+        args: [--settings-path, ./pyproject.toml]
+        name: Format imports

+  - repo: https://github.com/pre-commit/mirrors-yapf
+    rev: v0.30.0
+    hooks:
+      - id: yapf
+        args: [--parallel, --in-place]
+        name: Format code
+        language: python

   - repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
     hooks:
       - id: flake8
+        name: Check PEP8
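
Taken together, the updated hook suite checks whitespace, YAML/TOML syntax, docstring placement, and shebangs, formats imports with isort and code with yapf, and lints with flake8. As a usage sketch (these are the standard pre-commit CLI commands, not part of this diff):

pre-commit install          # run the hooks automatically on every commit
pre-commit run --all-files  # run the whole suite once across the repo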
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,13 @@
 All notable changes to this project will be documented in this file.

 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).


+## [1.3.4] - 2021-06-01
+
+### Fixed
+
+- Fixed info message when max training time reached ([#7780](https://github.com/PyTorchLightning/pytorch-lightning/pull/7780))
+- Fixed missing `__len__` method on `IndexBatchSamplerWrapper` ([#7681](https://github.com/PyTorchLightning/pytorch-lightning/pull/7681))
+
 ## [1.3.3] - 2021-05-27

 ### Changed
2 changes: 1 addition & 1 deletion pytorch_lightning/__about__.py
@@ -1,7 +1,7 @@
 import time

 _this_year = time.strftime("%Y")
-__version__ = '1.3.3'
+__version__ = '1.3.4'
 __author__ = 'William Falcon et al.'
 __author_email__ = '[email protected]'
 __license__ = 'Apache-2.0'
3 changes: 2 additions & 1 deletion pytorch_lightning/callbacks/timer.py
@@ -170,4 +170,5 @@ def _check_time_remaining(self, trainer: 'pl.Trainer') -> None:
         should_stop = trainer.accelerator.broadcast(should_stop)
         trainer.should_stop = trainer.should_stop or should_stop
         if should_stop and self._verbose:
-            rank_zero_info(f"Time limit reached. Elapsed time is {self.time_elapsed}. Signaling Trainer to stop.")
+            elapsed = timedelta(seconds=int(self.time_elapsed(RunningStage.TRAINING)))
+            rank_zero_info(f"Time limit reached. Elapsed time is {elapsed}. Signaling Trainer to stop.")
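
Why the fix works: `self.time_elapsed` is a method, so the old f-string interpolated its repr (something like `<bound method Timer.time_elapsed of ...>`) rather than a duration. Calling it returns elapsed seconds as a float, which `timedelta` renders readably. A minimal standalone sketch of the same pattern (the `Clock` class here is hypothetical, for illustration only):

from datetime import timedelta

class Clock:
    def time_elapsed(self, stage="train"):
        return 3723.9  # elapsed seconds, like Timer.time_elapsed

clock = Clock()
print(f"Elapsed time is {clock.time_elapsed}")  # <bound method Clock.time_elapsed ...>
elapsed = timedelta(seconds=int(clock.time_elapsed("train")))
print(f"Elapsed time is {elapsed}")  # 1:02:03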
3 changes: 3 additions & 0 deletions pytorch_lightning/overrides/distributed.py
@@ -132,6 +132,9 @@ def __iter__(self) -> Iterator[List[int]]:
             self.batch_indices = batch
             yield batch

+    def __len__(self) -> int:
+        return len(self._sampler)
+
     @property
     def drop_last(self) -> bool:
         return self._sampler.drop_last
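
With `__len__` delegating to the wrapped sampler, `len()` now works on the wrapper (it previously raised `TypeError`), which is what downstream length checks such as `has_len` rely on. A minimal sketch using only the classes shown in this diff:

from torch.utils.data import BatchSampler, SequentialSampler
from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper

batch_sampler = BatchSampler(SequentialSampler(range(10)), batch_size=4, drop_last=False)
wrapper = IndexBatchSamplerWrapper(batch_sampler)
assert len(wrapper) == len(batch_sampler) == 3  # ceil(10 / 4) batches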
7 changes: 5 additions & 2 deletions tests/callbacks/test_timer.py
@@ -95,7 +95,7 @@ def test_timer_time_remaining(time_mock):
     assert round(timer.time_elapsed()) == 3


-def test_timer_stops_training(tmpdir):
+def test_timer_stops_training(tmpdir, caplog):
     """ Test that the timer stops training before reaching max_epochs """
     model = BoringModel()
     duration = timedelta(milliseconds=100)
@@ -106,9 +106,12 @@ def test_timer_stops_training(tmpdir):
         max_epochs=1000,
         callbacks=[timer],
     )
-    trainer.fit(model)
+    with caplog.at_level(logging.INFO):
+        trainer.fit(model)
     assert trainer.global_step > 1
     assert trainer.current_epoch < 999
+    assert "Time limit reached." in caplog.text
+    assert "Signaling Trainer to stop." in caplog.text


 @pytest.mark.parametrize("interval", ["step", "epoch"])
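
For context on what the test asserts: a `Timer` callback stops training once its duration elapses and, when verbose (the default), emits the log message checked above. A hedged sketch of typical usage, assuming the 1.3-era API:

from datetime import timedelta
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import Timer

# stop fitting after at most five minutes of training time
timer = Timer(duration=timedelta(minutes=5))
trainer = Trainer(max_epochs=1000, callbacks=[timer])
# trainer.fit(model) would then log "Time limit reached. ..." once the limit hits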
13 changes: 13 additions & 0 deletions tests/overrides/test_distributed.py
@@ -11,11 +11,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from collections.abc import Iterable

 import pytest
 from torch.utils.data import BatchSampler, SequentialSampler

 from pytorch_lightning import seed_everything
 from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper, UnrepeatedDistributedSampler
+from pytorch_lightning.utilities.data import has_len


 @pytest.mark.parametrize("shuffle", [False, True])
@@ -54,3 +57,13 @@ def test_index_batch_sampler(tmpdir):

     for batch in index_batch_sampler:
         assert index_batch_sampler.batch_indices == batch
+
+
+def test_index_batch_sampler_methods():
+    dataset = range(15)
+    sampler = SequentialSampler(dataset)
+    batch_sampler = BatchSampler(sampler, 3, False)
+    index_batch_sampler = IndexBatchSamplerWrapper(batch_sampler)
+
+    assert isinstance(index_batch_sampler, Iterable)
+    assert has_len(index_batch_sampler)