Skip to content

Commit de7ef41

Browse files
authored
remove deprecated reload_dataloaders_every_epoch from Trainer (#10481)
1 parent: 98de69b · commit: de7ef41

File tree

6 files changed

+5
-67
lines changed

6 files changed

+5
-67
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
133133
- Removed deprecated `CheckpointConnector.hpc_load` property in favor of `CheckpointConnector.restore` ([#10525](https://github.com/PyTorchLightning/pytorch-lightning/pull/10525))
134134

135135

136+
- Removed deprecated `reload_dataloaders_every_epoch` from `Trainer` in favour of `reload_dataloaders_every_n_epochs` ([#10481](https://github.com/PyTorchLightning/pytorch-lightning/pull/10481))
137+
138+
136139

137140
### Fixed
138141

pytorch_lightning/trainer/connectors/data_connector.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,6 @@ def on_trainer_init(
6464
self,
6565
check_val_every_n_epoch: int,
6666
reload_dataloaders_every_n_epochs: int,
67-
reload_dataloaders_every_epoch: bool,
6867
prepare_data_per_node: Optional[bool] = None,
6968
) -> None:
7069
self.trainer.datamodule = None
@@ -83,13 +82,6 @@ def on_trainer_init(
8382

8483
self.trainer.check_val_every_n_epoch = check_val_every_n_epoch
8584

86-
if reload_dataloaders_every_epoch:
87-
reload_dataloaders_every_n_epochs = int(reload_dataloaders_every_epoch)
88-
rank_zero_deprecation(
89-
"`reload_dataloaders_every_epoch` is deprecated in v1.4 and will be removed in v1.6."
90-
" Please use `reload_dataloaders_every_n_epochs` in Trainer."
91-
)
92-
9385
if not isinstance(reload_dataloaders_every_n_epochs, int) or (reload_dataloaders_every_n_epochs < 0):
9486
raise MisconfigurationException(
9587
f"`reload_dataloaders_every_n_epochs` should be an int >= 0, got {reload_dataloaders_every_n_epochs}."

pytorch_lightning/trainer/trainer.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -162,7 +162,6 @@ def __init__(
162162
benchmark: bool = False,
163163
deterministic: bool = False,
164164
reload_dataloaders_every_n_epochs: int = 0,
165-
reload_dataloaders_every_epoch: bool = False,
166165
auto_lr_find: Union[bool, str] = False,
167166
replace_sampler_ddp: bool = True,
168167
detect_anomaly: bool = False,
@@ -341,12 +340,6 @@ def __init__(
341340
342341
reload_dataloaders_every_n_epochs: Set to a non-negative integer to reload dataloaders every n epochs.
343342
344-
reload_dataloaders_every_epoch: Set to True to reload dataloaders every epoch.
345-
346-
.. deprecated:: v1.4
347-
``reload_dataloaders_every_epoch`` has been deprecated in v1.4 and will be removed in v1.6.
348-
Please use ``reload_dataloaders_every_n_epochs``.
349-
350343
replace_sampler_ddp: Explicitly enables or disables sampler replacement. If not specified this
351344
will toggled automatically when DDP is used. By default it will add ``shuffle=True`` for
352345
train sampler and ``shuffle=False`` for val/test sampler. If you want to customize it,
@@ -515,7 +508,6 @@ def __init__(
515508
self._data_connector.on_trainer_init(
516509
check_val_every_n_epoch,
517510
reload_dataloaders_every_n_epochs,
518-
reload_dataloaders_every_epoch,
519511
prepare_data_per_node,
520512
)
521513

tests/deprecated_api/test_remove_1-6.py

Lines changed: 0 additions & 49 deletions
This file was deleted.

tests/models/test_hooks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -866,7 +866,7 @@ def call(hook, fn, *args, **kwargs):
866866
limit_predict_batches=batches,
867867
enable_progress_bar=False,
868868
enable_model_summary=False,
869-
reload_dataloaders_every_epoch=True,
869+
reload_dataloaders_every_n_epochs=True,
870870
)
871871

872872
called = []

tests/trainer/test_dataloaders.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1276,7 +1276,7 @@ def validation_step(self, batch, batch_idx):
12761276
# the val dataloader on the first epoch because this only tracks the training epoch
12771277
# meaning multiple passes through the validation data within a single training epoch
12781278
# would not have the dataloader reloaded.
1279-
# This breaks the assumption behind reload_dataloaders_every_epoch=True
1279+
# This breaks the assumption behind reload_dataloaders_every_n_epochs=True
12801280
call.val_dataloader(),
12811281
call.train_dataloader(),
12821282
call.val_dataloader(),

0 commit comments

Comments (0)