
Commit 3daa4c9

AndresAlgaba authored
Remove deprecated on_init_start_end (#14867)

Co-authored-by: awaelchli <[email protected]>
Co-authored-by: Akihiro Nitta <[email protected]>
Co-authored-by: otaj <[email protected]>
1 parent 2721a2f commit 3daa4c9

13 files changed: +8 −135 lines changed


docs/source-pytorch/extensions/callbacks.rst

Lines changed: 0 additions & 12 deletions
@@ -171,18 +171,6 @@ teardown
 .. automethod:: pytorch_lightning.callbacks.Callback.teardown
     :noindex:
 
-on_init_start
-^^^^^^^^^^^^^
-
-.. automethod:: pytorch_lightning.callbacks.Callback.on_init_start
-    :noindex:
-
-on_init_end
-^^^^^^^^^^^
-
-.. automethod:: pytorch_lightning.callbacks.Callback.on_init_end
-    :noindex:
-
 on_fit_start
 ^^^^^^^^^^^^
 
src/pytorch_lightning/CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -248,6 +248,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed the deprecated `Trainer.use_amp` and `LightningModule.use_amp` attributes ([#14832](https://github.com/Lightning-AI/lightning/pull/14832))
 
+- Removed the deprecated callback hooks `Callback.on_init_start` and `Callback.on_init_end` ([#14867](https://github.com/Lightning-AI/lightning/pull/14867))
+
 
 - Removed the deprecated `Trainer.run_stage` in favor of `Trainer.{fit,validate,test,predict}` ([#14870](https://github.com/Lightning-AI/lightning/pull/14870))
 
src/pytorch_lightning/callbacks/callback.py

Lines changed: 0 additions & 16 deletions
@@ -78,22 +78,6 @@ def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: s
     def teardown(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: str) -> None:
         """Called when fit, validate, test, predict, or tune ends."""
 
-    def on_init_start(self, trainer: "pl.Trainer") -> None:
-        r"""
-        .. deprecated:: v1.6
-            This callback hook was deprecated in v1.6 and will be removed in v1.8.
-
-        Called when the trainer initialization begins, model has not yet been set.
-        """
-
-    def on_init_end(self, trainer: "pl.Trainer") -> None:
-        r"""
-        .. deprecated:: v1.6
-            This callback hook was deprecated in v1.6 and will be removed in v1.8.
-
-        Called when the trainer initialization ends, model has not yet been set.
-        """
-
     def on_fit_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when fit begins."""
 
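With these definitions removed from the `Callback` base class, an override of `on_init_start`/`on_init_end` in user code is simply never invoked anymore (and, per the validator change further down, no longer emits a deprecation warning). A minimal migration sketch, assuming the `setup` hook shown in the context above is an acceptable substitute; note that it runs when a stage such as fit begins, i.e. later than trainer construction:

```python
import pytorch_lightning as pl


class MyCallback(pl.Callback):
    # Previously (deprecated since v1.6, removed by this commit):
    # def on_init_start(self, trainer):
    #     ...  # ran during Trainer.__init__, before a model was attached

    def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: str) -> None:
        # Runs once fit/validate/test/predict starts, with the module attached.
        print(f"Entering stage: {stage}")
```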
src/pytorch_lightning/callbacks/lambda_function.py

Lines changed: 0 additions & 2 deletions
@@ -44,8 +44,6 @@ def __init__(
         setup: Optional[Callable] = None,
         on_configure_sharded_model: Optional[Callable] = None,
         teardown: Optional[Callable] = None,
-        on_init_start: Optional[Callable] = None,
-        on_init_end: Optional[Callable] = None,
         on_fit_start: Optional[Callable] = None,
         on_fit_end: Optional[Callable] = None,
         on_sanity_check_start: Optional[Callable] = None,

src/pytorch_lightning/core/module.py

Lines changed: 0 additions & 4 deletions
@@ -1180,10 +1180,6 @@ def configure_callbacks(self):
                 early_stop = EarlyStopping(monitor="val_acc", mode="max")
                 checkpoint = ModelCheckpoint(monitor="val_loss")
                 return [early_stop, checkpoint]
-
-        Note:
-            Certain callback methods like :meth:`~pytorch_lightning.callbacks.base.Callback.on_init_start`
-            will never be invoked on the new callbacks returned here.
         """
         return []
 
src/pytorch_lightning/trainer/configuration_validator.py

Lines changed: 0 additions & 7 deletions
@@ -210,13 +210,6 @@ def _check_on_pretrain_routine(model: "pl.LightningModule") -> None:
 
 def _check_deprecated_callback_hooks(trainer: "pl.Trainer") -> None:
     for callback in trainer.callbacks:
-        if is_overridden(method_name="on_init_start", instance=callback):
-            rank_zero_deprecation(
-                "The `on_init_start` callback hook was deprecated in v1.6 and will be removed in v1.8."
-            )
-        if is_overridden(method_name="on_init_end", instance=callback):
-            rank_zero_deprecation("The `on_init_end` callback hook was deprecated in v1.6 and will be removed in v1.8.")
-
         if is_overridden(method_name="on_configure_sharded_model", instance=callback):
             rank_zero_deprecation(
                 "The `on_configure_sharded_model` callback hook was deprecated in"

src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py

Lines changed: 0 additions & 2 deletions
@@ -57,8 +57,6 @@ class _LogOptions(TypedDict):
         "optimizer_zero_grad": _LogOptions(
             allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
         ),
-        "on_init_start": None,
-        "on_init_end": None,
         "on_fit_start": None,
         "on_fit_end": None,
         "on_sanity_check_start": None,

src/pytorch_lightning/trainer/trainer.py

Lines changed: 0 additions & 15 deletions
@@ -453,9 +453,6 @@ def __init__(
             accumulate_grad_batches,
         )
 
-        # hook
-        self._call_callback_hooks("on_init_start")
-
         # init data flags
         self.check_val_every_n_epoch: Optional[int]
         self._data_connector.on_trainer_init(
@@ -523,9 +520,6 @@ def __init__(
             num_sanity_val_steps,
         )
 
-        # Callback system
-        self._call_callback_hooks("on_init_end")
-
     def _setup_on_init(self) -> None:
         setup._log_device_info(self)
 
@@ -1333,15 +1327,6 @@ def _call_callback_hooks(
         **kwargs: Any,
     ) -> None:
         log.debug(f"{self.__class__.__name__}: calling callback hook: {hook_name}")
-        # TODO: remove if block in v1.8
-        if hook_name in ("on_init_start", "on_init_end"):
-            # these `Callback` hooks are the only ones that do not take a lightning module.
-            # we also don't profile bc profiler hasn't been set yet
-            for callback in self.callbacks:
-                fn = getattr(callback, hook_name)
-                if callable(fn):
-                    fn(self, *args, **kwargs)
-            return
 
         pl_module = self.lightning_module
         if pl_module:
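With the `on_init_*` special case gone, every hook routed through `_call_callback_hooks` follows the same path: look up the attached LightningModule, then call the hook on each callback with both the trainer and the module. A rough free-standing sketch of that uniform dispatch (a simplification for illustration, not the literal remaining implementation, which also handles profiling and related bookkeeping):

```python
from typing import Any


def call_callback_hooks(trainer: Any, hook_name: str, *args: Any, **kwargs: Any) -> None:
    # Hypothetical stand-in for Trainer._call_callback_hooks after this commit.
    pl_module = trainer.lightning_module
    for callback in trainer.callbacks:
        fn = getattr(callback, hook_name, None)
        if callable(fn):
            # Every remaining callback hook receives the trainer and the module
            # (plus any hook-specific arguments).
            fn(trainer, pl_module, *args, **kwargs)
```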

tests/tests_pytorch/callbacks/test_callbacks.py

Lines changed: 2 additions & 7 deletions
@@ -13,7 +13,7 @@
 # limitations under the License.
 from pathlib import Path
 from re import escape
-from unittest.mock import call, Mock
+from unittest.mock import Mock
 
 import pytest
 
@@ -39,13 +39,8 @@ def configure_callbacks(self):
     )
 
     def assert_expected_calls(_trainer, model_callback, trainer_callback):
-        # some methods in callbacks configured through model won't get called
-        uncalled_methods = [call.on_init_start(_trainer), call.on_init_end(_trainer)]
-        for uncalled in uncalled_methods:
-            assert uncalled not in model_callback.method_calls
-
         # assert that the rest of calls are the same as for trainer callbacks
-        expected_calls = [m for m in trainer_callback.method_calls if m not in uncalled_methods]
+        expected_calls = [m for m in trainer_callback.method_calls if m]
         assert expected_calls
         assert model_callback.method_calls == expected_calls
 
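Since there are no longer any "uncalled" methods to filter out, the test drops `unittest.mock.call` and compares the recorded `method_calls` of the two mocked callbacks directly. A stand-alone illustration (not part of the test file) of what that comparison verifies:

```python
from unittest.mock import Mock

trainer_callback = Mock()
model_callback = Mock()

# Simulate the Trainer invoking the same hooks on both callbacks.
for cb in (trainer_callback, model_callback):
    cb.setup("trainer", "module", stage="fit")
    cb.on_fit_start("trainer", "module")

# Mock.method_calls records every method invocation, in order, so identical
# hook sequences compare equal.
assert trainer_callback.method_calls == model_callback.method_calls
```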
tests/tests_pytorch/callbacks/test_lambda_function.py

Lines changed: 4 additions & 4 deletions
@@ -49,7 +49,7 @@ def call(hook, *_, **__):
         callbacks=[LambdaCallback(**hooks_args)],
     )
     with pytest.deprecated_call(
-        match="`on_init_start` callback hook was deprecated in v1.6 and will be removed in v1.8."
+        match="`on_configure_sharded_model` callback hook was deprecated in v1.6 and will be removed in v1.8"
     ):
         trainer.fit(model)
     ckpt_path = trainer.checkpoint_callback.best_model_path
@@ -65,15 +65,15 @@ def call(hook, *_, **__):
         callbacks=[LambdaCallback(**hooks_args)],
     )
     with pytest.deprecated_call(
-        match="`on_init_start` callback hook was deprecated in v1.6 and will be removed in v1.8."
+        match="`on_configure_sharded_model` callback hook was deprecated in v1.6 and will be removed in v1.8"
     ):
         trainer.fit(model, ckpt_path=ckpt_path)
     with pytest.deprecated_call(
-        match="`on_init_start` callback hook was deprecated in v1.6 and will be removed in v1.8."
+        match="`on_configure_sharded_model` callback hook was deprecated in v1.6 and will be removed in v1.8"
     ):
         trainer.test(model)
     with pytest.deprecated_call(
-        match="`on_init_start` callback hook was deprecated in v1.6 and will be removed in v1.8."
+        match="`on_configure_sharded_model` callback hook was deprecated in v1.6 and will be removed in v1.8"
     ):
         trainer.predict(model)
 