
Commit f447839

kaushikb11 and carmocca authored
Add warning_cache.deprecation and set warning stacklevel [1/2] (#8005)
Co-authored-by: Carlos Mocholi <[email protected]>
1 parent 599d6db commit f447839

File tree: 12 files changed (+38 / -32 lines)
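Every call site below follows the same pattern: `warning_cache.warn(msg, DeprecationWarning)` becomes `warning_cache.deprecation(msg)`, so the warning category and stack level live in one place. The definition of the new method (in `pytorch_lightning/utilities/warnings.py`) is among the changed files not shown in this excerpt; the following is a minimal sketch of the behavior the call sites imply, not the actual implementation, and the defaults are assumptions:

import warnings


class WarningCache(set):
    """Emit each distinct warning message at most once per cache instance."""

    def warn(self, message: str, category: type = UserWarning, stacklevel: int = 5) -> None:
        if message not in self:
            self.add(message)
            warnings.warn(message, category, stacklevel=stacklevel)

    def deprecation(self, message: str, stacklevel: int = 5) -> None:
        # Same de-duplication, but the category is fixed to DeprecationWarning,
        # so call sites no longer pass it explicitly.
        if message not in self:
            self.add(message)
            warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)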

pytorch_lightning/callbacks/model_checkpoint.py

Lines changed: 2 additions & 2 deletions
@@ -650,10 +650,10 @@ def _add_backward_monitor_support(self, trainer: 'pl.Trainer') -> None:
             self.save_top_k = 1
 
         if deprecation_warning:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "Relying on `self.log('val_loss', ...)` to set the ModelCheckpoint monitor is deprecated in v1.2"
                 " and will be removed in v1.4. Please, create your own `mc = ModelCheckpoint(monitor='your_monitor')`"
-                " and use it as `Trainer(callbacks=[mc])`.", DeprecationWarning
+                " and use it as `Trainer(callbacks=[mc])`.",
             )
 
     def _validate_monitor_key(self, trainer: 'pl.Trainer') -> None:

pytorch_lightning/core/lightning.py

Lines changed: 5 additions & 4 deletions
@@ -168,9 +168,10 @@ def example_input_array(self, example: Any) -> None:
 
     @property
     def datamodule(self) -> Any:
-        rank_zero_deprecation(
+        warning_cache.deprecation(
             "The `LightningModule.datamodule` property is deprecated in v1.3 and will be removed in v1.5."
-            " Access the datamodule through using `self.trainer.datamodule` instead."
+            " Access the datamodule through using `self.trainer.datamodule` instead.",
+            stacklevel=5,
         )
         return self._datamodule
 
@@ -223,10 +224,10 @@ def _apply_batch_transfer_handler(
         if is_param_in_hook_signature(self.transfer_batch_to_device, 'dataloader_idx'):
             batch = self.transfer_batch_to_device(batch, device, dataloader_idx)
         else:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`transfer_batch_to_device` hook signature has changed in v1.4."
                 " `dataloader_idx` parameter has been added to it. Support for"
-                " the old signature will be removed in v1.6", DeprecationWarning
+                " the old signature will be removed in v1.6"
             )
             batch = self.transfer_batch_to_device(batch, device)
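The `stacklevel=5` added to the `datamodule` deprecation controls which stack frame the warning is attributed to: `warnings.warn` reports the frame `stacklevel` levels up the call stack, so a larger value skips Lightning-internal frames and points the message at the user's code. A self-contained illustration of the mechanism (stdlib only, the function name is ours):

import warnings


def deprecated_api() -> None:
    # stacklevel=1 (the default) would attribute the warning to this line;
    # stacklevel=2 attributes it to the direct caller instead.
    warnings.warn("deprecated_api() is deprecated", DeprecationWarning, stacklevel=2)


deprecated_api()  # with stacklevel=2, the warning points at this line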

pytorch_lightning/loggers/wandb.py

Lines changed: 2 additions & 2 deletions
@@ -137,9 +137,9 @@ def __init__(
         )
 
         if sync_step is not None:
-            warning_cache.warn(
+            warning_cache.deprecation(
                 "`WandbLogger(sync_step=(True|False))` is deprecated in v1.2.1 and will be removed in v1.5."
-                " Metrics are now logged separately and automatically synchronized.", DeprecationWarning
+                " Metrics are now logged separately and automatically synchronized."
             )
 
         super().__init__()

pytorch_lightning/loops/training_batch_loop.py

Lines changed: 4 additions & 4 deletions
@@ -488,10 +488,10 @@ def build_train_args(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Te
         if len(self.trainer.optimizers) > 1:
             if self.trainer.has_arg("training_step", "optimizer_idx"):
                 if not self.trainer.lightning_module.automatic_optimization:
-                    self.warning_cache.warn(
+                    self.warning_cache.deprecation(
                         "`training_step` hook signature has changed in v1.3."
                         " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                        " the old signature will be removed in v1.5", DeprecationWarning
+                        " the old signature will be removed in v1.5",
                     )
                 args.append(opt_idx)
             elif not self.trainer.has_arg(
@@ -682,10 +682,10 @@ def _build_kwargs(self, batch: Any, batch_idx: int, opt_idx: int, hiddens: Optio
         has_opt_idx_in_train_step = is_param_in_hook_signature(training_step_fx, "optimizer_idx")
         if has_opt_idx_in_train_step:
             if not lightning_module.automatic_optimization:
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "`training_step` hook signature has changed in v1.3."
                     " `optimizer_idx` argument has been removed in case of manual optimization. Support for"
-                    " the old signature will be removed in v1.5", DeprecationWarning
+                    " the old signature will be removed in v1.5",
                 )
             step_kwargs['optimizer_idx'] = opt_idx
         elif not has_opt_idx_in_train_step and lightning_module.automatic_optimization:

pytorch_lightning/loops/training_epoch_loop.py

Lines changed: 2 additions & 2 deletions
@@ -231,10 +231,10 @@ def _on_train_epoch_end_hook(self, processed_epoch_output: List[List[STEP_OUTPUT
         if is_overridden(hook_name, model_ref):
             hook_fx = getattr(model_ref, hook_name)
             if is_param_in_hook_signature(hook_fx, "outputs"):
-                self.warning_cache.warn(
+                self.warning_cache.deprecation(
                     "The signature of `ModelHooks.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been deprecated."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5",
                 )
                 model_ref.on_train_epoch_end(processed_epoch_output)
             else:

pytorch_lightning/plugins/training_type/deepspeed.py

Lines changed: 4 additions & 4 deletions
@@ -15,7 +15,6 @@
 import json
 import logging
 import os
-import warnings
 from collections import OrderedDict
 from pathlib import Path
 from typing import Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Union
@@ -30,7 +29,7 @@
 from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
-from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
+from pytorch_lightning.utilities.distributed import _warn, rank_zero_info, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _DEEPSPEED_AVAILABLE
 
@@ -260,10 +259,11 @@ def __init__(
             )
 
         if cpu_offload or cpu_offload_params or cpu_offload_use_pin_memory:
-            warnings.warn(
+            _warn(
                 "The usage of `cpu_offload`, `cpu_offload_params`, and `cpu_offload_use_pin_memory` "
                 "is deprecated since v1.4 and will be removed in v1.5."
-                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.", DeprecationWarning
+                " From now on use `offload_optimizer`, `offload_parameters` and `pin_memory`.",
+                category=DeprecationWarning
             )
             offload_optimizer = cpu_offload
             offload_parameters = cpu_offload_params
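Here the stdlib `warnings.warn` is replaced by `_warn` from `pytorch_lightning.utilities.distributed`, whose body is not part of this excerpt. From the call site it behaves as a thin pass-through to `warnings.warn` that gives Lightning one place to control the default stack level; a hedged sketch under that assumption:

import warnings


def _warn(message: str, stacklevel: int = 2, **kwargs) -> None:
    # Assumed shape: forward to the stdlib; the default stacklevel attributes
    # the warning to the caller of _warn rather than to this wrapper.
    warnings.warn(message, stacklevel=stacklevel, **kwargs)


# Usage mirroring the diff above:
_warn("`cpu_offload` is deprecated ...", category=DeprecationWarning)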

pytorch_lightning/profiler/base.py

Lines changed: 2 additions & 3 deletions
@@ -19,7 +19,7 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, Optional, TextIO, Union
 
-from pytorch_lightning.utilities import rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_deprecation
 from pytorch_lightning.utilities.cloud_io import get_filesystem
 
 log = logging.getLogger(__name__)
@@ -63,10 +63,9 @@ def __init__(
         self.dirpath = dirpath
         self.filename = filename
         if output_filename is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`Profiler` signature has changed in v1.3. The `output_filename` parameter has been removed in"
                 " favor of `dirpath` and `filename`. Support for the old signature will be removed in v1.5",
-                DeprecationWarning
             )
             filepath = Path(output_filename)
             self.dirpath = filepath.parent
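`rank_zero_warn(msg, DeprecationWarning)` becomes `rank_zero_deprecation(msg)`: the category moves into the helper, and in multi-process runs only rank 0 emits the warning. The helper's definition is also outside this excerpt; a sketch of what the rename implies, with a simplified stand-in for Lightning's `rank_zero_only` decorator (the body and defaults are assumptions):

import os
import warnings
from functools import wraps


def rank_zero_only(fn):
    # Simplified stand-in: run fn only on the process with (local) rank 0.
    @wraps(fn)
    def wrapped(*args, **kwargs):
        if int(os.environ.get("LOCAL_RANK", "0")) == 0:
            return fn(*args, **kwargs)
    return wrapped


@rank_zero_only
def rank_zero_deprecation(message: str, stacklevel: int = 5) -> None:
    # Assumed shape: a DeprecationWarning emitted on rank 0 only.
    warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)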

pytorch_lightning/profiler/pytorch.py

Lines changed: 3 additions & 3 deletions
@@ -24,7 +24,7 @@
 from torch.autograd.profiler import record_function
 
 from pytorch_lightning.profiler.base import BaseProfiler
-from pytorch_lightning.utilities.distributed import rank_zero_warn
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE
 
@@ -349,9 +349,9 @@ def __deprecation_check(
         record_functions = set()
 
         if profiled_functions is not None:
-            rank_zero_warn(
+            rank_zero_deprecation(
                 "`PyTorchProfiler.profiled_functions` has been renamed to"
-                " `record_functions` in v1.3 and will be removed in v1.5", DeprecationWarning
+                " `record_functions` in v1.3 and will be removed in v1.5",
             )
             if not record_functions:
                 record_functions |= set(profiled_functions)

pytorch_lightning/trainer/callback_hook.py

Lines changed: 2 additions & 2 deletions
@@ -97,10 +97,10 @@ def on_train_epoch_end(self, outputs: EPOCH_OUTPUT):
         """
         for callback in self.callbacks:
             if is_param_in_hook_signature(callback.on_train_epoch_end, "outputs"):
-                warning_cache.warn(
+                warning_cache.deprecation(
                     "The signature of `Callback.on_train_epoch_end` has changed in v1.3."
                     " `outputs` parameter has been removed."
-                    " Support for the old signature will be removed in v1.5", DeprecationWarning
+                    " Support for the old signature will be removed in v1.5"
                 )
                 callback.on_train_epoch_end(self, self.lightning_module, outputs)
             else:

pytorch_lightning/utilities/device_parser.py

Lines changed: 2 additions & 3 deletions
@@ -16,7 +16,7 @@
 
 import torch
 
-from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn
+from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_deprecation
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _compare_version
 
@@ -121,12 +121,11 @@ def _normalize_parse_gpu_string_input(s: Union[int, str, List[int]]) -> Union[in
     else:
         num_gpus = int(s.strip())
         if _compare_version("pytorch_lightning", operator.lt, "1.5"):
-            rank_zero_warn(
+            rank_zero_deprecation(
                 f"Parsing of the Trainer argument gpus='{s}' (string) will change in the future."
                 " In the current version of Lightning, this will select"
                 f" CUDA device with index {num_gpus}, but from v1.5 it will select gpus"
                 f" {list(range(num_gpus))} (same as gpus={s} (int)).",
-                DeprecationWarning,
             )
         return [num_gpus]
     return num_gpus
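For context on the message above: `Trainer(gpus=3)` (int) selects the first three devices, while `Trainer(gpus='3')` (string) historically selected the single device with index 3; from v1.5 the string form is to be parsed like the int. A tiny illustrative sketch of the two interpretations (not Lightning API):

def parse_gpus_string(s: str, new_behavior: bool = False) -> list:
    num = int(s.strip())
    if new_behavior:
        # planned from v1.5: same as gpus=<int>
        return list(range(num))
    # current behavior: one device, with this index
    return [num]


assert parse_gpus_string("3") == [3]
assert parse_gpus_string("3", new_behavior=True) == [0, 1, 2]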
