|
25 | 25 | from pytorch_lightning.plugins.precision import ApexMixedPrecisionPlugin, NativeMixedPrecisionPlugin, PrecisionPlugin |
26 | 26 | from pytorch_lightning.plugins.training_type import DataParallelPlugin, TrainingTypePlugin |
27 | 27 | from pytorch_lightning.trainer.states import TrainerFn |
| 28 | +from pytorch_lightning.utilities import rank_zero_deprecation |
28 | 29 | from pytorch_lightning.utilities.apply_func import apply_to_collection, move_data_to_device |
29 | 30 | from pytorch_lightning.utilities.enums import AMPType, LightningEnum |
30 | 31 | from pytorch_lightning.utilities.types import STEP_OUTPUT |
@@ -56,6 +57,15 @@ def __init__( |
56 | 57 | self.training_type_plugin = training_type_plugin |
57 | 58 |
|
58 | 59 | if precision_plugin: |
| 60 | + """
| 61 | + .. deprecated::
| 62 | + The ``precision_plugin`` parameter is deprecated and will be removed soon.
| 63 | + Pass the precision plugin to the ``training_type_plugin`` instead.
| 64 | + """
| 65 | + rank_zero_deprecation(
| 66 | + f"Passing `precision_plugin` to `{self.__class__.__name__}` is deprecated and will be removed soon."
| 67 | + " Pass the precision plugin to the `training_type_plugin` instead."
| 68 | + )
59 | 69 | self.training_type_plugin._precision_plugin = precision_plugin |
60 | 70 |
|
61 | 71 | self.optimizers: List = [] |
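For context, a minimal sketch of the migration this deprecation points at, assuming the v1.5-era plugin API in which training type plugins such as `SingleDevicePlugin` accept a `precision_plugin` argument; the concrete classes below are illustrative, not prescribed by this diff:

```python
import torch

from pytorch_lightning.accelerators import CPUAccelerator
from pytorch_lightning.plugins import PrecisionPlugin, SingleDevicePlugin

# Deprecated path: passing the precision plugin to the accelerator now
# triggers the rank_zero_deprecation warning added above.
accelerator = CPUAccelerator(
    precision_plugin=PrecisionPlugin(),
    training_type_plugin=SingleDevicePlugin(torch.device("cpu")),
)

# Preferred path: attach the precision plugin to the training type plugin
# and leave the accelerator's `precision_plugin` argument unset.
accelerator = CPUAccelerator(
    precision_plugin=None,
    training_type_plugin=SingleDevicePlugin(
        torch.device("cpu"), precision_plugin=PrecisionPlugin()
    ),
)
```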
@@ -213,7 +223,7 @@ def optimizer_step( |
213 | 223 | opt_idx: int, |
214 | 224 | closure: Callable[[], Any], |
215 | 225 | model: Optional[Union["pl.LightningModule", Module]] = None, |
216 | | - **kwargs: Any |
| 226 | + **kwargs: Any, |
217 | 227 | ) -> None: |
218 | 228 | """performs the actual optimizer step. |
219 | 229 |
|
@@ -270,8 +280,16 @@ def amp_backend(self) -> Optional[LightningEnum]: |
270 | 280 |
|
271 | 281 | @property |
272 | 282 | def precision(self) -> Union[str, int]: |
273 | | - """deprecated.""" |
274 | | - return self.training_type_plugin.precision |
| 283 | + """
| 284 | + .. deprecated::
| 285 | + This property is deprecated and will be removed soon.
| 286 | + Use ``training_type_plugin.precision_plugin.precision`` instead.
| 287 | + """
| 288 | + rank_zero_deprecation(
| 289 | + f"`{self.__class__.__name__}.precision` is deprecated and will be removed soon."
| 290 | + " Use `training_type_plugin.precision_plugin.precision` instead."
| 291 | + )
| 292 | + return self.training_type_plugin.precision_plugin.precision |
275 | 293 |
|
276 | 294 | @property |
277 | 295 | def scaler(self) -> Optional["GradScaler"]: |
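On the read side, a short sketch assuming an `accelerator` built as in the previous example: the deprecated property still resolves, but only after emitting the warning above.

```python
# Deprecated read: goes through `Accelerator.precision`, which now warns
# via rank_zero_deprecation before delegating.
precision = accelerator.precision

# Preferred read: follow the ownership chain directly, since the precision
# plugin now lives on the training type plugin.
precision = accelerator.training_type_plugin.precision_plugin.precision
```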
|