
Commit b2fe6bd

Remove support for passing strategy strings to accelerator (#12696)
Co-authored-by: Kushashwa Ravi Shrimali <[email protected]>
1 parent 9d343ba commit b2fe6bd
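
The user-facing effect: strategy values (names or `Strategy` instances) are no longer rerouted from the `accelerator` Trainer argument to the `strategy` argument, a path that had been deprecated since v1.5. A minimal before/after sketch against this commit:

from pytorch_lightning import Trainer

# Removed: strategy names passed through `accelerator` used to be
# rerouted to the strategy flag with a deprecation warning.
# Trainer(accelerator="ddp")       # no longer selects the DDP strategy
# Trainer(accelerator="ddp_cpu")   # the "ddp_cpu" shorthand is gone entirely

# Supported spelling: `accelerator` names the hardware, `strategy` the
# distributed-training strategy (this call requires two GPUs).
trainer = Trainer(accelerator="gpu", strategy="ddp", devices=2)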

File tree: 6 files changed, +44 -344 lines changed

CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -102,6 +102,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed deprecated `GPUStatsMonitor` callback ([#12554](https://github.com/PyTorchLightning/pytorch-lightning/pull/12554))
 
+
+- Removed support for passing strategy names or strategy classes to the accelerator Trainer argument ([#12696](https://github.com/PyTorchLightning/pytorch-lightning/pull/12696))
+
+
 ### Fixed
 
 - Run main progress bar updates independent of val progress bar updates in `TQDMProgressBar` ([#12563](https://github.com/PyTorchLightning/pytorch-lightning/pull/12563))

pytorch_lightning/trainer/connectors/accelerator_connector.py

Lines changed: 8 additions & 48 deletions
@@ -111,11 +111,9 @@ def __init__(
             components such as the Accelerator and Precision plugins.
 
             A. accelerator flag could be:
-                1. strategy class (deprecated in 1.5 will be removed in 1.7)
-                2. strategy str (deprecated in 1.5 will be removed in 1.7)
-                3. accelerator class
-                4. accelerator str
-                5. accelerator auto
+                1. accelerator class
+                2. accelerator str
+                3. accelerator auto
 
             B. strategy flag could be :
                 1. strategy class
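
In practice the three remaining accepted forms look like this (a sketch; `CPUAccelerator` stands in for any accelerator class, and instantiating it without arguments is assumed to match this version's API):

from pytorch_lightning import Trainer
from pytorch_lightning.accelerators import CPUAccelerator

Trainer(accelerator=CPUAccelerator())  # 1. accelerator class (an instance)
Trainer(accelerator="cpu")             # 2. accelerator str
Trainer(accelerator="auto")            # 3. accelerator auto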
@@ -160,7 +158,6 @@ def __init__(
 
         # Raise an exception if there are conflicts between flags
         # Set each valid flag to `self._x_flag` after validation
-        # Example: If accelerator is set to a strategy type, set `self._strategy_flag = accelerator`.
         # For devices: Assign gpus, ipus, etc. to the accelerator flag and devices flag
         self._strategy_flag: Optional[Union[Strategy, str]] = None
         self._accelerator_flag: Optional[Union[Accelerator, str]] = None
@@ -231,10 +228,9 @@ def _check_config_and_set_final_flags(
     ) -> None:
         """This method checks:
 
-        1. strategy: strategy, accelerator and plugin can all be set to strategies
+        1. strategy: strategy and plugin can be set to strategies
         2. accelerator: if the value of the accelerator argument is a type of accelerator (instance or string),
-           set self._accelerator_flag accordingly. If the value is strategy related (instance or string),
-           it gets handled by 1.
+           set self._accelerator_flag accordingly.
         3. precision: The final value of the precision flag may be determined either by the precision argument or
            by a plugin instance.
         4. plugins: a plugin could occur as a value of the strategy argument (handled by 1), or the precision
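
For point 1, a strategy can arrive as a string or as an instance; a brief sketch (the `find_unused_parameters` kwarg is illustrative):

from pytorch_lightning import Trainer
from pytorch_lightning.strategies import DDPStrategy

Trainer(strategy="ddp")                                      # strategy str
Trainer(strategy=DDPStrategy(find_unused_parameters=False))  # strategy class (instance)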
@@ -258,17 +254,6 @@ def _check_config_and_set_final_flags(
                 "`Trainer(strategy='tpu_spawn')` is not a valid strategy,"
                 " you can use `Trainer(strategy='ddp_spawn', accelerator='tpu')` instead."
             )
-        # handle duplications and conflict
-        if isinstance(accelerator, Strategy) and strategy != accelerator:
-            raise MisconfigurationException(
-                f"Incompatible values set in `strategy` and `accelerator` arguments."
-                f"Received both strategy={strategy} and accelerator={accelerator}"
-            )
-        if isinstance(accelerator, str) and accelerator in self._registered_strategies and strategy != accelerator:
-            raise MisconfigurationException(
-                f"strategy {strategy} already set through `strategy` flag,"
-                f" but have also passed {accelerator} in through the accelerator flag."
-            )
         if plugins:
             for plugin in plugins:
                 if isinstance(plugin, Strategy):
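
Both deleted guards could only fire when a strategy value arrived through `accelerator` alongside an explicit `strategy`; with that input path removed, they became unreachable. A sketch of a call that used to hit them:

from pytorch_lightning import Trainer

# Previously raised "Incompatible values set in `strategy` and `accelerator`
# arguments"; after this commit the strategy value is simply no longer
# accepted (or rerouted) on `accelerator`.
Trainer(strategy="ddp", accelerator="ddp_spawn")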
@@ -285,14 +270,6 @@ def _check_config_and_set_final_flags(
         if accelerator is not None:
             if accelerator in self._accelerator_types or accelerator == "auto" or isinstance(accelerator, Accelerator):
                 self._accelerator_flag = accelerator
-            elif accelerator in self._registered_strategies or isinstance(accelerator, Strategy):
-                rank_zero_deprecation(
-                    f"Passing `Trainer(accelerator={accelerator!r})` has been deprecated"
-                    f" in v1.5 and will be removed in v1.7. Use `Trainer(strategy={accelerator!r})` instead."
-                )
-                self._strategy_flag = accelerator
-            elif accelerator == "ddp_cpu" and not self._strategy_flag:
-                self._strategy_flag = accelerator
 
         if precision is not None:
             if str(precision) not in self._precision_types:
@@ -431,7 +408,7 @@ def _check_device_config_and_set_final_flags(
         if self._devices_flag == "auto" and self._accelerator_flag is None:
             raise MisconfigurationException(
                 f"You passed `devices={devices}` but haven't specified"
-                " `accelerator=('auto'|'tpu'|'gpu'|'ipu'|'cpu'|'hpu)` for the devices mapping"
+                " `accelerator=('auto'|'tpu'|'gpu'|'ipu'|'cpu'|'hpu)` for the devices mapping."
             )
 
     def _map_deprecated_devices_specific_info_to_accelerator_and_device_flag(
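
The reworded message guards the `devices="auto"` case, which can only be resolved once an accelerator is known. A sketch:

from pytorch_lightning import Trainer

# Trainer(devices="auto")                    # raises: nothing to map "auto" onto
Trainer(accelerator="auto", devices="auto")  # devices resolved from the chosen accelerator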
@@ -607,22 +584,6 @@ def _check_strategy_and_fallback(self) -> None:
         # TODO this logic should apply to both str and object config
         strategy_flag = "" if isinstance(self._strategy_flag, Strategy) else self._strategy_flag
 
-        if strategy_flag == "ddp_cpu":
-            if _TPU_AVAILABLE:
-                raise MisconfigurationException(
-                    "`accelerator='ddp_cpu'` is not supported on TPU machines. "
-                    "Learn more: https://github.com/PyTorchLightning/pytorch-lightning/issues/7810"
-                )
-            if self._devices_flag == 1 and self._num_nodes_flag > 1:
-                strategy_flag = DDPStrategy.strategy_name
-            else:
-                strategy_flag = "ddp_spawn"
-            if self._accelerator_flag == "gpu":
-                rank_zero_warn(
-                    "You requested one or more GPUs, but set `accelerator='ddp_cpu'`. Training will not use GPUs."
-                )
-            self._accelerator_flag = "cpu"
-            self.accelerator = CPUAccelerator()
         if strategy_flag in ("ddp_spawn", "ddp_spawn_find_unused_parameters_false") and (
             TorchElasticEnvironment.detect() or KubeflowEnvironment.detect() or self._is_slurm_managing_tasks()
         ):
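
The deleted block implemented the old `accelerator="ddp_cpu"` shorthand: spawn-based DDP on CPU processes, falling back to plain DDP when each node got a single device. Its explicit equivalents after this commit (a sketch):

from pytorch_lightning import Trainer

# Old shorthand: Trainer(accelerator="ddp_cpu", ...)
Trainer(strategy="ddp_spawn", accelerator="cpu", devices=2)

# One device per node across several nodes, where the old fallback chose plain DDP:
Trainer(strategy="ddp", accelerator="cpu", devices=1, num_nodes=2)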
@@ -643,7 +604,7 @@ def _handle_horovod(self) -> None:
 
         if not _HOROVOD_AVAILABLE:
             raise MisconfigurationException(
-                'Requested `accelerator="horovod"`, but Horovod is not installed.'
+                'Requested `strategy="horovod"`, but Horovod is not installed.'
                 "Install with \n $HOROVOD_WITH_PYTORCH=1 pip install horovod[pytorch]"
             )
 
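Horovod is now requested exclusively through the strategy flag, hence the reworded message. A sketch of the call that triggers it when the package is missing:

from pytorch_lightning import Trainer

# Raises the MisconfigurationException above unless Horovod is installed,
# e.g. via: HOROVOD_WITH_PYTORCH=1 pip install horovod[pytorch]
Trainer(strategy="horovod")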
@@ -792,8 +753,7 @@ def _lazy_init_strategy(self) -> None:
 
         if _IS_INTERACTIVE and self.strategy.launcher and not self.strategy.launcher.is_interactive_compatible:
             raise MisconfigurationException(
-                f"`Trainer(strategy={self.strategy.strategy_name!r})` or"
-                f" `Trainer(accelerator={self.strategy.strategy_name!r})` is not compatible with an interactive"
+                f"`Trainer(strategy={self.strategy.strategy_name!r})` is not compatible with an interactive"
                 " environment. Run your code as a script, or choose one of the compatible strategies:"
                 f" Trainer(strategy=None|{'|'.join(_StrategyType.interactive_compatible_types())})."
                 " In case you are spawning processes yourself, make sure to include the Trainer"

pytorch_lightning/utilities/enums.py

Lines changed: 0 additions & 2 deletions
@@ -115,7 +115,6 @@ class DistributedType(LightningEnum, metaclass=_OnAccessEnumMeta):
     DP = "dp"
     DDP = "ddp"
     DDP2 = "ddp2"
-    DDP_CPU = "ddp_cpu"
     DDP_SPAWN = "ddp_spawn"
     TPU_SPAWN = "tpu_spawn"
     DEEPSPEED = "deepspeed"
@@ -240,7 +239,6 @@ class _StrategyType(LightningEnum):
     DP = "dp"
     DDP = "ddp"
     DDP2 = "ddp2"
-    DDP_CPU = "ddp_cpu"
     DDP_SPAWN = "ddp_spawn"
     TPU_SPAWN = "tpu_spawn"
     DEEPSPEED = "deepspeed"
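
With the shorthand gone, "ddp_cpu" is no longer a member of either enum. A quick sketch:

from pytorch_lightning.utilities.enums import _StrategyType

_StrategyType("ddp_spawn")  # still a valid strategy value
_StrategyType("ddp_cpu")    # now raises ValueError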
