diff --git a/CHANGELOG.md b/CHANGELOG.md index c90de22d359dc..7a1872048248d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -102,6 +102,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - `stat_scores_multiple_classes` is deprecated in favor of `stat_scores` ([#4839](https://github.com/PyTorchLightning/pytorch-lightning/pull/4839)) +- Moved accelerators and plugins to its `legacy` pkg ([#5645](https://github.com/PyTorchLightning/pytorch-lightning/pull/5645)) + + ### Removed - Removed deprecated checkpoint argument `filepath` ([#5321](https://github.com/PyTorchLightning/pytorch-lightning/pull/5321)) diff --git a/benchmarks/test_sharded_parity.py b/benchmarks/test_sharded_parity.py index 01975493590e9..d16e192bcc1bb 100644 --- a/benchmarks/test_sharded_parity.py +++ b/benchmarks/test_sharded_parity.py @@ -21,10 +21,10 @@ import torch from pytorch_lightning import seed_everything, Trainer -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.sharded_plugin import DDPShardedPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.sharded_plugin import DDPShardedPlugin from pytorch_lightning.utilities import _FAIRSCALE_AVAILABLE, _NATIVE_AMP_AVAILABLE -from tests.backends import DDPLauncher +from tests.accelerators.legacy import DDPLauncher from tests.base.boring_model import BoringModel, RandomDataset diff --git a/dockers/tpu-tests/tpu_test_cases.jsonnet b/dockers/tpu-tests/tpu_test_cases.jsonnet index 7e4d841387800..f9976134df0dc 100644 --- a/dockers/tpu-tests/tpu_test_cases.jsonnet +++ b/dockers/tpu-tests/tpu_test_cases.jsonnet @@ -21,7 +21,10 @@ local tputests = base.BaseTest { command: utils.scriptCommand( ||| cd pytorch-lightning - coverage run --source=pytorch_lightning -m pytest tests/models/test_tpu.py tests/backends/test_tpu_backend.py pytorch_lightning/utilities/xla_device_utils.py -v + coverage run --source=pytorch_lightning -m pytest -v \ + pytorch_lightning/utilities/xla_device_utils.py \ + tests/accelerators/legacy/test_tpu_backend.py \ + tests/models/test_tpu.py test_exit_code=$? echo "\n||| END PYTEST LOGS |||\n" coverage xml diff --git a/docs/source/advanced/multi_gpu.rst b/docs/source/advanced/multi_gpu.rst index 639d8d8c61249..e385c1daf6801 100644 --- a/docs/source/advanced/multi_gpu.rst +++ b/docs/source/advanced/multi_gpu.rst @@ -580,9 +580,9 @@ Below are the possible configurations we support. Implement Your Own Distributed (DDP) training ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you need your own way to init PyTorch DDP you can override :meth:`pytorch_lightning.plugins.ddp_plugin.DDPPlugin.init_ddp_connection`. +If you need your own way to init PyTorch DDP you can override :meth:`pytorch_lightning.plugins.legacy.ddp_plugin.DDPPlugin.init_ddp_connection`. -If you also need to use your own DDP implementation, override :meth:`pytorch_lightning.plugins.ddp_plugin.DDPPlugin.configure_ddp`. +If you also need to use your own DDP implementation, override :meth:`pytorch_lightning.plugins.legacy.ddp_plugin.DDPPlugin.configure_ddp`. ---------- @@ -692,7 +692,7 @@ This should be kept within the ``sequential_module`` variable within your ``Ligh .. 
code-block:: python - from pytorch_lightning.plugins.ddp_sequential_plugin import DDPSequentialPlugin + from pytorch_lightning.plugins.legacy.ddp_sequential_plugin import DDPSequentialPlugin from pytorch_lightning import LightningModule class MyModel(LightningModule): diff --git a/docs/source/extensions/accelerators.rst b/docs/source/extensions/accelerators.rst index bc9abebcd90d8..c860c361edcb9 100644 --- a/docs/source/extensions/accelerators.rst +++ b/docs/source/extensions/accelerators.rst @@ -16,7 +16,7 @@ To link up arbitrary hardware, implement your own Accelerator subclass .. code-block:: python - from pytorch_lightning.accelerators.accelerator import Accelerator + from pytorch_lightning.accelerators.legacy.accelerator import Accelerator class MyAccelerator(Accelerator): def __init__(self, trainer, cluster_environment=None): @@ -124,59 +124,59 @@ Available Accelerators CPU Accelerator =============== -.. autoclass:: pytorch_lightning.accelerators.cpu_accelerator.CPUAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.cpu_accelerator.CPUAccelerator :noindex: DDP Accelerator =============== -.. autoclass:: pytorch_lightning.accelerators.ddp_accelerator.DDPAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp_accelerator.DDPAccelerator :noindex: DDP2 Accelerator ================ -.. autoclass:: pytorch_lightning.accelerators.ddp2_accelerator.DDP2Accelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp2_accelerator.DDP2Accelerator :noindex: DDP CPU HPC Accelerator ======================= -.. autoclass:: pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator.DDPCPUHPCAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp_cpu_hpc_accelerator.DDPCPUHPCAccelerator :noindex: DDP CPU Spawn Accelerator ========================= -.. autoclass:: pytorch_lightning.accelerators.ddp_cpu_spawn_accelerator.DDPCPUSpawnAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp_cpu_spawn_accelerator.DDPCPUSpawnAccelerator :noindex: DDP HPC Accelerator =================== -.. autoclass:: pytorch_lightning.accelerators.ddp_hpc_accelerator.DDPHPCAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp_hpc_accelerator.DDPHPCAccelerator :noindex: DDP Spawn Accelerator ===================== -.. autoclass:: pytorch_lightning.accelerators.ddp_spawn_accelerator.DDPSpawnAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.ddp_spawn_accelerator.DDPSpawnAccelerator :noindex: GPU Accelerator =============== -.. autoclass:: pytorch_lightning.accelerators.gpu_accelerator.GPUAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.gpu_accelerator.GPUAccelerator :noindex: Horovod Accelerator =================== -.. autoclass:: pytorch_lightning.accelerators.horovod_accelerator.HorovodAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.horovod_accelerator.HorovodAccelerator :noindex: TPU Accelerator =============== -.. autoclass:: pytorch_lightning.accelerators.tpu_accelerator.TPUAccelerator +.. autoclass:: pytorch_lightning.accelerators.legacy.tpu_accelerator.TPUAccelerator :noindex: diff --git a/docs/source/extensions/plugins.rst b/docs/source/extensions/plugins.rst index 084fa7d8256a8..89b660f99cb76 100644 --- a/docs/source/extensions/plugins.rst +++ b/docs/source/extensions/plugins.rst @@ -19,16 +19,16 @@ For example, to customize your own DistributedDataParallel you could do somethin ApexPlugin ********** -.. autoclass:: pytorch_lightning.plugins.apex.ApexPlugin +.. 
autoclass:: pytorch_lightning.plugins.legacy.apex.ApexPlugin *************** NativeAMPPlugin *************** -.. autoclass:: pytorch_lightning.plugins.native_amp.NativeAMPPlugin +.. autoclass:: pytorch_lightning.plugins.legacy.native_amp.NativeAMPPlugin ********* DDPPlugin ********* -.. autoclass:: pytorch_lightning.plugins.ddp_plugin.DDPPlugin +.. autoclass:: pytorch_lightning.plugins.legacy.ddp_plugin.DDPPlugin diff --git a/pl_examples/basic_examples/conv_sequential_example.py b/pl_examples/basic_examples/conv_sequential_example.py index 84efb4bea7670..38e077071d59e 100644 --- a/pl_examples/basic_examples/conv_sequential_example.py +++ b/pl_examples/basic_examples/conv_sequential_example.py @@ -32,7 +32,7 @@ from pl_examples import cli_lightning_logo from pytorch_lightning import Trainer from pytorch_lightning.metrics.functional import accuracy -from pytorch_lightning.plugins.ddp_sequential_plugin import DDPSequentialPlugin +from pytorch_lightning.plugins.legacy.ddp_sequential_plugin import DDPSequentialPlugin from pytorch_lightning.utilities import _BOLTS_AVAILABLE, _FAIRSCALE_PIPE_AVAILABLE if _BOLTS_AVAILABLE: diff --git a/pytorch_lightning/accelerators/__init__.py b/pytorch_lightning/accelerators/__init__.py index d8bf7061de11f..a97edb21e504d 100644 --- a/pytorch_lightning/accelerators/__init__.py +++ b/pytorch_lightning/accelerators/__init__.py @@ -11,15 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from pytorch_lightning.accelerators.accelerator import Accelerator # noqa: F401 -from pytorch_lightning.accelerators.cpu_accelerator import CPUAccelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp2_accelerator import DDP2Accelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp_accelerator import DDPAccelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator # noqa: F401 -from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator # noqa: F401 -from pytorch_lightning.accelerators.dp_accelerator import DataParallelAccelerator # noqa: F401 -from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator # noqa: F401 -from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator # noqa: F401 -from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.cpu_accelerator import CPUAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp2_accelerator import DDP2Accelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_accelerator import DDPAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_hpc_accelerator import DDPHPCAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_spawn_accelerator import DDPSpawnAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.dp_accelerator 
import DataParallelAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.horovod_accelerator import HorovodAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.tpu_accelerator import TPUAccelerator # noqa: F401 diff --git a/pytorch_lightning/accelerators/legacy/__init__.py b/pytorch_lightning/accelerators/legacy/__init__.py new file mode 100644 index 0000000000000..a97edb21e504d --- /dev/null +++ b/pytorch_lightning/accelerators/legacy/__init__.py @@ -0,0 +1,25 @@ +# Copyright The PyTorch Lightning team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.cpu_accelerator import CPUAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp2_accelerator import DDP2Accelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_accelerator import DDPAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_hpc_accelerator import DDPHPCAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.ddp_spawn_accelerator import DDPSpawnAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.dp_accelerator import DataParallelAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.horovod_accelerator import HorovodAccelerator # noqa: F401 +from pytorch_lightning.accelerators.legacy.tpu_accelerator import TPUAccelerator # noqa: F401 diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/legacy/accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/accelerator.py rename to pytorch_lightning/accelerators/legacy/accelerator.py index c30224d160649..0788b26f845be 100644 --- a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/legacy/accelerator.py @@ -19,8 +19,8 @@ from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities.apply_func import move_data_to_device from pytorch_lightning.utilities.parsing import AttributeDict diff --git a/pytorch_lightning/accelerators/accelerator_connector.py b/pytorch_lightning/accelerators/legacy/accelerator_connector.py similarity index 99% rename from 
pytorch_lightning/accelerators/accelerator_connector.py rename to pytorch_lightning/accelerators/legacy/accelerator_connector.py index d9dcc5cbd0a88..07a213450865b 100644 --- a/pytorch_lightning/accelerators/accelerator_connector.py +++ b/pytorch_lightning/accelerators/legacy/accelerator_connector.py @@ -17,7 +17,7 @@ from pytorch_lightning import _logger as log from pytorch_lightning import accelerators -from pytorch_lightning.accelerators.accelerator import Accelerator +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment from pytorch_lightning.cluster_environments.torchelastic_environment import TorchElasticEnvironment from pytorch_lightning.utilities import ( diff --git a/pytorch_lightning/accelerators/cpu_accelerator.py b/pytorch_lightning/accelerators/legacy/cpu_accelerator.py similarity index 97% rename from pytorch_lightning/accelerators/cpu_accelerator.py rename to pytorch_lightning/accelerators/legacy/cpu_accelerator.py index 3c2eac7dbb7ad..f34162c602a55 100644 --- a/pytorch_lightning/accelerators/cpu_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/cpu_accelerator.py @@ -15,7 +15,7 @@ import torch -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/accelerators/ddp2_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp2_accelerator.py similarity index 97% rename from pytorch_lightning/accelerators/ddp2_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp2_accelerator.py index 0448bf8628d2c..e4712f1270c57 100644 --- a/pytorch_lightning/accelerators/ddp2_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp2_accelerator.py @@ -18,13 +18,13 @@ from torch.nn.parallel import DistributedDataParallel from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.step_result import Result from pytorch_lightning.distributed.dist import LightningDistributed -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available diff --git a/pytorch_lightning/accelerators/ddp_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp_accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/ddp_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp_accelerator.py index 04599e4c6d32f..0899114b147c1 100644 --- a/pytorch_lightning/accelerators/ddp_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp_accelerator.py @@ -24,12 +24,12 @@ from torch.nn.parallel import DistributedDataParallel from pytorch_lightning import _logger as log -from 
pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.distributed.dist import LightningDistributed -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import _HYDRA_AVAILABLE, AMPType from pytorch_lightning.utilities.distributed import ( all_gather_ddp_if_available, diff --git a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp_cpu_hpc_accelerator.py similarity index 90% rename from pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp_cpu_hpc_accelerator.py index 7db8e3defdb21..8ec4d18509cab 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp_cpu_hpc_accelerator.py @@ -13,9 +13,9 @@ # limitations under the License from typing import Optional -from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator +from pytorch_lightning.accelerators.legacy.ddp_hpc_accelerator import DDPHPCAccelerator from pytorch_lightning.cluster_environments import ClusterEnvironment -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin class DDPCPUHPCAccelerator(DDPHPCAccelerator): diff --git a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp_cpu_spawn_accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp_cpu_spawn_accelerator.py index 2820763a61307..4609ef88c55a4 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp_cpu_spawn_accelerator.py @@ -20,12 +20,12 @@ from torch.nn.parallel import DistributedDataParallel from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.distributed.dist import LightningDistributed -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.distributed import ( all_gather_ddp_if_available, diff --git a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp_hpc_accelerator.py similarity index 97% rename from pytorch_lightning/accelerators/ddp_hpc_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp_hpc_accelerator.py index ad953da6d1b23..f61423583435f 100644 --- a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp_hpc_accelerator.py @@ 
-19,12 +19,12 @@ from torch.nn.parallel import DistributedDataParallel from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.distributed.dist import LightningDistributed -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available diff --git a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py b/pytorch_lightning/accelerators/legacy/ddp_spawn_accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/ddp_spawn_accelerator.py rename to pytorch_lightning/accelerators/legacy/ddp_spawn_accelerator.py index 2ff5fa0cc01b6..c768db3dd16b4 100644 --- a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/ddp_spawn_accelerator.py @@ -21,12 +21,12 @@ from torch.nn.parallel import DistributedDataParallel from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.distributed import LightningDistributed -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.cloud_io import atomic_save from pytorch_lightning.utilities.cloud_io import load as pl_load diff --git a/pytorch_lightning/accelerators/dp_accelerator.py b/pytorch_lightning/accelerators/legacy/dp_accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/dp_accelerator.py rename to pytorch_lightning/accelerators/legacy/dp_accelerator.py index 8eb1b199a6b09..ec2cb54531e4c 100644 --- a/pytorch_lightning/accelerators/dp_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/dp_accelerator.py @@ -16,7 +16,7 @@ import torch from torch import optim -from pytorch_lightning.accelerators.accelerator import Accelerator +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.step_result import Result diff --git a/pytorch_lightning/accelerators/gpu_accelerator.py b/pytorch_lightning/accelerators/legacy/gpu_accelerator.py similarity index 97% rename from pytorch_lightning/accelerators/gpu_accelerator.py rename to pytorch_lightning/accelerators/legacy/gpu_accelerator.py index 62486f04a5581..e1410a2946c7c 100644 --- a/pytorch_lightning/accelerators/gpu_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/gpu_accelerator.py @@ -15,7 +15,7 @@ import torch -from 
pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.distributed.dist import LightningDistributed from pytorch_lightning.utilities import AMPType diff --git a/pytorch_lightning/accelerators/horovod_accelerator.py b/pytorch_lightning/accelerators/legacy/horovod_accelerator.py similarity index 98% rename from pytorch_lightning/accelerators/horovod_accelerator.py rename to pytorch_lightning/accelerators/legacy/horovod_accelerator.py index 57f39125c62c2..4a15d765b817b 100644 --- a/pytorch_lightning/accelerators/horovod_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/horovod_accelerator.py @@ -17,7 +17,7 @@ import torch from torch.optim.lr_scheduler import _LRScheduler -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, AMPType, DeviceType from pytorch_lightning.utilities.distributed import rank_zero_only diff --git a/pytorch_lightning/accelerators/tpu_accelerator.py b/pytorch_lightning/accelerators/legacy/tpu_accelerator.py similarity index 99% rename from pytorch_lightning/accelerators/tpu_accelerator.py rename to pytorch_lightning/accelerators/legacy/tpu_accelerator.py index 4b626a67e0533..0f4014df04a8a 100644 --- a/pytorch_lightning/accelerators/tpu_accelerator.py +++ b/pytorch_lightning/accelerators/legacy/tpu_accelerator.py @@ -21,7 +21,7 @@ from torch.optim import Optimizer from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core import LightningModule from pytorch_lightning.utilities import ( diff --git a/pytorch_lightning/cluster_environments/cluster_environment.py b/pytorch_lightning/cluster_environments/cluster_environment.py index 5196e44411082..2139f5bac0020 100644 --- a/pytorch_lightning/cluster_environments/cluster_environment.py +++ b/pytorch_lightning/cluster_environments/cluster_environment.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from pytorch_lightning.plugins.plugin import LightningPlugin +from pytorch_lightning.plugins.legacy.plugin import LightningPlugin class ClusterEnvironment(LightningPlugin): diff --git a/pytorch_lightning/plugins/legacy/__init__.py b/pytorch_lightning/plugins/legacy/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pytorch_lightning/plugins/apex.py b/pytorch_lightning/plugins/legacy/apex.py similarity index 98% rename from pytorch_lightning/plugins/apex.py rename to pytorch_lightning/plugins/legacy/apex.py index 6b26a8b3a8232..49a9c57fd5927 100644 --- a/pytorch_lightning/plugins/apex.py +++ b/pytorch_lightning/plugins/legacy/apex.py @@ -17,7 +17,7 @@ from torch.optim.optimizer import Optimizer from pytorch_lightning.core.lightning import LightningModule -from pytorch_lightning.plugins.precision_plugin import PrecisionPlugin +from pytorch_lightning.plugins.legacy.precision_plugin import PrecisionPlugin from pytorch_lightning.utilities import _APEX_AVAILABLE, AMPType from pytorch_lightning.utilities.distributed import rank_zero_warn diff --git a/pytorch_lightning/plugins/ddp_plugin.py b/pytorch_lightning/plugins/legacy/ddp_plugin.py similarity index 99% rename from pytorch_lightning/plugins/ddp_plugin.py rename to pytorch_lightning/plugins/legacy/ddp_plugin.py index ad9fb1cc3b58f..8da0c34dedfdf 100644 --- a/pytorch_lightning/plugins/ddp_plugin.py +++ b/pytorch_lightning/plugins/legacy/ddp_plugin.py @@ -22,7 +22,7 @@ from pytorch_lightning import _logger as log from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedModule, prepare_for_backward -from pytorch_lightning.plugins.plugin import LightningPlugin +from pytorch_lightning.plugins.legacy.plugin import LightningPlugin from pytorch_lightning.utilities import DeviceType diff --git a/pytorch_lightning/plugins/ddp_sequential_plugin.py b/pytorch_lightning/plugins/legacy/ddp_sequential_plugin.py similarity index 99% rename from pytorch_lightning/plugins/ddp_sequential_plugin.py rename to pytorch_lightning/plugins/legacy/ddp_sequential_plugin.py index f8dcecd1e546d..b9d71b4ab85b0 100644 --- a/pytorch_lightning/plugins/ddp_sequential_plugin.py +++ b/pytorch_lightning/plugins/legacy/ddp_sequential_plugin.py @@ -21,7 +21,7 @@ from pytorch_lightning import _logger as log from pytorch_lightning import LightningModule -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import _FAIRSCALE_PIPE_AVAILABLE, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/plugins/native_amp.py b/pytorch_lightning/plugins/legacy/native_amp.py similarity index 97% rename from pytorch_lightning/plugins/native_amp.py rename to pytorch_lightning/plugins/legacy/native_amp.py index 9df1ba3262afa..0a38a90acb79f 100644 --- a/pytorch_lightning/plugins/native_amp.py +++ b/pytorch_lightning/plugins/legacy/native_amp.py @@ -17,7 +17,7 @@ from torch.optim import Optimizer from pytorch_lightning.core.optimizer import LightningOptimizer -from pytorch_lightning.plugins.precision_plugin import PrecisionPlugin +from pytorch_lightning.plugins.legacy.precision_plugin import PrecisionPlugin class NativeAMPPlugin(PrecisionPlugin): diff --git a/pytorch_lightning/plugins/plugin.py b/pytorch_lightning/plugins/legacy/plugin.py similarity index 100% rename from pytorch_lightning/plugins/plugin.py rename to 
pytorch_lightning/plugins/legacy/plugin.py diff --git a/pytorch_lightning/plugins/plugin_connector.py b/pytorch_lightning/plugins/legacy/plugin_connector.py similarity index 95% rename from pytorch_lightning/plugins/plugin_connector.py rename to pytorch_lightning/plugins/legacy/plugin_connector.py index 2dbb657199704..90822e2286722 100644 --- a/pytorch_lightning/plugins/plugin_connector.py +++ b/pytorch_lightning/plugins/legacy/plugin_connector.py @@ -15,11 +15,11 @@ from typing import List, Optional, Union from pytorch_lightning.cluster_environments import ClusterEnvironment -from pytorch_lightning.plugins.apex import ApexPlugin -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.native_amp import NativeAMPPlugin -from pytorch_lightning.plugins.plugin import LightningPlugin -from pytorch_lightning.plugins.sharded_plugin import DDPShardedPlugin +from pytorch_lightning.plugins.legacy.apex import ApexPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.native_amp import NativeAMPPlugin +from pytorch_lightning.plugins.legacy.plugin import LightningPlugin +from pytorch_lightning.plugins.legacy.sharded_plugin import DDPShardedPlugin from pytorch_lightning.utilities import AMPType, rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/plugins/precision_plugin.py b/pytorch_lightning/plugins/legacy/precision_plugin.py similarity index 95% rename from pytorch_lightning/plugins/precision_plugin.py rename to pytorch_lightning/plugins/legacy/precision_plugin.py index aaac3ede3c623..1041e9d6b0faf 100644 --- a/pytorch_lightning/plugins/precision_plugin.py +++ b/pytorch_lightning/plugins/legacy/precision_plugin.py @@ -15,7 +15,7 @@ from torch.optim import Optimizer -from pytorch_lightning.plugins.plugin import LightningPlugin +from pytorch_lightning.plugins.legacy.plugin import LightningPlugin class PrecisionPlugin(LightningPlugin): diff --git a/pytorch_lightning/plugins/rpc_plugin.py b/pytorch_lightning/plugins/legacy/rpc_plugin.py similarity index 98% rename from pytorch_lightning/plugins/rpc_plugin.py rename to pytorch_lightning/plugins/legacy/rpc_plugin.py index bbcc1f16e4b1d..b731634d44bed 100644 --- a/pytorch_lightning/plugins/rpc_plugin.py +++ b/pytorch_lightning/plugins/legacy/rpc_plugin.py @@ -18,7 +18,7 @@ import torch from pytorch_lightning.core.lightning import LightningModule -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin from pytorch_lightning.utilities import _RPC_AVAILABLE DEFAULT_RPC_TIMEOUT_SEC = 60. 
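
The multi_gpu.rst change earlier in this diff keeps the documented DDP extension points; only their import path moves. A minimal sketch of overriding the relocated DDPPlugin (the hook names come from the docs text in this diff; the argument lists are illustrative and follow this release's signatures, so treat them as assumptions):

    from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin


    class MyDDPPlugin(DDPPlugin):
        def init_ddp_connection(self, *args, **kwargs):
            # Custom process-group setup could go here; deferring to the parent
            # keeps Lightning's default torch.distributed initialisation.
            return super().init_ddp_connection(*args, **kwargs)

        def configure_ddp(self, model, device_ids):
            # Return the DistributedDataParallel-wrapped model; the parent
            # applies Lightning's default wrapping.
            return super().configure_ddp(model, device_ids)
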
diff --git a/pytorch_lightning/plugins/sharded_native_amp_plugin.py b/pytorch_lightning/plugins/legacy/sharded_native_amp_plugin.py similarity index 94% rename from pytorch_lightning/plugins/sharded_native_amp_plugin.py rename to pytorch_lightning/plugins/legacy/sharded_native_amp_plugin.py index 5ddd29521203d..f507c8c3bd6c0 100644 --- a/pytorch_lightning/plugins/sharded_native_amp_plugin.py +++ b/pytorch_lightning/plugins/legacy/sharded_native_amp_plugin.py @@ -15,7 +15,7 @@ from torch.optim import Optimizer -from pytorch_lightning.plugins.native_amp import NativeAMPPlugin +from pytorch_lightning.plugins.legacy.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import _FAIRSCALE_AVAILABLE, _NATIVE_AMP_AVAILABLE if _NATIVE_AMP_AVAILABLE and _FAIRSCALE_AVAILABLE: diff --git a/pytorch_lightning/plugins/sharded_plugin.py b/pytorch_lightning/plugins/legacy/sharded_plugin.py similarity index 95% rename from pytorch_lightning/plugins/sharded_plugin.py rename to pytorch_lightning/plugins/legacy/sharded_plugin.py index 53439ebc2a3df..a30f0c891514c 100644 --- a/pytorch_lightning/plugins/sharded_plugin.py +++ b/pytorch_lightning/plugins/legacy/sharded_plugin.py @@ -15,8 +15,8 @@ from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.optimizer import is_lightning_optimizer -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.sharded_native_amp_plugin import ShardedNativeAMPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.sharded_native_amp_plugin import ShardedNativeAMPPlugin from pytorch_lightning.utilities import _FAIRSCALE_AVAILABLE, AMPType, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/trainer/connectors/precision_connector.py b/pytorch_lightning/trainer/connectors/precision_connector.py index 4633e328cb3fa..551e855cdd53f 100644 --- a/pytorch_lightning/trainer/connectors/precision_connector.py +++ b/pytorch_lightning/trainer/connectors/precision_connector.py @@ -13,8 +13,8 @@ # limitations under the License. 
from pytorch_lightning import _logger as log -from pytorch_lightning.plugins.apex import ApexPlugin -from pytorch_lightning.plugins.native_amp import NativeAMPPlugin +from pytorch_lightning.plugins.legacy.apex import ApexPlugin +from pytorch_lightning.plugins.legacy.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import _APEX_AVAILABLE, _NATIVE_AMP_AVAILABLE, AMPType, rank_zero_warn diff --git a/pytorch_lightning/trainer/data_loading.py b/pytorch_lightning/trainer/data_loading.py index 38198c9f39e10..5031357b41615 100644 --- a/pytorch_lightning/trainer/data_loading.py +++ b/pytorch_lightning/trainer/data_loading.py @@ -21,7 +21,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler from torch.utils.data.distributed import DistributedSampler -from pytorch_lightning.accelerators.accelerator import Accelerator +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator from pytorch_lightning.core import LightningModule from pytorch_lightning.trainer.supporters import CombinedLoader from pytorch_lightning.utilities import rank_zero_warn diff --git a/pytorch_lightning/trainer/properties.py b/pytorch_lightning/trainer/properties.py index eb8e47ce93195..760a621db6914 100644 --- a/pytorch_lightning/trainer/properties.py +++ b/pytorch_lightning/trainer/properties.py @@ -17,7 +17,7 @@ from argparse import ArgumentParser, Namespace from typing import cast, List, Optional, Type, TypeVar, Union -from pytorch_lightning.accelerators.accelerator import Accelerator +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator from pytorch_lightning.callbacks import Callback, EarlyStopping, ModelCheckpoint, ProgressBarBase from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.loggers.base import LightningLoggerBase diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index f1499edb10db5..b97377a150e53 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -23,14 +23,14 @@ from torch.utils.data import DataLoader from pytorch_lightning import _logger as log -from pytorch_lightning.accelerators.accelerator import Accelerator -from pytorch_lightning.accelerators.accelerator_connector import AcceleratorConnector +from pytorch_lightning.accelerators.legacy.accelerator import Accelerator +from pytorch_lightning.accelerators.legacy.accelerator_connector import AcceleratorConnector from pytorch_lightning.callbacks import Callback from pytorch_lightning.core.datamodule import LightningDataModule from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.step_result import Result from pytorch_lightning.loggers import LightningLoggerBase -from pytorch_lightning.plugins.plugin_connector import PluginConnector +from pytorch_lightning.plugins.legacy.plugin_connector import PluginConnector from pytorch_lightning.profiler import BaseProfiler from pytorch_lightning.trainer.callback_hook import TrainerCallbackHookMixin from pytorch_lightning.trainer.configuration_validator import ConfigValidator diff --git a/setup.cfg b/setup.cfg index a1f25a8cb6610..c13a2c26982fd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -112,7 +112,7 @@ ignore_errors = True ignore_errors = True # todo: add proper typing to this module... -[mypy-pytorch_lightning.accelerators.*] +[mypy-pytorch_lightning.accelerators.legacy.*] ignore_errors = True # todo: add proper typing to this module... 
@@ -123,10 +123,6 @@ ignore_errors = True [mypy-pytorch_lightning.loggers.*] ignore_errors = True -# todo: add proper typing to this module... -[mypy-pytorch_lightning.logging.*] -ignore_errors = True - # todo: add proper typing to this module... [mypy-pytorch_lightning.metrics.*] ignore_errors = True @@ -144,7 +140,7 @@ ignore_errors = True ignore_errors = True # todo: add proper typing to this module... -[mypy-pytorch_lightning.plugins.*] +[mypy-pytorch_lightning.plugins.legacy.*] ignore_errors = True # todo: add proper typing to this module... diff --git a/tests/accelerators/__init__.py b/tests/accelerators/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/tests/backends/__init__.py b/tests/accelerators/legacy/__init__.py similarity index 76% rename from tests/backends/__init__.py rename to tests/accelerators/legacy/__init__.py index 2f23544ab7c9e..e165d9d4dbbcf 100644 --- a/tests/backends/__init__.py +++ b/tests/accelerators/legacy/__init__.py @@ -1,3 +1,5 @@ +# todo: feel free to move any of these "legacy" tests up... + try: from dtrun.launcher import DDPLauncher except ImportError: diff --git a/tests/backends/ddp_model.py b/tests/accelerators/legacy/ddp_model.py similarity index 100% rename from tests/backends/ddp_model.py rename to tests/accelerators/legacy/ddp_model.py diff --git a/tests/backends/test_accelerator_connector.py b/tests/accelerators/legacy/test_accelerator_connector.py similarity index 100% rename from tests/backends/test_accelerator_connector.py rename to tests/accelerators/legacy/test_accelerator_connector.py diff --git a/tests/backends/test_ddp.py b/tests/accelerators/legacy/test_ddp.py similarity index 98% rename from tests/backends/test_ddp.py rename to tests/accelerators/legacy/test_ddp.py index e8596572b87cf..252489bb48276 100644 --- a/tests/backends/test_ddp.py +++ b/tests/accelerators/legacy/test_ddp.py @@ -16,7 +16,7 @@ import pytest import torch -from tests.backends import ddp_model, DDPLauncher +from tests.accelerators.legacy import ddp_model, DDPLauncher from tests.utilities.distributed import call_training_script diff --git a/tests/backends/test_ddp_spawn.py b/tests/accelerators/legacy/test_ddp_spawn.py similarity index 100% rename from tests/backends/test_ddp_spawn.py rename to tests/accelerators/legacy/test_ddp_spawn.py diff --git a/tests/backends/test_dp.py b/tests/accelerators/legacy/test_dp.py similarity index 100% rename from tests/backends/test_dp.py rename to tests/accelerators/legacy/test_dp.py diff --git a/tests/backends/test_tpu_backend.py b/tests/accelerators/legacy/test_tpu_backend.py similarity index 100% rename from tests/backends/test_tpu_backend.py rename to tests/accelerators/legacy/test_tpu_backend.py diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index dd7f7e8614f6f..4a0f0499b20e8 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -20,7 +20,7 @@ import torch from pytorch_lightning import LightningDataModule, Trainer -from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator +from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.trainer.states import TrainerState from tests.base import BoringDataModule, BoringModel diff --git a/tests/deprecated_api/test_remove_1-4.py b/tests/deprecated_api/test_remove_1-4.py index 00f02076fccef..fc3b201d88a74 100644 --- a/tests/deprecated_api/test_remove_1-4.py +++ 
b/tests/deprecated_api/test_remove_1-4.py @@ -19,7 +19,7 @@ from pytorch_lightning import Trainer from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin from tests.base import BoringModel from tests.deprecated_api import _soft_unimport_module diff --git a/tests/models/test_gpu.py b/tests/models/test_gpu.py index 7cfeb8f0ae53e..dc8f3f1e4d50d 100644 --- a/tests/models/test_gpu.py +++ b/tests/models/test_gpu.py @@ -21,7 +21,7 @@ import tests.base.develop_pipelines as tpipes import tests.base.develop_utils as tutils from pytorch_lightning import Trainer -from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator +from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator from pytorch_lightning.utilities import device_parser from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import BoringModel diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index a25a8181e763a..f45d3f423164d 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -19,7 +19,7 @@ import torch from pytorch_lightning import Trainer -from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator +from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator from pytorch_lightning.trainer.states import TrainerState from tests.base import BoringModel, EvalModelTemplate, RandomDataset diff --git a/tests/models/test_horovod.py b/tests/models/test_horovod.py index 752ce4b60d42f..85e91c4ae9d84 100644 --- a/tests/models/test_horovod.py +++ b/tests/models/test_horovod.py @@ -26,7 +26,7 @@ import tests.base.develop_pipelines as tpipes import tests.base.develop_utils as tutils from pytorch_lightning import Trainer -from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator +from pytorch_lightning.accelerators.legacy.horovod_accelerator import HorovodAccelerator from pytorch_lightning.metrics.classification.accuracy import Accuracy from pytorch_lightning.trainer.states import TrainerState from pytorch_lightning.utilities import _APEX_AVAILABLE, _HOROVOD_AVAILABLE, _NATIVE_AMP_AVAILABLE @@ -340,7 +340,7 @@ def _compute_batch(): # num_workers = 8 # init_lr = hparams.get('learning_rate') * num_workers # -# with patch('pytorch_lightning.accelerators.horovod_backend.hvd.size') as mock_hvd_size: +# with patch('pytorch_lightning.accelerators.legacy.horovod_backend.hvd.size') as mock_hvd_size: # mock_hvd_size.return_value = 8 # # # fit model diff --git a/tests/models/test_sync_batchnorm.py b/tests/models/test_sync_batchnorm.py index fe00acff62624..444067d82bd9e 100644 --- a/tests/models/test_sync_batchnorm.py +++ b/tests/models/test_sync_batchnorm.py @@ -17,7 +17,7 @@ import torch.nn.functional as F from pytorch_lightning import LightningModule, seed_everything, Trainer -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin from pytorch_lightning.trainer.states import TrainerState from pytorch_lightning.utilities import FLOAT16_EPSILON from tests.base.datamodules import MNISTDataModule diff --git a/tests/plugins/legacy/__init__.py b/tests/plugins/legacy/__init__.py new file mode 100644 index 0000000000000..b1fca65e60042 --- /dev/null +++ b/tests/plugins/legacy/__init__.py @@ -0,0 +1 @@ +# todo: feel free to move any of these "legacy" tests up... 
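
The accelerators.rst snippet earlier in this diff still documents subclassing Accelerator for arbitrary hardware; only the base-class import moves under `legacy`. A minimal sketch under the new path (the constructor body and the `super().__init__` call are assumed, typical completions, not part of this PR):

    from pytorch_lightning.accelerators.legacy.accelerator import Accelerator


    class MyAccelerator(Accelerator):
        def __init__(self, trainer, cluster_environment=None):
            # Signature mirrors the docs example in this diff; forwarding the
            # trainer and cluster environment to the base class is assumed.
            super().__init__(trainer, cluster_environment)
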
diff --git a/tests/plugins/test_amp_plugin.py b/tests/plugins/legacy/test_amp_plugin.py similarity index 98% rename from tests/plugins/test_amp_plugin.py rename to tests/plugins/legacy/test_amp_plugin.py index 1e98740f99d62..48833e292564a 100644 --- a/tests/plugins/test_amp_plugin.py +++ b/tests/plugins/legacy/test_amp_plugin.py @@ -6,7 +6,7 @@ from pytorch_lightning import Trainer from pytorch_lightning.callbacks import Callback -from pytorch_lightning.plugins.native_amp import NativeAMPPlugin +from pytorch_lightning.plugins.legacy.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import _NATIVE_AMP_AVAILABLE from tests.base.boring_model import BoringModel diff --git a/tests/plugins/test_apex_plugin.py b/tests/plugins/legacy/test_apex_plugin.py similarity index 97% rename from tests/plugins/test_apex_plugin.py rename to tests/plugins/legacy/test_apex_plugin.py index df6d76547bcf6..1f452933ec6a0 100644 --- a/tests/plugins/test_apex_plugin.py +++ b/tests/plugins/legacy/test_apex_plugin.py @@ -5,7 +5,7 @@ from pytorch_lightning import Trainer from pytorch_lightning.callbacks import Callback -from pytorch_lightning.plugins.apex import ApexPlugin +from pytorch_lightning.plugins.legacy.apex import ApexPlugin from pytorch_lightning.utilities import _APEX_AVAILABLE from tests.base.boring_model import BoringModel diff --git a/tests/plugins/test_ddp_plugin.py b/tests/plugins/legacy/test_ddp_plugin.py similarity index 97% rename from tests/plugins/test_ddp_plugin.py rename to tests/plugins/legacy/test_ddp_plugin.py index fe8fc555ba06c..4bdaad74b67ab 100644 --- a/tests/plugins/test_ddp_plugin.py +++ b/tests/plugins/legacy/test_ddp_plugin.py @@ -6,8 +6,8 @@ from pytorch_lightning import Trainer from pytorch_lightning.callbacks import Callback -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from pytorch_lightning.plugins.sharded_plugin import DDPShardedPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.sharded_plugin import DDPShardedPlugin from pytorch_lightning.utilities import _FAIRSCALE_AVAILABLE from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base.boring_model import BoringModel diff --git a/tests/plugins/test_ddp_sequential_plugin.py b/tests/plugins/legacy/test_ddp_sequential_plugin.py similarity index 98% rename from tests/plugins/test_ddp_sequential_plugin.py rename to tests/plugins/legacy/test_ddp_sequential_plugin.py index 460d195f6723b..ddb1bd6768e29 100644 --- a/tests/plugins/test_ddp_sequential_plugin.py +++ b/tests/plugins/legacy/test_ddp_sequential_plugin.py @@ -20,7 +20,7 @@ from torch import nn from pytorch_lightning import LightningModule, Trainer -from pytorch_lightning.plugins.ddp_sequential_plugin import DDPSequentialPlugin +from pytorch_lightning.plugins.legacy.ddp_sequential_plugin import DDPSequentialPlugin from pytorch_lightning.utilities import _FAIRSCALE_PIPE_AVAILABLE from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base.boring_model import RandomDataset diff --git a/tests/plugins/test_plugin.py b/tests/plugins/legacy/test_plugin.py similarity index 97% rename from tests/plugins/test_plugin.py rename to tests/plugins/legacy/test_plugin.py index 05789596879b4..4b01b4402611d 100644 --- a/tests/plugins/test_plugin.py +++ b/tests/plugins/legacy/test_plugin.py @@ -17,8 +17,8 @@ import pytest from pytorch_lightning import Callback, Trainer -from pytorch_lightning.plugins.ddp_plugin import DDPPlugin -from 
pytorch_lightning.plugins.native_amp import NativeAMPPlugin +from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin +from pytorch_lightning.plugins.legacy.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import AMPType from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base.boring_model import BoringModel diff --git a/tests/plugins/test_plugin_properties.py b/tests/plugins/legacy/test_plugin_properties.py similarity index 91% rename from tests/plugins/test_plugin_properties.py rename to tests/plugins/legacy/test_plugin_properties.py index 5466bd07cd03a..1a6556c0f76ff 100644 --- a/tests/plugins/test_plugin_properties.py +++ b/tests/plugins/legacy/test_plugin_properties.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from pytorch_lightning import Trainer -from pytorch_lightning.plugins.plugin_connector import LightningCustomPlugins, PluginConnector +from pytorch_lightning.plugins.legacy.plugin_connector import LightningCustomPlugins, PluginConnector def test_available_plugins_trainer(): diff --git a/tests/plugins/test_rpc_plugin.py b/tests/plugins/legacy/test_rpc_plugin.py similarity index 98% rename from tests/plugins/test_rpc_plugin.py rename to tests/plugins/legacy/test_rpc_plugin.py index a28cd4b50e4f4..77937c16058dc 100644 --- a/tests/plugins/test_rpc_plugin.py +++ b/tests/plugins/legacy/test_rpc_plugin.py @@ -7,7 +7,7 @@ from pytorch_lightning import LightningModule, Trainer from pytorch_lightning.callbacks import Callback -from pytorch_lightning.plugins.rpc_plugin import RPCPlugin +from pytorch_lightning.plugins.legacy.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import _RPC_AVAILABLE from tests.base.boring_model import BoringModel diff --git a/tests/plugins/test_sharded_plugin.py b/tests/plugins/legacy/test_sharded_plugin.py similarity index 98% rename from tests/plugins/test_sharded_plugin.py rename to tests/plugins/legacy/test_sharded_plugin.py index 80226bc8ef941..834aa059be3a6 100644 --- a/tests/plugins/test_sharded_plugin.py +++ b/tests/plugins/legacy/test_sharded_plugin.py @@ -7,8 +7,8 @@ from pytorch_lightning import Trainer from pytorch_lightning.callbacks import Callback -from pytorch_lightning.plugins.sharded_native_amp_plugin import ShardedNativeAMPPlugin -from pytorch_lightning.plugins.sharded_plugin import _FAIRSCALE_AVAILABLE, DDPShardedPlugin +from pytorch_lightning.plugins.legacy.sharded_native_amp_plugin import ShardedNativeAMPPlugin +from pytorch_lightning.plugins.legacy.sharded_plugin import _FAIRSCALE_AVAILABLE, DDPShardedPlugin from pytorch_lightning.utilities import _APEX_AVAILABLE, _NATIVE_AMP_AVAILABLE from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base.boring_model import BoringModel diff --git a/tests/special_tests.sh b/tests/special_tests.sh index 70dd10ce3d60e..f40366a4cd83e 100644 --- a/tests/special_tests.sh +++ b/tests/special_tests.sh @@ -16,10 +16,10 @@ set -e export PL_RUNNING_SPECIAL_TESTS=1 DEFAULTS="-m coverage run --source pytorch_lightning -a -m pytest --verbose --capture=no" python ${DEFAULTS} tests/trainer/optimization/test_manual_optimization.py::test_step_with_optimizer_closure_with_different_frequencies_ddp -python ${DEFAULTS} tests/plugins/test_rpc_plugin.py::test_rpc_function_calls_ddp -python ${DEFAULTS} tests/plugins/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_manual -python ${DEFAULTS} 
tests/plugins/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_manual_amp -python ${DEFAULTS} tests/plugins/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_automatic +python ${DEFAULTS} tests/plugins/legacy/test_rpc_plugin.py::test_rpc_function_calls_ddp +python ${DEFAULTS} tests/plugins/legacy/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_manual +python ${DEFAULTS} tests/plugins/legacy/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_manual_amp +python ${DEFAULTS} tests/plugins/legacy/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_automatic python ${DEFAULTS} tests/utilities/test_all_gather_grad.py::test_all_gather_collection # python ${DEFAULTS} tests/plugins/test_ddp_sequential_plugin.py::test_ddp_sequential_plugin_ddp_rpc_with_wrong_balance python ${DEFAULTS} tests/trainer/logging_/test_train_loop_logging_1_0.py::test_logging_sync_dist_true_ddp diff --git a/tests/trainer/properties/test_get_model.py b/tests/trainer/properties/test_get_model.py index 16434f390b90a..170baa6d0fd67 100644 --- a/tests/trainer/properties/test_get_model.py +++ b/tests/trainer/properties/test_get_model.py @@ -17,7 +17,7 @@ import torch from pytorch_lightning import Trainer -from tests.backends import DDPLauncher +from tests.accelerators.legacy import DDPLauncher from tests.base.boring_model import BoringModel
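
Taken together, the renames map the old module paths one-to-one onto the new `legacy` subpackages, and the package-level re-exports in `pytorch_lightning/accelerators/__init__.py` are updated in place. A short sketch of how downstream imports line up after this change (commented lines show the pre-rename paths for comparison only):

    # Pre-rename module paths, removed by this PR:
    #   from pytorch_lightning.plugins.ddp_plugin import DDPPlugin
    #   from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator

    # New module locations introduced by this PR:
    from pytorch_lightning.plugins.legacy.ddp_plugin import DDPPlugin
    from pytorch_lightning.plugins.legacy.sharded_plugin import DDPShardedPlugin
    from pytorch_lightning.accelerators.legacy.gpu_accelerator import GPUAccelerator

    # The top-level re-exports are updated in place, so package-level imports
    # keep working unchanged:
    from pytorch_lightning.accelerators import GPUAccelerator  # noqa: F811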