diff --git a/pytorch_lightning/accelerators/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
similarity index 99%
rename from pytorch_lightning/accelerators/accelerator_connector.py
rename to pytorch_lightning/trainer/connectors/accelerator_connector.py
index 644b382b6bba2..2c4eafb6ed0e8 100644
--- a/pytorch_lightning/accelerators/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -60,7 +60,7 @@
     import horovod.torch as hvd
 
 
-class BackendConnector(object):
+class AcceleratorConnector(object):
 
     def __init__(
         self,
diff --git a/pytorch_lightning/trainer/deprecated_api.py b/pytorch_lightning/trainer/deprecated_api.py
index a6aeeb7d73f78..71b557bf75a2c 100644
--- a/pytorch_lightning/trainer/deprecated_api.py
+++ b/pytorch_lightning/trainer/deprecated_api.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from pytorch_lightning.accelerators.accelerator_connector import BackendConnector
+from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 from pytorch_lightning.trainer.states import RunningStage
 from pytorch_lightning.utilities import DeviceType, DistributedType, rank_zero_warn
 
@@ -22,7 +22,7 @@ class DeprecatedDistDeviceAttributes:
     _device_type: DeviceType
     _running_stage: RunningStage
     num_gpus: int
-    accelerator_connector: BackendConnector
+    accelerator_connector: AcceleratorConnector
 
     @property
     def on_cpu(self) -> bool:
diff --git a/pytorch_lightning/trainer/properties.py b/pytorch_lightning/trainer/properties.py
index 1f0cc52870f7e..feb0b3f4d8fb3 100644
--- a/pytorch_lightning/trainer/properties.py
+++ b/pytorch_lightning/trainer/properties.py
@@ -21,7 +21,7 @@
 from torch.optim import Optimizer
 
 from pytorch_lightning.accelerators import Accelerator
-from pytorch_lightning.accelerators.accelerator_connector import BackendConnector
+from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint, ProgressBarBase
 from pytorch_lightning.callbacks.base import Callback
 from pytorch_lightning.core.lightning import LightningModule
@@ -51,7 +51,7 @@ class TrainerProperties(ABC):
     _state: TrainerState
     _weights_save_path: str
-    accelerator_connector: BackendConnector
+    accelerator_connector: AcceleratorConnector
     callbacks: List[Callback]
     checkpoint_connector: CheckpointConnector
     limit_val_batches: int
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 2453a08ba9067..b6d9769f883ac 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -22,7 +22,7 @@
 from pytorch_lightning import _logger as log
 from pytorch_lightning.accelerators import Accelerator
-from pytorch_lightning.accelerators.accelerator_connector import BackendConnector
+from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
 from pytorch_lightning.callbacks import Callback
 from pytorch_lightning.core.datamodule import LightningDataModule
 from pytorch_lightning.core.lightning import LightningModule
@@ -308,7 +308,7 @@ def __init__(
         self.data_connector = DataConnector(self)
         self.optimizer_connector = OptimizerConnector(self)
-        self.accelerator_connector = BackendConnector(
+        self.accelerator_connector = AcceleratorConnector(
             num_processes, tpu_cores, distributed_backend, auto_select_gpus, gpus, num_nodes, sync_batchnorm,
             benchmark, replace_sampler_ddp, deterministic, precision, amp_backend, amp_level, plugins
         )