From dc71f0b59d8a7ceec6ca4f1612e648bc63249fbc Mon Sep 17 00:00:00 2001
From: rohitgr7
Date: Tue, 26 May 2020 00:19:23 +0530
Subject: [PATCH] Remove unused param tpu_core_idx

---
 pytorch_lightning/trainer/distrib_parts.py | 2 +-
 pytorch_lightning/trainer/trainer.py       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/trainer/distrib_parts.py b/pytorch_lightning/trainer/distrib_parts.py
index dabe72f27d823..4ff8b7485916c 100644
--- a/pytorch_lightning/trainer/distrib_parts.py
+++ b/pytorch_lightning/trainer/distrib_parts.py
@@ -501,7 +501,7 @@ def single_gpu_train(self, model):
 
         self.run_pretrain_routine(model)
 
-    def tpu_train(self, tpu_core_idx, model):
+    def tpu_train(self, model):
         # put model on tpu
         self._device = xm.xla_device(self.tpu_id) if self.tpu_id is not None else xm.xla_device()
         model.to(self._device)
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 25ecd5435987e..741759e876953 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -885,7 +885,7 @@ def fit(
 
             # train
             if self.tpu_id is not None:
-                self.tpu_train(self.tpu_id, model)
+                self.tpu_train(model)
             else:
                 xmp.spawn(self.tpu_train, args=(model,), nprocs=self.tpu_cores, start_method=start_method)
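
Note (not part of the patch): for readers unfamiliar with the torch_xla multiprocessing
launcher used in the trainer.py hunk, below is a minimal, self-contained sketch of how
xmp.spawn is typically invoked. The function name _train_fn and the args tuple are
illustrative assumptions, not code from pytorch-lightning; the sketch only shows that
xmp.spawn forwards a per-process index as the first positional argument, followed by the
entries of args.

    import torch_xla.core.xla_model as xm
    import torch_xla.distributed.xla_multiprocessing as xmp

    def _train_fn(index, lr):
        # `index` is the process ordinal supplied by xmp.spawn; the XLA device
        # itself is resolved via xm.xla_device() inside each spawned process.
        device = xm.xla_device()
        print(f"process {index}: training on {device} with lr={lr}")

    if __name__ == "__main__":
        # Spawn one process per TPU core; start_method mirrors the option used
        # at the patched call site ('fork' or 'spawn').
        xmp.spawn(_train_fn, args=(1e-3,), nprocs=8, start_method="spawn")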