This change removes pytorch_lightning.strategies.single_device from the mypy ignore list and adds the annotations the module needs to pass strict type checking.

2 files changed: +4 −4

pyproject.toml
@@ -74,7 +74,6 @@ module = [
     "pytorch_lightning.strategies.parallel",
     "pytorch_lightning.strategies.sharded",
     "pytorch_lightning.strategies.sharded_spawn",
-    "pytorch_lightning.strategies.single_device",
     "pytorch_lightning.strategies.single_tpu",
     "pytorch_lightning.strategies.tpu_spawn",
     "pytorch_lightning.strategies.strategy",
src/pytorch_lightning/strategies/single_device.py
@@ -21,7 +21,7 @@
 import pytorch_lightning as pl
 from pytorch_lightning.plugins.io.checkpoint_plugin import CheckpointIO
 from pytorch_lightning.plugins.precision import PrecisionPlugin
-from pytorch_lightning.strategies.strategy import Strategy
+from pytorch_lightning.strategies.strategy import Strategy, TBroadcast
 from pytorch_lightning.utilities.types import _DEVICE
 
 
@@ -66,6 +66,7 @@ def root_device(self) -> torch.device:
         return self._root_device
 
     def model_to_device(self) -> None:
+        assert self.model is not None, "self.model must be set before self.model.to()"
         self.model.to(self.root_device)
 
     def setup(self, trainer: pl.Trainer) -> None:
@@ -76,10 +77,10 @@ def setup(self, trainer: pl.Trainer) -> None:
     def is_global_zero(self) -> bool:
         return True
 
-    def barrier(self, *args, **kwargs) -> None:
+    def barrier(self, *args: Any, **kwargs: Any) -> None:
         pass
 
-    def broadcast(self, obj: object, src: int = 0) -> object:
+    def broadcast(self, obj: TBroadcast, src: int = 0) -> TBroadcast:
         return obj
 
     @classmethod
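The effect of the typing changes is easier to see in isolation. Below is a minimal, self-contained sketch (not the library's actual code) of the surface this diff touches, assuming TBroadcast is an unbound TypeVar, as its use here implies, and that self.model is Optional, which is what the new assert narrows away for mypy:

from typing import Any, Optional, TypeVar

import torch
from torch import nn

TBroadcast = TypeVar("TBroadcast")  # assumption: an unbound TypeVar


class SingleDeviceStrategy:
    """Sketch of the single-device strategy methods touched by this diff."""

    def __init__(self, device: torch.device) -> None:
        self._root_device = device
        self.model: Optional[nn.Module] = None  # assumption: attached later by setup()

    @property
    def root_device(self) -> torch.device:
        return self._root_device

    def model_to_device(self) -> None:
        # The assert narrows Optional[nn.Module] to nn.Module for mypy and
        # fails loudly if no model was attached before the move.
        assert self.model is not None, "self.model must be set before self.model.to()"
        self.model.to(self.root_device)

    def barrier(self, *args: Any, **kwargs: Any) -> None:
        # A single process has no peers to synchronize with, so this is a no-op.
        pass

    def broadcast(self, obj: TBroadcast, src: int = 0) -> TBroadcast:
        # With one process, broadcast is the identity; the TypeVar lets callers
        # keep the concrete static type of whatever they pass in.
        return obj


if __name__ == "__main__":
    strategy = SingleDeviceStrategy(torch.device("cpu"))
    # mypy now infers dict[str, float] here instead of plain object:
    hparams = strategy.broadcast({"lr": 0.1})
    strategy.barrier()  # no-op on a single device

Replacing object with the TypeVar is what lets downstream call sites type-check without casts, and annotating *args and **kwargs lets the defs count as fully typed under mypy's strict settings.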