1 parent 19e67d1 commit ae2cd9b
pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -112,12 +112,6 @@ def __init__(
         self._training_type_plugin: Optional[TrainingTypePlugin] = None
         self._cluster_environment: Optional[ClusterEnvironment] = None

-        # init the default rank if exists
-        # we need to call this here or NVIDIA flags and other messaging in init will show on all ranks
-        # this way we only show it on rank 0
-        if "LOCAL_RANK" in os.environ:
-            rank_zero_only.rank = int(os.environ["LOCAL_RANK"])
-
         # for gpus allow int, string and gpu list
         if auto_select_gpus and isinstance(gpus, int):
             self.gpus = pick_multiple_gpus(gpus)
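For context: the block removed by this commit seeded rank_zero_only.rank from the LOCAL_RANK environment variable before anything else ran, so NVIDIA flags and other startup messages in __init__ printed only on rank 0 instead of once per process. A minimal sketch of that mechanism, assuming the pytorch_lightning 1.x import path; announce_startup is a hypothetical helper for illustration, not part of the library:

import os

from pytorch_lightning.utilities import rank_zero_only

# Mirror the removed block: seed the rank early, otherwise every process
# believes it is rank 0 until distributed initialization has run.
if "LOCAL_RANK" in os.environ:
    rank_zero_only.rank = int(os.environ["LOCAL_RANK"])

@rank_zero_only
def announce_startup() -> None:
    # hypothetical helper, shown only to demonstrate the gating behavior
    print("startup messages, shown once instead of once per process")

announce_startup()  # no-op on any rank other than 0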