Skip to content

Commit aea4f20

Browse files
committed
formatting errors
1 parent 413ac34 commit aea4f20

File tree

6 files changed: +8 lines added, -9 lines removed

pytorch_lightning/core/saving.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -154,6 +154,6 @@ def save_hparams_to_yaml(config_yaml, hparams: Union[dict, Namespace]) -> None:
154154
def convert(val: str) -> Union[int, float, bool, str]:
155155
try:
156156
return ast.literal_eval(val)
157-
except (ValueError, SyntaxError) as e:
158-
log.debug(e)
157+
except (ValueError, SyntaxError) as err:
158+
log.debug(err)
159159
return val

pytorch_lightning/loggers/comet.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -125,7 +125,7 @@ def __init__(self,
125125
if experiment_name:
126126
try:
127127
self.name = experiment_name
128-
except TypeError as e:
128+
except TypeError:
129129
log.exception("Failed to set experiment name for comet.ml logger")
130130
self._kwargs = kwargs
131131

pytorch_lightning/overrides/data_parallel.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -177,9 +177,9 @@ def _worker(i, module, input, kwargs, device=None):
177177

178178
with lock:
179179
results[i] = output
180-
except Exception as e:
180+
except Exception as ex:
181181
with lock:
182-
results[i] = e
182+
results[i] = ex
183183

184184
# TODO: fix hack (maybe not a hack)
185185
# make sure each module knows what training state it's in...

pytorch_lightning/trainer/distrib_data_parallel.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -277,7 +277,7 @@ def configure_slurm_ddp(self, num_gpu_nodes):
277277
should_fake = int(os.environ['FAKE_SLURM_MANAGING_TASKS'])
278278
if should_fake:
279279
self.is_slurm_managing_tasks = True
280-
except Exception as e:
280+
except Exception:
281281
pass
282282

283283
# notify user the that slurm is managing tasks

pytorch_lightning/trainer/training_io.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -84,7 +84,6 @@
8484
"""
8585

8686
import os
87-
import pickle
8887
import re
8988
import signal
9089
from abc import ABC
@@ -211,7 +210,7 @@ def register_slurm_signal_handlers(self):
211210
job_name = os.environ['SLURM_JOB_NAME']
212211
if job_name != 'bash':
213212
on_slurm = True
214-
except Exception as e:
213+
except Exception:
215214
pass
216215

217216
if on_slurm:

tests/trainer/test_trainer.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -820,5 +820,5 @@ def __init__(self, **kwargs):
820820
assert trainer.fast_dev_run
821821

822822
# when we pass in an unknown arg, the base class should complain
823-
with pytest.raises(TypeError, match=r"__init__\(\) got an unexpected keyword argument 'abcdefg'") as e:
823+
with pytest.raises(TypeError, match=r"__init__\(\) got an unexpected keyword argument 'abcdefg'"):
824824
TrainerSubclass(abcdefg='unknown_arg')

0 commit comments

Comments (0)