
Commit deab3f4

change default logger to dedicated one
1 parent e586ed4 commit deab3f4

File tree

14 files changed (+19, -16 lines)

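Every file below makes the same swap: the module-level import of the logging package under the alias log is dropped, and the modules log through a single named logger that pytorch_lightning/core/__init__.py now defines. A minimal sketch of the resulting pattern, assuming an example basicConfig call and a made-up message just so the snippet prints something when run on its own:

import logging

logging.basicConfig(level=logging.INFO)  # assumed example setup so INFO records are visible

# Dedicated package logger, created in pytorch_lightning/core/__init__.py by this commit.
root_logger = logging.getLogger("lightning")

# Callers import it under the old alias "log", so existing log.info(...) call sites are untouched.
log = root_logger
log.info("running sanity check")  # record is tagged with the "lightning" logger name, not "root"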

pytorch_lightning/callbacks/early_stopping.py

Lines changed: 1 addition & 2 deletions
@@ -5,13 +5,12 @@
 
 """
 
-import logging as log
 import warnings
 
 import numpy as np
 
 from .base import Callback
-
+from ..core import root_logger
 
 class EarlyStopping(Callback):
     r"""

pytorch_lightning/callbacks/model_checkpoint.py

Lines changed: 1 addition & 1 deletion
@@ -7,12 +7,12 @@
 
 import os
 import glob
-import logging as log
 import warnings
 
 import numpy as np
 
 from .base import Callback
+from ..core import root_logger
 
 
 class ModelCheckpoint(Callback):

pytorch_lightning/core/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -102,6 +102,9 @@ def test_dataloader(self):
 
 """
 
+import logging
+root_logger = logging.getLogger("lightning")
+
 from .decorators import data_loader
 from .lightning import LightningModule
 
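Because Lightning now emits its records through the named "lightning" logger instead of the root logging module, an application can tune or silence the library's output without touching its own logging configuration. A hedged sketch of such application-side setup; the level and file handler here are illustrative and not part of this commit:

import logging

# Quiet the library's INFO chatter while leaving the application's root logger alone.
logging.getLogger("lightning").setLevel(logging.WARNING)

# Or route the library's records to a dedicated file.
lightning_logger = logging.getLogger("lightning")
lightning_logger.addHandler(logging.FileHandler("lightning.log"))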

pytorch_lightning/core/lightning.py

Lines changed: 1 addition & 2 deletions
@@ -1,6 +1,5 @@
 import collections
 import inspect
-import logging as log
 import os
 import warnings
 from abc import ABC, abstractmethod
@@ -11,6 +10,7 @@
 import torch.distributed as dist
 from torch.optim import Adam
 
+from pytorch_lightning.core import root_logger as log
 from pytorch_lightning.core.decorators import data_loader
 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import ModelHooks
@@ -26,7 +26,6 @@
 except ImportError:
     XLA_AVAILABLE = False
 
-
 class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
 
     def __init__(self, *args, **kwargs):

pytorch_lightning/core/memory.py

Lines changed: 1 addition & 1 deletion
@@ -3,14 +3,14 @@
 '''
 
 import gc
-import logging as log
 import os
 import subprocess
 from subprocess import PIPE
 
 import numpy as np
 import torch
 
+from ..core import root_logger as log
 
 class ModelSummary(object):
 

pytorch_lightning/core/saving.py

Lines changed: 3 additions & 2 deletions
@@ -1,8 +1,9 @@
-import os
 import csv
-import logging as log
+import os
 from argparse import Namespace
 
+from ..core import root_logger as log
+
 
 class ModelIO(object):
 

pytorch_lightning/profiler/profiler.py

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@
 import pstats
 import io
 from abc import ABC, abstractmethod
-import logging as log
+from ..core import root_logger as log
 
 
 class BaseProfiler(ABC):

pytorch_lightning/trainer/auto_mix_precision.py

Lines changed: 2 additions & 1 deletion
@@ -1,13 +1,14 @@
 
 from abc import ABC
 
+from ..core import root_logger as log
+
 try:
     from apex import amp
 
     APEX_AVAILABLE = True
 except ImportError:
     APEX_AVAILABLE = False
-import logging as log
 
 
 class TrainerAMPMixin(ABC):

pytorch_lightning/trainer/distrib_data_parallel.py

Lines changed: 1 addition & 1 deletion
@@ -113,14 +113,14 @@ def train_fx(trial_hparams, cluster_manager, _):
 
 """
 
-import logging as log
 import os
 import re
 import warnings
 from abc import ABC, abstractmethod
 from typing import Union
 
 import torch
+from pytorch_lightning.core import root_logger as log
 from pytorch_lightning.loggers import LightningLoggerBase
 
 from pytorch_lightning.utilities.debugging import MisconfigurationException

pytorch_lightning/trainer/distrib_parts.py

Lines changed: 1 addition & 1 deletion
@@ -335,12 +335,12 @@
 """
 
 from abc import ABC, abstractmethod
-import logging as log
 import os
 import signal
 
 import torch
 
+from pytorch_lightning.core import root_logger as log
 from pytorch_lightning.overrides.data_parallel import (
     LightningDistributedDataParallel,
     LightningDataParallel,
