From 833f0dda35387b82e31b00ad9cc1f8d1bfc39191 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Thu, 14 Jan 2021 02:23:48 +0100 Subject: [PATCH 01/18] duplicate logs --- pl_examples/domain_templates/computer_vision_fine_tuning.py | 3 ++- pytorch_lightning/__init__.py | 2 +- pytorch_lightning/accelerators/accelerator_connector.py | 3 ++- pytorch_lightning/accelerators/ddp2_accelerator.py | 3 ++- pytorch_lightning/accelerators/ddp_accelerator.py | 3 ++- pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py | 3 ++- pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py | 3 ++- pytorch_lightning/accelerators/ddp_hpc_accelerator.py | 3 ++- pytorch_lightning/accelerators/ddp_spawn_accelerator.py | 3 ++- pytorch_lightning/accelerators/dp_accelerator.py | 3 ++- pytorch_lightning/accelerators/gpu_accelerator.py | 3 ++- pytorch_lightning/accelerators/horovod_accelerator.py | 3 ++- pytorch_lightning/accelerators/tpu_accelerator.py | 3 ++- pytorch_lightning/callbacks/early_stopping.py | 3 ++- pytorch_lightning/callbacks/model_checkpoint.py | 3 ++- pytorch_lightning/cluster_environments/slurm_environment.py | 3 ++- .../cluster_environments/torchelastic_environment.py | 3 ++- pytorch_lightning/core/lightning.py | 3 ++- pytorch_lightning/core/saving.py | 3 ++- pytorch_lightning/loggers/comet.py | 3 ++- pytorch_lightning/loggers/csv_logs.py | 3 ++- pytorch_lightning/loggers/mlflow.py | 3 ++- pytorch_lightning/loggers/neptune.py | 3 ++- pytorch_lightning/loggers/tensorboard.py | 3 ++- pytorch_lightning/plugins/ddp_plugin.py | 3 ++- pytorch_lightning/plugins/ddp_sequential_plugin.py | 3 ++- pytorch_lightning/profiler/profilers.py | 3 ++- pytorch_lightning/trainer/configuration_validator.py | 3 ++- pytorch_lightning/trainer/connectors/checkpoint_connector.py | 3 ++- pytorch_lightning/trainer/connectors/precision_connector.py | 3 ++- pytorch_lightning/trainer/connectors/slurm_connector.py | 3 ++- pytorch_lightning/trainer/trainer.py | 3 ++- pytorch_lightning/trainer/training_tricks.py | 3 ++- pytorch_lightning/tuner/batch_size_scaling.py | 3 ++- pytorch_lightning/tuner/lr_finder.py | 3 ++- pytorch_lightning/utilities/distributed.py | 3 ++- pytorch_lightning/utilities/seed.py | 3 ++- pytorch_lightning/utilities/upgrade_checkpoint.py | 3 ++- 38 files changed, 75 insertions(+), 38 deletions(-) diff --git a/pl_examples/domain_templates/computer_vision_fine_tuning.py b/pl_examples/domain_templates/computer_vision_fine_tuning.py index 733fd8646142e..92a95ce60c53e 100644 --- a/pl_examples/domain_templates/computer_vision_fine_tuning.py +++ b/pl_examples/domain_templates/computer_vision_fine_tuning.py @@ -58,7 +58,8 @@ import pytorch_lightning as pl from pl_examples import cli_lightning_logo -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) BN_TYPES = (torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d) DATA_URL = "https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip" diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 1f672dcd9aac8..a735407a0ca0e 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -38,7 +38,7 @@ - https://pytorch-lightning.readthedocs.io/en/stable """ -_logger = python_logging.getLogger("lightning") +_logger = python_logging.getLogger(__name__) _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) diff --git a/pytorch_lightning/accelerators/accelerator_connector.py 
b/pytorch_lightning/accelerators/accelerator_connector.py index c911225d0b29f..be4ed7cbe5eb2 100644 --- a/pytorch_lightning/accelerators/accelerator_connector.py +++ b/pytorch_lightning/accelerators/accelerator_connector.py @@ -16,7 +16,8 @@ import torch from pytorch_lightning.utilities import HOROVOD_AVAILABLE -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning import accelerators from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment diff --git a/pytorch_lightning/accelerators/ddp2_accelerator.py b/pytorch_lightning/accelerators/ddp2_accelerator.py index 373406589d855..d334743149ba0 100644 --- a/pytorch_lightning/accelerators/ddp2_accelerator.py +++ b/pytorch_lightning/accelerators/ddp2_accelerator.py @@ -18,7 +18,8 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/ddp_accelerator.py b/pytorch_lightning/accelerators/ddp_accelerator.py index 0fde9da158c94..794b053b43484 100644 --- a/pytorch_lightning/accelerators/ddp_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_accelerator.py @@ -23,7 +23,8 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py index 4694a31438ca6..98a36b0673d9b 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py @@ -13,7 +13,8 @@ # limitations under the License from typing import Optional -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.plugins.ddp_plugin import DDPPlugin diff --git a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py index f9ccaa200bbf4..19de2859fbe2e 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py @@ -19,7 +19,8 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py index bdc4631b5d017..7917ddb7efdca 
100644 --- a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py @@ -19,7 +19,8 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py index eb4ff24e39dd4..7a39fc9d822d0 100644 --- a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py @@ -20,7 +20,8 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/dp_accelerator.py b/pytorch_lightning/accelerators/dp_accelerator.py index 03c9ebb442fb2..66a1932f19bd7 100644 --- a/pytorch_lightning/accelerators/dp_accelerator.py +++ b/pytorch_lightning/accelerators/dp_accelerator.py @@ -16,7 +16,8 @@ import torch from torch import optim -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/gpu_accelerator.py b/pytorch_lightning/accelerators/gpu_accelerator.py index d65b19bbd9bb1..2ad4f78307690 100644 --- a/pytorch_lightning/accelerators/gpu_accelerator.py +++ b/pytorch_lightning/accelerators/gpu_accelerator.py @@ -15,7 +15,8 @@ import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.distributed.dist import LightningDistributed diff --git a/pytorch_lightning/accelerators/horovod_accelerator.py b/pytorch_lightning/accelerators/horovod_accelerator.py index 6e11a13064513..5ae732aa6527a 100644 --- a/pytorch_lightning/accelerators/horovod_accelerator.py +++ b/pytorch_lightning/accelerators/horovod_accelerator.py @@ -17,7 +17,8 @@ import torch from torch.optim.lr_scheduler import _LRScheduler -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.utilities import AMPType, HOROVOD_AVAILABLE diff --git a/pytorch_lightning/accelerators/tpu_accelerator.py b/pytorch_lightning/accelerators/tpu_accelerator.py index 286004bc0976e..5e5e9bd7e49fc 100644 --- a/pytorch_lightning/accelerators/tpu_accelerator.py +++ b/pytorch_lightning/accelerators/tpu_accelerator.py @@ -20,7 +20,8 @@ import torch.multiprocessing as mp from torch.optim import Optimizer -from pytorch_lightning import _logger as 
log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core import LightningModule diff --git a/pytorch_lightning/callbacks/early_stopping.py b/pytorch_lightning/callbacks/early_stopping.py index 3e15d8462350c..0b66b0409333c 100644 --- a/pytorch_lightning/callbacks/early_stopping.py +++ b/pytorch_lightning/callbacks/early_stopping.py @@ -25,7 +25,8 @@ import numpy as np import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn, TPU_AVAILABLE diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py index e5c960b3c002b..0f9e5fb3af876 100644 --- a/pytorch_lightning/callbacks/model_checkpoint.py +++ b/pytorch_lightning/callbacks/model_checkpoint.py @@ -31,7 +31,8 @@ import torch import yaml -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn diff --git a/pytorch_lightning/cluster_environments/slurm_environment.py b/pytorch_lightning/cluster_environments/slurm_environment.py index 6df1cf680c57f..eabf5f11ce8f1 100644 --- a/pytorch_lightning/cluster_environments/slurm_environment.py +++ b/pytorch_lightning/cluster_environments/slurm_environment.py @@ -14,7 +14,8 @@ import os import re -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment diff --git a/pytorch_lightning/cluster_environments/torchelastic_environment.py b/pytorch_lightning/cluster_environments/torchelastic_environment.py index a4d769518d252..b3c70d62548b6 100644 --- a/pytorch_lightning/cluster_environments/torchelastic_environment.py +++ b/pytorch_lightning/cluster_environments/torchelastic_environment.py @@ -13,7 +13,8 @@ # limitations under the License. 
import os -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index f750c8aff7caf..4dc80645984b7 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -30,7 +30,8 @@ from torch.nn import Module from torch.optim.optimizer import Optimizer -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.grads import GradInformation from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks from pytorch_lightning.core.memory import ModelSummary diff --git a/pytorch_lightning/core/saving.py b/pytorch_lightning/core/saving.py index 12a29246888f7..d88243a8f1bf9 100644 --- a/pytorch_lightning/core/saving.py +++ b/pytorch_lightning/core/saving.py @@ -25,7 +25,8 @@ import torch import yaml -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities import AttributeDict, OMEGACONF_AVAILABLE, rank_zero_warn from pytorch_lightning.utilities.apply_func import apply_to_collection from pytorch_lightning.utilities.cloud_io import get_filesystem diff --git a/pytorch_lightning/loggers/comet.py b/pytorch_lightning/loggers/comet.py index 869bce831f0c2..6c85d093e79d4 100644 --- a/pytorch_lightning/loggers/comet.py +++ b/pytorch_lightning/loggers/comet.py @@ -24,7 +24,8 @@ import torch from torch import is_tensor -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, _module_available from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/loggers/csv_logs.py b/pytorch_lightning/loggers/csv_logs.py index d47cff1db0e1b..79bbf102d6f0a 100644 --- a/pytorch_lightning/loggers/csv_logs.py +++ b/pytorch_lightning/loggers/csv_logs.py @@ -27,7 +27,8 @@ import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn diff --git a/pytorch_lightning/loggers/mlflow.py b/pytorch_lightning/loggers/mlflow.py index 4987d050c925d..5159c0111e726 100644 --- a/pytorch_lightning/loggers/mlflow.py +++ b/pytorch_lightning/loggers/mlflow.py @@ -22,7 +22,8 @@ from typing import Any, Dict, Optional, Union -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, _module_available diff --git a/pytorch_lightning/loggers/neptune.py b/pytorch_lightning/loggers/neptune.py index 9f3c3787a417e..2a8eea2c0ef1a 100644 --- a/pytorch_lightning/loggers/neptune.py +++ b/pytorch_lightning/loggers/neptune.py @@ -22,7 +22,8 @@ import torch from torch import is_tensor -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.loggers.base 
import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, _module_available diff --git a/pytorch_lightning/loggers/tensorboard.py b/pytorch_lightning/loggers/tensorboard.py index f8e984c6ff5bc..8a3984c49f445 100644 --- a/pytorch_lightning/loggers/tensorboard.py +++ b/pytorch_lightning/loggers/tensorboard.py @@ -25,7 +25,8 @@ from torch.utils.tensorboard import SummaryWriter from torch.utils.tensorboard.summary import hparams -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment diff --git a/pytorch_lightning/plugins/ddp_plugin.py b/pytorch_lightning/plugins/ddp_plugin.py index 6d5ad1e9e2119..2ff43efc7b82e 100644 --- a/pytorch_lightning/plugins/ddp_plugin.py +++ b/pytorch_lightning/plugins/ddp_plugin.py @@ -6,7 +6,8 @@ import torch.distributed as torch_distrib from torch.optim import Optimizer -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.plugin import LightningPlugin diff --git a/pytorch_lightning/plugins/ddp_sequential_plugin.py b/pytorch_lightning/plugins/ddp_sequential_plugin.py index 069b1754fbce0..c95bda5c97b52 100644 --- a/pytorch_lightning/plugins/ddp_sequential_plugin.py +++ b/pytorch_lightning/plugins/ddp_sequential_plugin.py @@ -19,7 +19,8 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.rpc_plugin import RPCPlugin diff --git a/pytorch_lightning/profiler/profilers.py b/pytorch_lightning/profiler/profilers.py index ac410b0231453..952da20cba2d5 100644 --- a/pytorch_lightning/profiler/profilers.py +++ b/pytorch_lightning/profiler/profilers.py @@ -27,7 +27,8 @@ import fsspec import numpy as np -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities.cloud_io import get_filesystem diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py index 20992255ba29e..026caa37dd13a 100644 --- a/pytorch_lightning/trainer/configuration_validator.py +++ b/pytorch_lightning/trainer/configuration_validator.py @@ -11,7 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/trainer/connectors/checkpoint_connector.py b/pytorch_lightning/trainer/connectors/checkpoint_connector.py index 03d46132fb177..02148e1697a00 100644 --- a/pytorch_lightning/trainer/connectors/checkpoint_connector.py +++ b/pytorch_lightning/trainer/connectors/checkpoint_connector.py @@ -20,7 +20,8 @@ import torch import pytorch_lightning -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, OMEGACONF_AVAILABLE, rank_zero_info, rank_zero_warn diff --git a/pytorch_lightning/trainer/connectors/precision_connector.py b/pytorch_lightning/trainer/connectors/precision_connector.py index 822c3ef634fdc..3c12af26e9cd2 100644 --- a/pytorch_lightning/trainer/connectors/precision_connector.py +++ b/pytorch_lightning/trainer/connectors/precision_connector.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.plugins.apex import ApexPlugin from pytorch_lightning.plugins.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, NATIVE_AMP_AVAILABLE, rank_zero_warn diff --git a/pytorch_lightning/trainer/connectors/slurm_connector.py b/pytorch_lightning/trainer/connectors/slurm_connector.py index 4cb954a8e92fc..306bc2446733f 100644 --- a/pytorch_lightning/trainer/connectors/slurm_connector.py +++ b/pytorch_lightning/trainer/connectors/slurm_connector.py @@ -2,7 +2,8 @@ import re import signal from subprocess import call -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities.distributed import rank_zero_info import torch.distributed as torch_distrib import torch diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index c3ef0e507789e..c8d7cb7b73b79 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -22,7 +22,8 @@ import torch from torch.utils.data import DataLoader -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.accelerators.accelerator_connector import AcceleratorConnector from pytorch_lightning.callbacks import Callback diff --git a/pytorch_lightning/trainer/training_tricks.py b/pytorch_lightning/trainer/training_tricks.py index be9793e9e5cdb..0180be5d4e268 100644 --- a/pytorch_lightning/trainer/training_tricks.py +++ b/pytorch_lightning/trainer/training_tricks.py @@ -17,7 +17,8 @@ import torch from torch import Tensor -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule EPSILON = 1e-6 diff --git a/pytorch_lightning/tuner/batch_size_scaling.py b/pytorch_lightning/tuner/batch_size_scaling.py index 52662f6172d8d..c106b5400b8fe 100644 --- 
a/pytorch_lightning/tuner/batch_size_scaling.py +++ b/pytorch_lightning/tuner/batch_size_scaling.py @@ -21,7 +21,8 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda from pytorch_lightning.loggers.base import DummyLogger -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities.cloud_io import get_filesystem diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py index 2982454d02f70..d2f0b79b2057c 100644 --- a/pytorch_lightning/tuner/lr_finder.py +++ b/pytorch_lightning/tuner/lr_finder.py @@ -22,7 +22,8 @@ from torch.optim.lr_scheduler import _LRScheduler from torch.utils.data import DataLoader -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.callbacks import Callback from pytorch_lightning.core.datamodule import LightningDataModule from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py index 2a0b989e9b9cd..33ffcbbb9b4d5 100644 --- a/pytorch_lightning/utilities/distributed.py +++ b/pytorch_lightning/utilities/distributed.py @@ -19,7 +19,8 @@ import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) if torch.distributed.is_available(): from torch.distributed import ReduceOp, group diff --git a/pytorch_lightning/utilities/seed.py b/pytorch_lightning/utilities/seed.py index 16bc39bd7f142..1189a6155c0d5 100644 --- a/pytorch_lightning/utilities/seed.py +++ b/pytorch_lightning/utilities/seed.py @@ -20,7 +20,8 @@ import numpy as np import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.utilities import rank_zero_warn diff --git a/pytorch_lightning/utilities/upgrade_checkpoint.py b/pytorch_lightning/utilities/upgrade_checkpoint.py index 2e767542cd9bd..59d3e50037dd4 100644 --- a/pytorch_lightning/utilities/upgrade_checkpoint.py +++ b/pytorch_lightning/utilities/upgrade_checkpoint.py @@ -16,7 +16,8 @@ import torch -from pytorch_lightning import _logger as log +import logging +log = logging.getLogger(__name__) from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint KEYS_MAPPING = { From c1ca85a2fc66f0ea4abbc7b384decd354efa7daa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Sat, 16 Jan 2021 00:33:55 +0100 Subject: [PATCH 02/18] remove unused --- pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py | 2 -- pytorch_lightning/accelerators/dp_accelerator.py | 2 -- pytorch_lightning/accelerators/gpu_accelerator.py | 2 -- pytorch_lightning/accelerators/horovod_accelerator.py | 2 -- pytorch_lightning/callbacks/early_stopping.py | 2 -- pytorch_lightning/trainer/configuration_validator.py | 2 -- pytorch_lightning/trainer/connectors/checkpoint_connector.py | 2 -- 7 files changed, 14 deletions(-) diff --git a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py index 98a36b0673d9b..9b7c5c5613b5c 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_hpc_accelerator.py @@ -13,8 +13,6 @@ # limitations under the License from typing import Optional -import logging -log = logging.getLogger(__name__) from 
pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.plugins.ddp_plugin import DDPPlugin diff --git a/pytorch_lightning/accelerators/dp_accelerator.py b/pytorch_lightning/accelerators/dp_accelerator.py index 66a1932f19bd7..4eb35c898e50f 100644 --- a/pytorch_lightning/accelerators/dp_accelerator.py +++ b/pytorch_lightning/accelerators/dp_accelerator.py @@ -16,8 +16,6 @@ import torch from torch import optim -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule diff --git a/pytorch_lightning/accelerators/gpu_accelerator.py b/pytorch_lightning/accelerators/gpu_accelerator.py index 2ad4f78307690..51272d50707a4 100644 --- a/pytorch_lightning/accelerators/gpu_accelerator.py +++ b/pytorch_lightning/accelerators/gpu_accelerator.py @@ -15,8 +15,6 @@ import torch -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.distributed.dist import LightningDistributed diff --git a/pytorch_lightning/accelerators/horovod_accelerator.py b/pytorch_lightning/accelerators/horovod_accelerator.py index 5ae732aa6527a..fbcf7d621343a 100644 --- a/pytorch_lightning/accelerators/horovod_accelerator.py +++ b/pytorch_lightning/accelerators/horovod_accelerator.py @@ -17,8 +17,6 @@ import torch from torch.optim.lr_scheduler import _LRScheduler -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.utilities import AMPType, HOROVOD_AVAILABLE diff --git a/pytorch_lightning/callbacks/early_stopping.py b/pytorch_lightning/callbacks/early_stopping.py index 0b66b0409333c..72e7a7944cfcf 100644 --- a/pytorch_lightning/callbacks/early_stopping.py +++ b/pytorch_lightning/callbacks/early_stopping.py @@ -25,8 +25,6 @@ import numpy as np import torch -import logging -log = logging.getLogger(__name__) from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_warn, TPU_AVAILABLE diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py index 026caa37dd13a..d82454f697d02 100644 --- a/pytorch_lightning/trainer/configuration_validator.py +++ b/pytorch_lightning/trainer/configuration_validator.py @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException diff --git a/pytorch_lightning/trainer/connectors/checkpoint_connector.py b/pytorch_lightning/trainer/connectors/checkpoint_connector.py index 02148e1697a00..34d572de84c51 100644 --- a/pytorch_lightning/trainer/connectors/checkpoint_connector.py +++ b/pytorch_lightning/trainer/connectors/checkpoint_connector.py @@ -20,8 +20,6 @@ import torch import pytorch_lightning -import logging -log = logging.getLogger(__name__) from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, OMEGACONF_AVAILABLE, rank_zero_info, rank_zero_warn From 0c581fe32d1ad44e60ab128bc7c76c19e5ae0f1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 01:42:42 +0100 Subject: [PATCH 03/18] configure basic logging --- pytorch_lightning/__init__.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index a735407a0ca0e..01e7e2c12c772 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -3,6 +3,7 @@ import logging as python_logging import os import time +import sys _this_year = time.strftime("%Y") __version__ = '1.1.4' @@ -37,10 +38,11 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ - -_logger = python_logging.getLogger(__name__) -_logger.addHandler(python_logging.StreamHandler()) -_logger.setLevel(python_logging.INFO) +python_logging.basicConfig( + stream=sys.stdout, + format="%(message)s", + level=python_logging.INFO, +) PACKAGE_ROOT = os.path.dirname(__file__) PROJECT_ROOT = os.path.dirname(PACKAGE_ROOT) From b68451150edc10665ba746ac99f411747cf4cc16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 01:45:26 +0100 Subject: [PATCH 04/18] missing import --- tests/callbacks/test_early_stopping.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/callbacks/test_early_stopping.py b/tests/callbacks/test_early_stopping.py index 5c54f6a84805d..6fa7e5f3567b6 100644 --- a/tests/callbacks/test_early_stopping.py +++ b/tests/callbacks/test_early_stopping.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging import os import pickle from unittest import mock @@ -20,11 +21,13 @@ import pytest import torch -from pytorch_lightning import _logger, seed_everything, Trainer +from pytorch_lightning import seed_everything, Trainer from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import BoringModel, EvalModelTemplate +_logger = logging.getLogger(__name__) + class EarlyStoppingTestRestore(EarlyStopping): # this class has to be defined outside the test function, otherwise we get pickle error From 56c143c7409d72f80fb6eebc4c1ab846b4547dec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 01:56:04 +0100 Subject: [PATCH 05/18] update docs for logging --- docs/source/logging.rst | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/docs/source/logging.rst b/docs/source/logging.rst index 79452b0ca8788..8f32d72f0dbda 100644 --- a/docs/source/logging.rst +++ b/docs/source/logging.rst @@ -259,13 +259,19 @@ Configure console logging ************************* Lightning logs useful information about the training process and user warnings to the console. -You can retrieve the Lightning logger and change it to your liking. For example, increase the logging level -to see fewer messages like so: +You can retrieve the Lightning logger and change it to your liking. For example, adjust the logging level +or redirect output for certain modules to log files: .. code-block:: python import logging - logging.getLogger("lightning").setLevel(logging.ERROR) + + # configure logging at the root level of lightning + logging.getLogger("pytorch_lightning").setLevel(logging.ERROR) + + # configure logging on module level, redirect to file + logger = logging.getLogger("pytorch_lightning.core") + logger.addHandler(logging.FileHandler("core.log")) Read more about custom Python logging `here `_. From 19e09679c43cd37367fcbc092b378f227c2df4cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:07:19 +0100 Subject: [PATCH 06/18] import order --- pytorch_lightning/trainer/connectors/precision_connector.py | 4 +++- pytorch_lightning/trainer/connectors/slurm_connector.py | 5 +++-- pytorch_lightning/trainer/trainer.py | 4 ++-- pytorch_lightning/trainer/training_tricks.py | 4 ++-- pytorch_lightning/tuner/batch_size_scaling.py | 5 +++-- pytorch_lightning/tuner/lr_finder.py | 5 +++-- pytorch_lightning/utilities/seed.py | 6 ++++-- pytorch_lightning/utilities/upgrade_checkpoint.py | 5 +++-- 8 files changed, 23 insertions(+), 15 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/precision_connector.py b/pytorch_lightning/trainer/connectors/precision_connector.py index 3c12af26e9cd2..6ec4b01fd8f10 100644 --- a/pytorch_lightning/trainer/connectors/precision_connector.py +++ b/pytorch_lightning/trainer/connectors/precision_connector.py @@ -13,11 +13,13 @@ # limitations under the License. 
import logging -log = logging.getLogger(__name__) + from pytorch_lightning.plugins.apex import ApexPlugin from pytorch_lightning.plugins.native_amp import NativeAMPPlugin from pytorch_lightning.utilities import AMPType, APEX_AVAILABLE, NATIVE_AMP_AVAILABLE, rank_zero_warn +log = logging.getLogger(__name__) + class PrecisionConnector: diff --git a/pytorch_lightning/trainer/connectors/slurm_connector.py b/pytorch_lightning/trainer/connectors/slurm_connector.py index 306bc2446733f..7e5fdef271313 100644 --- a/pytorch_lightning/trainer/connectors/slurm_connector.py +++ b/pytorch_lightning/trainer/connectors/slurm_connector.py @@ -1,13 +1,14 @@ +import logging import os import re import signal from subprocess import call -import logging -log = logging.getLogger(__name__) from pytorch_lightning.utilities.distributed import rank_zero_info import torch.distributed as torch_distrib import torch +log = logging.getLogger(__name__) + class SLURMConnector: diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index c8d7cb7b73b79..bbfc3fd3202f2 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -14,6 +14,7 @@ """Trainer to automate the training.""" +import logging import os import warnings from pathlib import Path @@ -22,8 +23,6 @@ import torch from torch.utils.data import DataLoader -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.accelerators.accelerator_connector import AcceleratorConnector from pytorch_lightning.callbacks import Callback @@ -65,6 +64,7 @@ from pytorch_lightning.utilities.memory import recursive_detach from pytorch_lightning.utilities.model_utils import is_overridden +log = logging.getLogger(__name__) # warnings to ignore in trainer warnings.filterwarnings( 'ignore', message='torch.distributed.reduce_op is deprecated, ' 'please use torch.distributed.ReduceOp instead' diff --git a/pytorch_lightning/trainer/training_tricks.py b/pytorch_lightning/trainer/training_tricks.py index 0180be5d4e268..5129e1a5a0b7c 100644 --- a/pytorch_lightning/trainer/training_tricks.py +++ b/pytorch_lightning/trainer/training_tricks.py @@ -13,16 +13,16 @@ # limitations under the License. from abc import ABC, abstractmethod +import logging import torch from torch import Tensor -import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule EPSILON = 1e-6 EPSILON_FP16 = 1e-5 +log = logging.getLogger(__name__) class TrainerTrainingTricksMixin(ABC): diff --git a/pytorch_lightning/tuner/batch_size_scaling.py b/pytorch_lightning/tuner/batch_size_scaling.py index c106b5400b8fe..03f95cd66891b 100644 --- a/pytorch_lightning/tuner/batch_size_scaling.py +++ b/pytorch_lightning/tuner/batch_size_scaling.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Optional, Tuple @@ -21,10 +22,10 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda from pytorch_lightning.loggers.base import DummyLogger -import logging -log = logging.getLogger(__name__) from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + def scale_batch_size(trainer, model: LightningModule, diff --git a/pytorch_lightning/tuner/lr_finder.py b/pytorch_lightning/tuner/lr_finder.py index d2f0b79b2057c..c037a51f88d38 100644 --- a/pytorch_lightning/tuner/lr_finder.py +++ b/pytorch_lightning/tuner/lr_finder.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import importlib +import logging import os from typing import List, Optional, Sequence, Union, Callable from functools import wraps @@ -22,8 +23,6 @@ from torch.optim.lr_scheduler import _LRScheduler from torch.utils.data import DataLoader -import logging -log = logging.getLogger(__name__) from pytorch_lightning.callbacks import Callback from pytorch_lightning.core.datamodule import LightningDataModule from pytorch_lightning.core.lightning import LightningModule @@ -40,6 +39,8 @@ else: from tqdm import tqdm +log = logging.getLogger(__name__) + def _run_lr_finder_internally(trainer, model: LightningModule): """ Call lr finder internally during Trainer.fit() """ diff --git a/pytorch_lightning/utilities/seed.py b/pytorch_lightning/utilities/seed.py index 1189a6155c0d5..b235e9420d59c 100644 --- a/pytorch_lightning/utilities/seed.py +++ b/pytorch_lightning/utilities/seed.py @@ -14,16 +14,18 @@ """Helper functions to help with reproducibility of models. """ +import logging import os import random from typing import Optional import numpy as np import torch -import logging -log = logging.getLogger(__name__) + from pytorch_lightning.utilities import rank_zero_warn +log = logging.getLogger(__name__) + def seed_everything(seed: Optional[int] = None) -> int: """ diff --git a/pytorch_lightning/utilities/upgrade_checkpoint.py b/pytorch_lightning/utilities/upgrade_checkpoint.py index 59d3e50037dd4..4896845f10263 100644 --- a/pytorch_lightning/utilities/upgrade_checkpoint.py +++ b/pytorch_lightning/utilities/upgrade_checkpoint.py @@ -12,12 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import argparse +import logging from shutil import copyfile import torch -import logging -log = logging.getLogger(__name__) from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint KEYS_MAPPING = { @@ -28,6 +27,8 @@ "early_stop_callback_patience": (EarlyStopping, "patience"), } +log = logging.getLogger(__name__) + def upgrade_checkpoint(filepath): checkpoint = torch.load(filepath) From ed7936cb2ab44852046da940c7ecf20270b90ca2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:16:54 +0100 Subject: [PATCH 07/18] flake8 --- pytorch_lightning/accelerators/accelerator_connector.py | 6 ++++-- pytorch_lightning/accelerators/ddp2_accelerator.py | 4 ++-- pytorch_lightning/accelerators/ddp_accelerator.py | 4 ++-- .../accelerators/ddp_cpu_spawn_accelerator.py | 4 ++-- pytorch_lightning/accelerators/ddp_hpc_accelerator.py | 4 ++-- pytorch_lightning/accelerators/ddp_spawn_accelerator.py | 4 ++-- pytorch_lightning/accelerators/tpu_accelerator.py | 4 ++-- pytorch_lightning/callbacks/model_checkpoint.py | 6 +++--- .../cluster_environments/slurm_environment.py | 7 ++++--- .../cluster_environments/torchelastic_environment.py | 7 ++++--- pytorch_lightning/core/lightning.py | 4 ++-- pytorch_lightning/core/saving.py | 4 ++-- pytorch_lightning/loggers/comet.py | 4 ++-- pytorch_lightning/loggers/csv_logs.py | 5 +++-- pytorch_lightning/loggers/mlflow.py | 8 ++------ pytorch_lightning/loggers/neptune.py | 4 ++-- pytorch_lightning/loggers/tensorboard.py | 5 +++-- pytorch_lightning/plugins/ddp_plugin.py | 5 +++-- pytorch_lightning/plugins/ddp_sequential_plugin.py | 5 +++-- pytorch_lightning/profiler/profilers.py | 5 +++-- 20 files changed, 52 insertions(+), 47 deletions(-) diff --git a/pytorch_lightning/accelerators/accelerator_connector.py b/pytorch_lightning/accelerators/accelerator_connector.py index be4ed7cbe5eb2..e376be44cf58e 100644 --- a/pytorch_lightning/accelerators/accelerator_connector.py +++ b/pytorch_lightning/accelerators/accelerator_connector.py @@ -11,13 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging import os import torch from pytorch_lightning.utilities import HOROVOD_AVAILABLE -import logging -log = logging.getLogger(__name__) + from pytorch_lightning import accelerators from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment @@ -29,6 +29,8 @@ if HOROVOD_AVAILABLE: import horovod.torch as hvd +log = logging.getLogger(__name__) + class AcceleratorConnector: diff --git a/pytorch_lightning/accelerators/ddp2_accelerator.py b/pytorch_lightning/accelerators/ddp2_accelerator.py index d334743149ba0..ce0db96e9f737 100644 --- a/pytorch_lightning/accelerators/ddp2_accelerator.py +++ b/pytorch_lightning/accelerators/ddp2_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -18,8 +19,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -30,6 +29,7 @@ from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_accelerator.py b/pytorch_lightning/accelerators/ddp_accelerator.py index 794b053b43484..24b7c295eb0df 100644 --- a/pytorch_lightning/accelerators/ddp_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from os.path import abspath import subprocess @@ -23,8 +24,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -41,6 +40,7 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.seed import seed_everything +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py index 19de2859fbe2e..dd3ebafc66a33 100644 --- a/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_cpu_spawn_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -19,8 +20,6 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -36,6 +35,7 @@ sync_ddp_if_available, ) +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py index 7917ddb7efdca..e58820550ee50 100644 --- a/pytorch_lightning/accelerators/ddp_hpc_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_hpc_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional, Union @@ -19,8 +20,6 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -30,6 +29,7 @@ from pytorch_lightning.utilities import AMPType, HYDRA_AVAILABLE from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, rank_zero_only, sync_ddp_if_available +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py index 7a39fc9d822d0..a273579cb14f9 100644 --- a/pytorch_lightning/accelerators/ddp_spawn_accelerator.py +++ b/pytorch_lightning/accelerators/ddp_spawn_accelerator.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License +import logging import os import re from typing import Any, List, Optional, Union @@ -20,8 +21,6 @@ import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core.lightning import LightningModule @@ -40,6 +39,7 @@ ) from pytorch_lightning.utilities.seed import seed_everything +log = logging.getLogger(__name__) if HYDRA_AVAILABLE: from hydra.core.hydra_config import HydraConfig from hydra.utils import get_original_cwd, to_absolute_path diff --git a/pytorch_lightning/accelerators/tpu_accelerator.py b/pytorch_lightning/accelerators/tpu_accelerator.py index 5e5e9bd7e49fc..1c3ebf1f76289 100644 --- a/pytorch_lightning/accelerators/tpu_accelerator.py +++ b/pytorch_lightning/accelerators/tpu_accelerator.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import io +import logging import os import re from typing import Any, Callable, Optional, Union @@ -20,8 +21,6 @@ import torch.multiprocessing as mp from torch.optim import Optimizer -import logging -log = logging.getLogger(__name__) from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp from pytorch_lightning.cluster_environments import ClusterEnvironment from pytorch_lightning.core import LightningModule @@ -36,6 +35,7 @@ from pytorch_lightning.utilities.cloud_io import atomic_save from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) if TPU_AVAILABLE: import torch_xla import torch_xla.core.xla_model as xm diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py index 0f9e5fb3af876..6f10bc8bb63b8 100644 --- a/pytorch_lightning/callbacks/model_checkpoint.py +++ b/pytorch_lightning/callbacks/model_checkpoint.py @@ -19,7 +19,7 @@ Automatically save model checkpoints during training. """ - +import logging import numbers import os import re @@ -31,14 +31,14 @@ import torch import yaml -import logging -log = logging.getLogger(__name__) from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.metrics.metric import Metric from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn from pytorch_lightning.utilities.cloud_io import get_filesystem from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) + class ModelCheckpoint(Callback): r""" diff --git a/pytorch_lightning/cluster_environments/slurm_environment.py b/pytorch_lightning/cluster_environments/slurm_environment.py index eabf5f11ce8f1..9bb19745ac50b 100644 --- a/pytorch_lightning/cluster_environments/slurm_environment.py +++ b/pytorch_lightning/cluster_environments/slurm_environment.py @@ -11,13 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import logging import os import re -import logging -log = logging.getLogger(__name__) + from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment +log = logging.getLogger(__name__) + class SLURMEnvironment(ClusterEnvironment): diff --git a/pytorch_lightning/cluster_environments/torchelastic_environment.py b/pytorch_lightning/cluster_environments/torchelastic_environment.py index b3c70d62548b6..712c6c3e36661 100644 --- a/pytorch_lightning/cluster_environments/torchelastic_environment.py +++ b/pytorch_lightning/cluster_environments/torchelastic_environment.py @@ -11,13 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -import os import logging -log = logging.getLogger(__name__) +import os + from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment +log = logging.getLogger(__name__) + class TorchElasticEnvironment(ClusterEnvironment): diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 4dc80645984b7..1ae2bd04090d1 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -17,6 +17,7 @@ import collections import copy import inspect +import logging import os import re import tempfile @@ -30,8 +31,6 @@ from torch.nn import Module from torch.optim.optimizer import Optimizer -import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.grads import GradInformation from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks from pytorch_lightning.core.memory import ModelSummary @@ -43,6 +42,7 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args +log = logging.getLogger(__name__) if TPU_AVAILABLE: import torch_xla.core.xla_model as xm diff --git a/pytorch_lightning/core/saving.py b/pytorch_lightning/core/saving.py index d88243a8f1bf9..3a2f58d1ccf10 100644 --- a/pytorch_lightning/core/saving.py +++ b/pytorch_lightning/core/saving.py @@ -15,6 +15,7 @@ import ast import csv import inspect +import logging import os from argparse import Namespace from copy import deepcopy @@ -25,14 +26,13 @@ import torch import yaml -import logging -log = logging.getLogger(__name__) from pytorch_lightning.utilities import AttributeDict, OMEGACONF_AVAILABLE, rank_zero_warn from pytorch_lightning.utilities.apply_func import apply_to_collection from pytorch_lightning.utilities.cloud_io import get_filesystem from pytorch_lightning.utilities.cloud_io import load as pl_load from pytorch_lightning.utilities.parsing import parse_class_init_keys +log = logging.getLogger(__name__) PRIMITIVE_TYPES = (bool, int, float, str) ALLOWED_CONFIG_TYPES = (AttributeDict, MutableMapping, Namespace) diff --git a/pytorch_lightning/loggers/comet.py b/pytorch_lightning/loggers/comet.py index 6c85d093e79d4..2d23293e881b5 100644 --- a/pytorch_lightning/loggers/comet.py +++ b/pytorch_lightning/loggers/comet.py @@ -17,6 +17,7 @@ ------------ """ +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union @@ -24,12 +25,11 @@ import torch from torch import is_tensor -import logging -log = logging.getLogger(__name__) from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from 
pytorch_lightning.utilities import rank_zero_only, _module_available from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) _COMET_AVAILABLE = _module_available("comet_ml") if _COMET_AVAILABLE: diff --git a/pytorch_lightning/loggers/csv_logs.py b/pytorch_lightning/loggers/csv_logs.py index 79bbf102d6f0a..92190161dbf6a 100644 --- a/pytorch_lightning/loggers/csv_logs.py +++ b/pytorch_lightning/loggers/csv_logs.py @@ -21,18 +21,19 @@ """ import csv import io +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union import torch -import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn +log = logging.getLogger(__name__) + class ExperimentWriter(object): r""" diff --git a/pytorch_lightning/loggers/mlflow.py b/pytorch_lightning/loggers/mlflow.py index 5159c0111e726..c53ed2c9b3ccc 100644 --- a/pytorch_lightning/loggers/mlflow.py +++ b/pytorch_lightning/loggers/mlflow.py @@ -16,21 +16,17 @@ MLflow Logger ------------- """ +import logging import re from argparse import Namespace from time import time from typing import Any, Dict, Optional, Union - -import logging -log = logging.getLogger(__name__) from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, _module_available - +log = logging.getLogger(__name__) LOCAL_FILE_URI_PREFIX = "file:" - - _MLFLOW_AVAILABLE = _module_available("mlflow") try: import mlflow diff --git a/pytorch_lightning/loggers/neptune.py b/pytorch_lightning/loggers/neptune.py index 2a8eea2c0ef1a..bc94fae848738 100644 --- a/pytorch_lightning/loggers/neptune.py +++ b/pytorch_lightning/loggers/neptune.py @@ -17,16 +17,16 @@ -------------- """ from argparse import Namespace +import logging from typing import Any, Dict, Iterable, Optional, Union import torch from torch import is_tensor -import logging -log = logging.getLogger(__name__) from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, _module_available +log = logging.getLogger(__name__) _NEPTUNE_AVAILABLE = _module_available("neptune") if _NEPTUNE_AVAILABLE: diff --git a/pytorch_lightning/loggers/tensorboard.py b/pytorch_lightning/loggers/tensorboard.py index 8a3984c49f445..fe55b92567a68 100644 --- a/pytorch_lightning/loggers/tensorboard.py +++ b/pytorch_lightning/loggers/tensorboard.py @@ -17,6 +17,7 @@ ------------------ """ +import logging import os from argparse import Namespace from typing import Any, Dict, Optional, Union @@ -25,14 +26,14 @@ from torch.utils.tensorboard import SummaryWriter from torch.utils.tensorboard.summary import hparams -import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, OMEGACONF_AVAILABLE from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + if OMEGACONF_AVAILABLE: from omegaconf import Container, OmegaConf diff --git a/pytorch_lightning/plugins/ddp_plugin.py 
b/pytorch_lightning/plugins/ddp_plugin.py index 2ff43efc7b82e..57671a0c57d37 100644 --- a/pytorch_lightning/plugins/ddp_plugin.py +++ b/pytorch_lightning/plugins/ddp_plugin.py @@ -1,3 +1,4 @@ +import logging import os from contextlib import contextmanager from typing import Any, Dict, List, Union @@ -6,12 +7,12 @@ import torch.distributed as torch_distrib from torch.optim import Optimizer -import logging -log = logging.getLogger(__name__) from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.plugin import LightningPlugin +log = logging.getLogger(__name__) + class DDPPlugin(LightningPlugin): """ diff --git a/pytorch_lightning/plugins/ddp_sequential_plugin.py b/pytorch_lightning/plugins/ddp_sequential_plugin.py index c95bda5c97b52..39fcce9cb44cd 100644 --- a/pytorch_lightning/plugins/ddp_sequential_plugin.py +++ b/pytorch_lightning/plugins/ddp_sequential_plugin.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License +import logging import os from typing import Any, List, Optional @@ -19,14 +20,14 @@ import torch.distributed as torch_distrib from torch.nn.parallel import DistributedDataParallel -import logging -log = logging.getLogger(__name__) from pytorch_lightning import LightningModule from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel from pytorch_lightning.plugins.rpc_plugin import RPCPlugin from pytorch_lightning.utilities import FAIRSCALE_PIPE_AVAILABLE, rank_zero_only from pytorch_lightning.utilities.exceptions import MisconfigurationException +log = logging.getLogger(__name__) + if FAIRSCALE_PIPE_AVAILABLE: from fairscale.nn import PipeRPCWrapper import fairscale.nn.model_parallel as mpu diff --git a/pytorch_lightning/profiler/profilers.py b/pytorch_lightning/profiler/profilers.py index 952da20cba2d5..fc3afa8c0a993 100644 --- a/pytorch_lightning/profiler/profilers.py +++ b/pytorch_lightning/profiler/profilers.py @@ -16,6 +16,7 @@ import cProfile import io +import logging import os import pstats import time @@ -27,10 +28,10 @@ import fsspec import numpy as np -import logging -log = logging.getLogger(__name__) from pytorch_lightning.utilities.cloud_io import get_filesystem +log = logging.getLogger(__name__) + class BaseProfiler(ABC): """ From 8fb4135d5a9c051b0aa9397bedd28bf2e95045c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:31:58 +0100 Subject: [PATCH 08/18] test --- tests/test_profiler.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 4728b11582dfc..ec9834458ae60 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import logging import os import time from pathlib import Path @@ -99,7 +99,8 @@ def test_simple_profiler_overhead(simple_profiler, n_iter=5): def test_simple_profiler_describe(caplog, simple_profiler): """Ensure the profiler won't fail when reporting the summary.""" - simple_profiler.describe() + with caplog.at_level(logging.INFO): + simple_profiler.describe() assert "Profiler Report" in caplog.text From 7ae8eabed285e9e7be84043ab63c76b3896da803 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:33:52 +0100 Subject: [PATCH 09/18] fix test --- tests/checkpointing/test_model_checkpoint.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py index 3de26ef1a6fb6..f2a01ee8e8b71 100644 --- a/tests/checkpointing/test_model_checkpoint.py +++ b/tests/checkpointing/test_model_checkpoint.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging import os import pickle import platform @@ -570,7 +571,8 @@ def test_model_checkpoint_save_last_warning(tmpdir, caplog, max_epochs, should_v save_top_k=0, save_last=save_last)], max_epochs=max_epochs, ) - trainer.fit(model) + with caplog.at_level(logging.INFO): + trainer.fit(model) assert caplog.messages.count('Saving latest checkpoint...') == save_last From 658c5c1dd3c725134d0ea3ddefb6186aa79f7ba0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:41:17 +0100 Subject: [PATCH 10/18] flake8 --- pl_examples/domain_templates/computer_vision_fine_tuning.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pl_examples/domain_templates/computer_vision_fine_tuning.py b/pl_examples/domain_templates/computer_vision_fine_tuning.py index 92a95ce60c53e..4425457f09f55 100644 --- a/pl_examples/domain_templates/computer_vision_fine_tuning.py +++ b/pl_examples/domain_templates/computer_vision_fine_tuning.py @@ -41,6 +41,7 @@ import argparse from collections import OrderedDict +import logging from pathlib import Path from tempfile import TemporaryDirectory from typing import Generator, Optional, Union @@ -58,9 +59,8 @@ import pytorch_lightning as pl from pl_examples import cli_lightning_logo -import logging -log = logging.getLogger(__name__) +log = logging.getLogger(__name__) BN_TYPES = (torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d) DATA_URL = "https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip" From 1e52a14c53c1f5c30e71e385013f421ac8b8bc75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:44:05 +0100 Subject: [PATCH 11/18] import warning --- pytorch_lightning/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 01e7e2c12c772..300bf41e06d7d 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -55,9 +55,7 @@ except NameError: __LIGHTNING_SETUP__: bool = False -if __LIGHTNING_SETUP__: - import sys # pragma: no-cover - +if __LIGHTNING_SETUP__: # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: From 3013b2c95a2c8cbea886b8f6fd3a0c2e29efed27 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:47:57 +0100 Subject: [PATCH 12/18] add changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7bc448667a86e..f12100abb2006 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed `reinit_scheduler_properties` with correct optimizer ([#5519](https://github.com/PyTorchLightning/pytorch-lightning/pull/5519)) +- Fixed duplicate logs appearing in console when using the python logging module ([#5509](https://github.com/PyTorchLightning/pytorch-lightning/pull/5509)) + + ## [1.1.4] - 2021-01-12 ### Added From 929f3d56d68f9739a720ce8b91f4f4979b42b21a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 02:52:01 +0100 Subject: [PATCH 13/18] isort --- pl_examples/domain_templates/computer_vision_fine_tuning.py | 2 +- pytorch_lightning/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pl_examples/domain_templates/computer_vision_fine_tuning.py b/pl_examples/domain_templates/computer_vision_fine_tuning.py index 4425457f09f55..41e014c644756 100644 --- a/pl_examples/domain_templates/computer_vision_fine_tuning.py +++ b/pl_examples/domain_templates/computer_vision_fine_tuning.py @@ -40,8 +40,8 @@ """ import argparse -from collections import OrderedDict import logging +from collections import OrderedDict from pathlib import Path from tempfile import TemporaryDirectory from typing import Generator, Optional, Union diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 300bf41e06d7d..43aa3a015b1a0 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -2,8 +2,8 @@ import logging as python_logging import os -import time import sys +import time _this_year = time.strftime("%Y") __version__ = '1.1.4' From cb4f34102255df2d53cd718b908329334e7575f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 18 Jan 2021 13:09:05 +0100 Subject: [PATCH 14/18] stderr --- pytorch_lightning/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 43aa3a015b1a0..02fe9d6ecfd66 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -39,7 +39,7 @@ - https://pytorch-lightning.readthedocs.io/en/stable """ python_logging.basicConfig( - stream=sys.stdout, + stream=sys.stderr, format="%(message)s", level=python_logging.INFO, ) From b6b933a56fb53ed4cac3a2253f5a090530e49d62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Thu, 21 Jan 2021 03:16:53 +0100 Subject: [PATCH 15/18] avoid the global settings --- pytorch_lightning/__init__.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 02fe9d6ecfd66..c94fd2d084cc7 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -38,11 +38,10 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ -python_logging.basicConfig( - stream=sys.stderr, - format="%(message)s", - level=python_logging.INFO, -) +_logger = python_logging.getLogger("pytorch_lightning") +_logger.addHandler(python_logging.StreamHandler()) +_logger.setLevel(python_logging.INFO) +_logger.propagate = False PACKAGE_ROOT = os.path.dirname(__file__) 
PROJECT_ROOT = os.path.dirname(PACKAGE_ROOT) From 0e24eada56d62f54bb28b3323a96f320857d91a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Thu, 21 Jan 2021 03:17:42 +0100 Subject: [PATCH 16/18] lightning --- pytorch_lightning/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index c94fd2d084cc7..aa7e5cbba4301 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -38,7 +38,7 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ -_logger = python_logging.getLogger("pytorch_lightning") +_logger = python_logging.getLogger(__name__) _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) _logger.propagate = False From 42bfd40a3ed9a0f1e1afd315ff73f086c4fb1bfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Sun, 24 Jan 2021 07:28:51 +0100 Subject: [PATCH 17/18] logging with root logger in tests --- pytorch_lightning/__init__.py | 9 +++++++-- tests/__init__.py | 4 ++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index aa7e5cbba4301..4b5137277f3e8 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -38,10 +38,15 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ +_root_logger = python_logging.getLogger() _logger = python_logging.getLogger(__name__) -_logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) -_logger.propagate = False + +# if root logger has handlers, propagate messages up and let root logger process them +if not _root_logger.hasHandlers(): + _logger.addHandler(python_logging.StreamHandler()) + _logger.propagate = False + PACKAGE_ROOT = os.path.dirname(__file__) PROJECT_ROOT = os.path.dirname(PACKAGE_ROOT) diff --git a/tests/__init__.py b/tests/__init__.py index b4a7291dfec66..e035d738ce2be 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging import os import numpy as np @@ -31,3 +32,6 @@ if not os.path.isdir(TEMP_PATH): os.mkdir(TEMP_PATH) + + +logging.basicConfig(level=logging.ERROR) From aa9acca9429bd59097b737ab33a2616db596366f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Sun, 24 Jan 2021 10:01:10 +0100 Subject: [PATCH 18/18] consistent name --- pytorch_lightning/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py index 45ce08db01994..7bf73f8091765 100644 --- a/pytorch_lightning/__init__.py +++ b/pytorch_lightning/__init__.py @@ -1,6 +1,6 @@ """Root package info.""" -import logging as python_logging +import logging import os import sys import time @@ -38,13 +38,13 @@ - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ -_root_logger = python_logging.getLogger() -_logger = python_logging.getLogger(__name__) -_logger.setLevel(python_logging.INFO) +_root_logger = logging.getLogger() +_logger = logging.getLogger(__name__) +_logger.setLevel(logging.INFO) # if root logger has handlers, propagate messages up and let root logger process them if not _root_logger.hasHandlers(): - _logger.addHandler(python_logging.StreamHandler()) + _logger.addHandler(logging.StreamHandler()) _logger.propagate = False
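For reference, a minimal sketch (not part of the patches above, and only an assumed usage pattern) of how a downstream script might interact with the logger configured in the final pytorch_lightning/__init__.py: with this series applied, the "pytorch_lightning" logger propagates to the root logger whenever the application has already attached handlers to it before the first import, and otherwise falls back to its own StreamHandler.

import logging

# Configure the root logger *before* the first import of pytorch_lightning,
# so that hasHandlers() is True at import time and the package logger
# propagates to the root handler instead of attaching its own StreamHandler.
logging.basicConfig(level=logging.INFO, format="%(name)s: %(message)s")

import pytorch_lightning as pl  # noqa: E402

print(pl.__version__)

# The package logger can still be tuned after import, e.g. to silence
# Lightning's INFO messages without affecting other libraries:
logging.getLogger("pytorch_lightning").setLevel(logging.WARNING)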