1 change: 0 additions & 1 deletion pyproject.toml
@@ -57,7 +57,6 @@ module = [
     "pytorch_lightning.core.saving",
     "pytorch_lightning.demos.boring_classes",
     "pytorch_lightning.demos.mnist_datamodule",
-    "pytorch_lightning.loggers.comet",
     "pytorch_lightning.loggers.neptune",
     "pytorch_lightning.profilers.base",
     "pytorch_lightning.profilers.pytorch",
32 changes: 16 additions & 16 deletions src/pytorch_lightning/loggers/comet.py
@@ -21,7 +21,7 @@
 from argparse import Namespace
 from typing import Any, Callable, Dict, Mapping, Optional, Sequence, Union

-from torch import is_tensor, Tensor
+from torch import Tensor

 import pytorch_lightning as pl
 from pytorch_lightning.loggers.logger import Logger, rank_zero_experiment
@@ -141,14 +141,16 @@ def __init__(
         prefix: str = "",
         agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
         agg_default_func: Optional[Callable[[Sequence[float]], float]] = None,
-        **kwargs,
+        **kwargs: Any,
     ):
         if comet_ml is None:
             raise ModuleNotFoundError(
                 "You want to use `comet_ml` logger which is not installed yet, install it with `pip install comet-ml`."
             )
         super().__init__(agg_key_funcs=agg_key_funcs, agg_default_func=agg_default_func)
         self._experiment = None
+        self._save_dir: Optional[str]
+        self.rest_api_key: Optional[str]

         # Determine online or offline mode based on which arguments were passed to CometLogger
         api_key = api_key or comet_ml.config.get_api_key(None, comet_ml.config.get_config())
@@ -170,12 +172,12 @@ def __init__(

         log.info(f"CometLogger will be initialized in {self.mode} mode")

-        self._project_name = project_name
-        self._experiment_key = experiment_key
-        self._experiment_name = experiment_name
-        self._prefix = prefix
-        self._kwargs = kwargs
-        self._future_experiment_key = None
+        self._project_name: Optional[str] = project_name
+        self._experiment_key: Optional[str] = experiment_key
+        self._experiment_name: Optional[str] = experiment_name
+        self._prefix: str = prefix
+        self._kwargs: Any = kwargs
+        self._future_experiment_key: Optional[str] = None

         if rest_api_key is not None:
             # Comet.ml rest API, used to determine version number
@@ -185,9 +187,7 @@
             self.rest_api_key = None
             self.comet_api = None

-        self._kwargs = kwargs
-
-    @property
+    @property  # type: ignore[misc]
     @rank_zero_experiment
     def experiment(self) -> Union[CometExperiment, CometExistingExperiment, CometOfflineExperiment]:
         r"""
@@ -240,19 +240,19 @@ def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
         self.experiment.log_parameters(params)

     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, Union[Tensor, float]], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Mapping[str, Union[Tensor, float]], step: Optional[int] = None) -> None:
         assert rank_zero_only.rank == 0, "experiment tried to log from global_rank != 0"
         # Comet.ml expects metrics to be a dictionary of detached tensors on CPU
         metrics_without_epoch = metrics.copy()
         for key, val in metrics_without_epoch.items():
-            if is_tensor(val):
+            if isinstance(val, Tensor):
                 metrics_without_epoch[key] = val.cpu().detach()

         epoch = metrics_without_epoch.pop("epoch", None)
         metrics_without_epoch = _add_prefix(metrics_without_epoch, self._prefix, self.LOGGER_JOIN_CHAR)
         self.experiment.log_metrics(metrics_without_epoch, step=step, epoch=epoch)

-    def reset_experiment(self):
+    def reset_experiment(self) -> None:
         self._experiment = None

     @rank_zero_only
@@ -326,7 +326,7 @@ def version(self) -> str:

         return self._future_experiment_key

-    def __getstate__(self):
+    def __getstate__(self) -> Dict[str, Any]:
         state = self.__dict__.copy()

         # Save the experiment id in case an experiment object already exists,
@@ -340,6 +340,6 @@ def __getstate__(self):
         state["_experiment"] = None
         return state

-    def log_graph(self, model: "pl.LightningModule", input_array=None) -> None:
+    def log_graph(self, model: "pl.LightningModule", input_array: Optional[Tensor] = None) -> None:
         if self._experiment is not None:
             self._experiment.set_model_graph(model)
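
For context on the `is_tensor` → `isinstance` swap in the hunk above: at the time, `torch.is_tensor` returned a plain `bool` without a `TypeGuard` annotation, so mypy could not narrow `Union[Tensor, float]` inside the branch, whereas `isinstance(val, Tensor)` narrows natively. A minimal sketch of the pattern (illustrative only; `detach_metrics` is a hypothetical helper, not part of this PR):

```python
from typing import Dict, Mapping, Union

from torch import Tensor


def detach_metrics(metrics: Mapping[str, Union[Tensor, float]]) -> Dict[str, Union[Tensor, float]]:
    """Copy `metrics`, moving any tensor values to CPU and detaching them."""
    out: Dict[str, Union[Tensor, float]] = dict(metrics)
    for key, val in out.items():
        if isinstance(val, Tensor):  # mypy narrows `val` to Tensor in this branch
            out[key] = val.cpu().detach()
    return out
```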
2 changes: 1 addition & 1 deletion src/pytorch_lightning/loggers/csv_logs.py
@@ -195,7 +195,7 @@ def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
         self.experiment.log_hparams(params)

     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Dict[str, Union[Tensor, float]], step: Optional[int] = None) -> None:
         metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
         self.experiment.log_metrics(metrics, step)
         if step is not None and (step + 1) % self._flush_logs_every_n_steps == 0:
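
A hedged usage sketch of the call pattern this widened signature permits: tensors and plain floats mixed in one `log_metrics` call (the save path and values are illustrative, and the CSV writer is expected to unwrap tensor values when writing rows):

```python
import torch

from pytorch_lightning.loggers import CSVLogger

logger = CSVLogger(save_dir="logs")  # illustrative directory
# With metrics typed as Dict[str, Union[Tensor, float]], both kinds of value are accepted:
logger.log_metrics({"loss": torch.tensor(0.25), "acc": 0.9}, step=10)
logger.save()  # flush buffered rows to metrics.csv
```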
4 changes: 2 additions & 2 deletions src/pytorch_lightning/loggers/mlflow.py
@@ -20,7 +20,7 @@
 import re
 from argparse import Namespace
 from time import time
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, Mapping, Optional, Union

 from pytorch_lightning.loggers.logger import Logger, rank_zero_experiment
 from pytorch_lightning.utilities.imports import _module_available
@@ -230,7 +230,7 @@ def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
             self.experiment.log_param(self.run_id, k, v)

     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Mapping[str, float], step: Optional[int] = None) -> None:
         assert rank_zero_only.rank == 0, "experiment tried to log from global_rank != 0"

         metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
2 changes: 1 addition & 1 deletion src/pytorch_lightning/loggers/tensorboard.py
@@ -216,7 +216,7 @@ def log_hyperparams(
             writer.add_summary(sei)

     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Mapping[str, float], step: Optional[int] = None) -> None:
         assert rank_zero_only.rank == 0, "experiment tried to log from global_rank != 0"

         metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
2 changes: 1 addition & 1 deletion src/pytorch_lightning/loggers/wandb.py
@@ -379,7 +379,7 @@ def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
         self.experiment.config.update(params, allow_val_change=True)

     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Mapping[str, float], step: Optional[int] = None) -> None:
         assert rank_zero_only.rank == 0, "experiment tried to log from global_rank != 0"

         metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
6 changes: 4 additions & 2 deletions src/pytorch_lightning/utilities/logger.py
@@ -14,7 +14,7 @@
 """Utilities for loggers."""

 from argparse import Namespace
-from typing import Any, Dict, Generator, List, MutableMapping, Optional, Union
+from typing import Any, Dict, Generator, List, Mapping, MutableMapping, Optional, Union

 import numpy as np
 import torch
@@ -132,7 +132,9 @@ def _sanitize_params(params: Dict[str, Any]) -> Dict[str, Any]:
     return params


-def _add_prefix(metrics: Dict[str, float], prefix: str, separator: str) -> Dict[str, float]:
+def _add_prefix(
+    metrics: Mapping[str, Union[Tensor, float]], prefix: str, separator: str
+) -> Mapping[str, Union[Tensor, float]]:
     """Insert prefix before each key in a dict, separated by the separator.

     Args:
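
Finally, since `_add_prefix` is now typed over `Mapping[str, Union[Tensor, float]]`, here is a minimal sketch of the contract its docstring describes — a re-implementation for illustration, not the library code:

```python
from typing import Mapping, Union

from torch import Tensor


def add_prefix(
    metrics: Mapping[str, Union[Tensor, float]], prefix: str, separator: str
) -> Mapping[str, Union[Tensor, float]]:
    """Insert `prefix` before each key, joined by `separator` (per the docstring above)."""
    if not prefix:
        return metrics
    return {f"{prefix}{separator}{key}": val for key, val in metrics.items()}


# Example: a "train" prefix with "_" as the join character.
assert add_prefix({"loss": 0.1}, "train", "_") == {"train_loss": 0.1}
```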