diff --git a/pyproject.toml b/pyproject.toml
index 989e63122f640..5838495239725 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,8 +58,6 @@ module = [
     "pytorch_lightning.core.saving",
     "pytorch_lightning.demos.boring_classes",
     "pytorch_lightning.demos.mnist_datamodule",
-    "pytorch_lightning.loggers.comet",
-    "pytorch_lightning.loggers.mlflow",
     "pytorch_lightning.loggers.neptune",
     "pytorch_lightning.loggers.tensorboard",
     "pytorch_lightning.loggers.wandb",
diff --git a/src/pytorch_lightning/loggers/comet.py b/src/pytorch_lightning/loggers/comet.py
index 2b853f59259ff..dc9a2d3364408 100644
--- a/src/pytorch_lightning/loggers/comet.py
+++ b/src/pytorch_lightning/loggers/comet.py
@@ -141,7 +141,7 @@ def __init__(
         prefix: str = "",
         agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
         agg_default_func: Optional[Callable[[Sequence[float]], float]] = None,
-        **kwargs,
+        **kwargs: Any,
     ):
         if comet_ml is None:
             raise ModuleNotFoundError(
@@ -149,6 +149,8 @@ def __init__(
             )
         super().__init__(agg_key_funcs=agg_key_funcs, agg_default_func=agg_default_func)
         self._experiment = None
+        self._save_dir: Optional[str]
+        self.rest_api_key: Optional[str]
 
         # Determine online or offline mode based on which arguments were passed to CometLogger
         api_key = api_key or comet_ml.config.get_api_key(None, comet_ml.config.get_config())
@@ -170,12 +172,12 @@ def __init__(
 
         log.info(f"CometLogger will be initialized in {self.mode} mode")
 
-        self._project_name = project_name
-        self._experiment_key = experiment_key
-        self._experiment_name = experiment_name
-        self._prefix = prefix
-        self._kwargs = kwargs
-        self._future_experiment_key = None
+        self._project_name: Optional[str] = project_name
+        self._experiment_key: Optional[str] = experiment_key
+        self._experiment_name: Optional[str] = experiment_name
+        self._prefix: str = prefix
+        self._kwargs: Any = kwargs
+        self._future_experiment_key: Optional[str] = None
 
         if rest_api_key is not None:
             # Comet.ml rest API, used to determine version number
@@ -187,7 +189,7 @@ def __init__(
 
         self._kwargs = kwargs
 
-    @property
+    @property  # type: ignore[misc]
     @rank_zero_experiment
     def experiment(self) -> Union[CometExperiment, CometExistingExperiment, CometOfflineExperiment]:
         r"""
@@ -246,13 +248,13 @@ def log_metrics(self, metrics: Dict[str, Union[Tensor, float]], step: Optional[i
         metrics_without_epoch = metrics.copy()
         for key, val in metrics_without_epoch.items():
             if is_tensor(val):
-                metrics_without_epoch[key] = val.cpu().detach()
+                metrics_without_epoch[key] = val.cpu().detach()  # type: ignore[union-attr]
 
         epoch = metrics_without_epoch.pop("epoch", None)
         metrics_without_epoch = _add_prefix(metrics_without_epoch, self._prefix, self.LOGGER_JOIN_CHAR)
         self.experiment.log_metrics(metrics_without_epoch, step=step, epoch=epoch)
 
-    def reset_experiment(self):
+    def reset_experiment(self) -> None:
         self._experiment = None
 
     @rank_zero_only
@@ -326,7 +328,7 @@ def version(self) -> str:
 
         return self._future_experiment_key
 
-    def __getstate__(self):
+    def __getstate__(self):  # type: ignore[no-untyped-def]
         state = self.__dict__.copy()
 
         # Save the experiment id in case an experiment object already exists,
@@ -340,6 +342,6 @@ def __getstate__(self):
         state["_experiment"] = None
         return state
 
-    def log_graph(self, model: "pl.LightningModule", input_array=None) -> None:
+    def log_graph(self, model: "pl.LightningModule", input_array: Optional[Tensor] = None) -> None:
         if self._experiment is not None:
             self._experiment.set_model_graph(model)
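The `# type: ignore[misc]` added to `experiment` above works around a known mypy limitation: a `@property` stacked on any other decorator is rejected with `Decorated property not supported [misc]`, even though the pattern is valid at runtime. A minimal, self-contained sketch of the situation (the `rank_zero_experiment` below is a simplified stand-in, not the real implementation):

```python
from typing import Any, Callable, TypeVar

T = TypeVar("T")


def rank_zero_experiment(fn: Callable[..., T]) -> Callable[..., T]:
    """Stand-in for the real decorator: just forwards the call."""

    def wrapper(*args: Any, **kwargs: Any) -> T:
        return fn(*args, **kwargs)

    return wrapper


class DemoLogger:
    @property  # type: ignore[misc]  # mypy: "Decorated property not supported"
    @rank_zero_experiment
    def experiment(self) -> str:
        # Works fine at runtime; only mypy needs the ignore above.
        return "experiment handle"


print(DemoLogger().experiment)  # -> "experiment handle"
```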
diff --git a/src/pytorch_lightning/loggers/csv_logs.py b/src/pytorch_lightning/loggers/csv_logs.py
index 72d21ae2c4974..45d5fffb51e33 100644
--- a/src/pytorch_lightning/loggers/csv_logs.py
+++ b/src/pytorch_lightning/loggers/csv_logs.py
@@ -195,7 +195,7 @@ def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
         self.experiment.log_hparams(params)
 
     @rank_zero_only
-    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
+    def log_metrics(self, metrics: Dict[str, Union[Tensor, float]], step: Optional[int] = None) -> None:
         metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)
         self.experiment.log_metrics(metrics, step)
         if step is not None and (step + 1) % self._flush_logs_every_n_steps == 0:
diff --git a/src/pytorch_lightning/utilities/logger.py b/src/pytorch_lightning/utilities/logger.py
index 07ecf4c3c0ca0..7e404482bea24 100644
--- a/src/pytorch_lightning/utilities/logger.py
+++ b/src/pytorch_lightning/utilities/logger.py
@@ -132,7 +132,9 @@ def _sanitize_params(params: Dict[str, Any]) -> Dict[str, Any]:
     return params
 
 
-def _add_prefix(metrics: Dict[str, float], prefix: str, separator: str) -> Dict[str, float]:
+def _add_prefix(
+    metrics: Dict[str, Union[Tensor, float]], prefix: str, separator: str
+) -> Dict[str, Union[Tensor, float]]:
     """Insert prefix before each key in a dict, separated by the separator.
 
     Args:
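For reference, the widened `_add_prefix` contract now matches how the loggers call it with raw tensor values. A minimal sketch consistent with the new signature (the body mirrors the utility's documented behavior but is written here for illustration, not copied from the source):

```python
from typing import Dict, Union

from torch import Tensor, tensor


def _add_prefix(
    metrics: Dict[str, Union[Tensor, float]], prefix: str, separator: str
) -> Dict[str, Union[Tensor, float]]:
    # Prepend "<prefix><separator>" to every metric key; values pass through
    # unchanged, so Tensors and floats are both fine under the new annotation.
    if prefix:
        metrics = {f"{prefix}{separator}{k}": v for k, v in metrics.items()}
    return metrics


print(_add_prefix({"loss": tensor(0.25), "acc": 0.9}, "train", "/"))
# {'train/loss': tensor(0.2500), 'train/acc': 0.9}
```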