Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed the deprecated `terminate_on_nan` argument from the `Trainer` constructor ([#12553](https://github.com/PyTorchLightning/pytorch-lightning/pull/12553))


- Removed the deprecated `XLAStatsMonitor` callback ([#12688](https://github.com/PyTorchLightning/pytorch-lightning/pull/12688))


- Removed the deprecated `pytorch_lightning.callbacks.progress.progress` ([#12658](https://github.com/PyTorchLightning/pytorch-lightning/pull/12658))


Expand Down
1 change: 0 additions & 1 deletion docs/source/api_references.rst
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@ Callbacks API
StochasticWeightAveraging
Timer
TQDMProgressBar
XLAStatsMonitor

Loggers API
-----------
Expand Down
1 change: 0 additions & 1 deletion docs/source/extensions/callbacks.rst
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,6 @@ Lightning has a few built-in callbacks.
StochasticWeightAveraging
Timer
TQDMProgressBar
XLAStatsMonitor

----------

Expand Down
2 changes: 0 additions & 2 deletions pytorch_lightning/callbacks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,13 @@
from pytorch_lightning.callbacks.rich_model_summary import RichModelSummary
from pytorch_lightning.callbacks.stochastic_weight_avg import StochasticWeightAveraging
from pytorch_lightning.callbacks.timer import Timer
from pytorch_lightning.callbacks.xla_stats_monitor import XLAStatsMonitor

__all__ = [
"BackboneFinetuning",
"BaseFinetuning",
"Callback",
"DeviceStatsMonitor",
"EarlyStopping",
"XLAStatsMonitor",
"GradientAccumulationScheduler",
"LambdaCallback",
"LearningRateMonitor",
Expand Down
114 changes: 0 additions & 114 deletions pytorch_lightning/callbacks/xla_stats_monitor.py

This file was deleted.

70 changes: 0 additions & 70 deletions tests/callbacks/test_xla_stats_monitor.py

This file was deleted.

23 changes: 0 additions & 23 deletions tests/deprecated_api/test_remove_1-7.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,8 @@
import pytest
import torch

import pytorch_lightning
from pytorch_lightning import Callback, LightningDataModule, Trainer
from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor
from pytorch_lightning.callbacks.xla_stats_monitor import XLAStatsMonitor
from pytorch_lightning.loggers import LoggerCollection, TestTubeLogger
from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper
from pytorch_lightning.plugins.environments import (
Expand All @@ -34,7 +32,6 @@
TorchElasticEnvironment,
)
from pytorch_lightning.strategies import SingleDeviceStrategy
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.deprecated_api import _soft_unimport_module
from tests.helpers import BoringModel
from tests.helpers.datamodules import MNISTDataModule
Expand Down Expand Up @@ -310,12 +307,6 @@ def test_v1_7_0_deprecated_slurm_job_id():
trainer.slurm_job_id


def test_v1_7_0_deprecate_xla_stats_monitor(monkeypatch):
monkeypatch.setattr(pytorch_lightning.callbacks.xla_stats_monitor, "_TPU_AVAILABLE", True)
with pytest.deprecated_call(match="The `XLAStatsMonitor` callback was deprecated in v1.5"):
_ = XLAStatsMonitor()


def test_v1_7_0_deprecated_max_steps_none(tmpdir):
with pytest.deprecated_call(match="`max_steps = None` is deprecated in v1.5"):
_ = Trainer(max_steps=None)
Expand Down Expand Up @@ -428,17 +419,3 @@ def post_dispatch(self, trainer):

with pytest.deprecated_call(match=escape("`CustomPlugin.post_dispatch()` has been deprecated in v1.6")):
CustomPlugin(torch.device("cpu"))


def test_xla_stats_monitor_tpu_not_used(monkeypatch):
monkeypatch.setattr(pytorch_lightning.callbacks.xla_stats_monitor, "_TPU_AVAILABLE", True)
with pytest.deprecated_call(match="The `XLAStatsMonitor` callback was deprecated in v1.5"):
xla_stats = XLAStatsMonitor()

trainer = Trainer(accelerator="cpu", callbacks=[xla_stats])
model = BoringModel()
with pytest.raises(
MisconfigurationException,
match="You are using XLAStatsMonitor but are not running on TPU. The accelerator is set to CPUAccelerator.",
):
trainer.fit(model)