10 changes: 5 additions & 5 deletions pytorch_lightning/core/memory.py
@@ -14,10 +14,10 @@
 from pytorch_lightning.utilities import rank_zero_deprecation

 rank_zero_deprecation(
-    "`pytorch_lightning.core.memory.get_memory_profile` and"
-    " `pytorch_lightning.core.memory.get_gpu_memory_map` have been moved"
-    " to `pytorch_lightning.utilities.memory` since v1.5 and will be removed in v1.7."
+    "`pytorch_lightning.core.memory.LayerSummary` and"
+    " `pytorch_lightning.core.memory.ModelSummary` have been moved"
+    " to `pytorch_lightning.utilities.model_summary` since v1.5 and will be removed in v1.7."
 )

-# To support backward compatibility as get_memory_profile and get_gpu_memory_map have been moved
-from pytorch_lightning.utilities.memory import get_gpu_memory_map, get_memory_profile  # noqa: E402, F401  # isort: skip
+# To support backward compatibility as LayerSummary and ModelSummary have been moved
+from pytorch_lightning.utilities.model_summary import LayerSummary, ModelSummary  # noqa: E402, F401  # isort: skip
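
Note: a minimal usage sketch of the backward-compatibility shim above (assuming a fresh interpreter, so the module-level deprecation fires on the first import). The old import path still resolves because the names are re-exported from `pytorch_lightning.utilities.model_summary`:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Old location still works; the classes actually live in utilities.model_summary.
    from pytorch_lightning.core.memory import LayerSummary, ModelSummary  # noqa: F401

for w in caught:
    print(w.category.__name__, w.message)  # expects the deprecation message shown above
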
33 changes: 0 additions & 33 deletions pytorch_lightning/utilities/memory.py
@@ -96,39 +96,6 @@ def garbage_collection_cuda() -> None:
             raise


-def get_memory_profile(mode: str) -> Dict[str, float]:
-    r"""
-    .. deprecated:: v1.5
-        This function was deprecated in v1.5 in favor of
-        `pytorch_lightning.accelerators.gpu._get_nvidia_gpu_stats` and will be removed in v1.7.
-
-    Get a profile of the current memory usage.
-
-    Args:
-        mode: There are two modes:
-
-            - 'all' means return memory for all gpus
-            - 'min_max' means return memory for max and min
-
-    Return:
-        A dictionary in which the keys are device ids as integers and
-        values are memory usage as integers in MB.
-        If mode is 'min_max', the dictionary will also contain two additional keys:
-
-        - 'min_gpu_mem': the minimum memory usage in MB
-        - 'max_gpu_mem': the maximum memory usage in MB
-    """
-    memory_map = get_gpu_memory_map()
-
-    if mode == "min_max":
-        min_index, min_memory = min(memory_map.items(), key=lambda item: item[1])
-        max_index, max_memory = max(memory_map.items(), key=lambda item: item[1])
-
-        memory_map = {"min_gpu_mem": min_memory, "max_gpu_mem": max_memory}
-
-    return memory_map
-
-
 def get_gpu_memory_map() -> Dict[str, float]:
     r"""
     .. deprecated:: v1.5
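
Note: since `get_memory_profile` is deleted here, callers that relied on its 'min_max' mode can reproduce the reduction over any {device_id: memory-in-MB} map with plain Python. A minimal sketch mirroring the removed branch (the helper name `min_max_memory` and the sample values are illustrative, not a Lightning API):

from typing import Dict


def min_max_memory(memory_map: Dict[str, float]) -> Dict[str, float]:
    # Same reduction as the removed 'min_max' branch of get_memory_profile.
    _, min_memory = min(memory_map.items(), key=lambda item: item[1])
    _, max_memory = max(memory_map.items(), key=lambda item: item[1])
    return {"min_gpu_mem": min_memory, "max_gpu_mem": max_memory}


print(min_max_memory({"0": 1024.0, "1": 2048.0}))  # {'min_gpu_mem': 1024.0, 'max_gpu_mem': 2048.0}
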
6 changes: 3 additions & 3 deletions tests/deprecated_api/test_remove_1-7.py
@@ -43,10 +43,10 @@
 from tests.plugins.environments.test_lsf_environment import _make_rankfile


-def test_v1_7_0_moved_get_memory_profile_and_get_gpu_memory_map(tmpdir):
+def test_v1_7_0_moved_model_summary_and_layer_summary(tmpdir):
     _soft_unimport_module("pytorch_lightning.core.memory")
-    with pytest.deprecated_call(match="to `pytorch_lightning.utilities.memory` since v1.5"):
-        from pytorch_lightning.core.memory import get_gpu_memory_map, get_memory_profile  # noqa: F401
+    with pytest.deprecated_call(match="to `pytorch_lightning.utilities.model_summary` since v1.5"):
+        from pytorch_lightning.core.memory import LayerSummary, ModelSummary  # noqa: F401


 def test_v1_7_0_datamodule_transform_properties(tmpdir):
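
Note: the updated test uses the standard pattern for exercising an import-time deprecation: drop the module from `sys.modules` so the module-level warning fires again, then assert on it with `pytest.deprecated_call`. A standalone sketch of that pattern (inlining what `_soft_unimport_module` does; the test name is illustrative):

import sys

import pytest


def test_old_import_path_warns():
    # Force re-import so the module-level rank_zero_deprecation runs again.
    sys.modules.pop("pytorch_lightning.core.memory", None)
    with pytest.deprecated_call(match="pytorch_lightning.utilities.model_summary"):
        from pytorch_lightning.core.memory import ModelSummary  # noqa: F401
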
4 changes: 0 additions & 4 deletions tests/strategies/test_ddp_spawn.py
@@ -15,7 +15,6 @@
 import tests.helpers.utils as tutils
 from pytorch_lightning.callbacks import EarlyStopping
 from pytorch_lightning.trainer import Trainer
-from pytorch_lightning.utilities import memory
 from tests.helpers import BoringModel
 from tests.helpers.datamodules import ClassifDataModule
 from tests.helpers.runif import RunIf
@@ -61,9 +60,6 @@ def test_multi_gpu_model_ddp_spawn(tmpdir):

     tpipes.run_model_test(trainer_options, model)

-    # test memory helper functions
-    memory.get_memory_profile("min_max")
-

 @RunIf(min_gpus=2)
 def test_ddp_all_dataloaders_passed_to_fit(tmpdir):
4 changes: 0 additions & 4 deletions tests/strategies/test_dp.py
@@ -23,7 +23,6 @@
 import tests.helpers.utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import EarlyStopping
-from pytorch_lightning.utilities import memory
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel, RandomDataset
 from tests.helpers.datamodules import ClassifDataModule
@@ -99,9 +98,6 @@ def test_multi_gpu_model_dp(tmpdir):

     tpipes.run_model_test(trainer_options, model)

-    # test memory helper functions
-    memory.get_memory_profile("min_max")
-

 class ReductionTestModel(BoringModel):
     def train_dataloader(self):