19 | 19 | import numpy as np |
20 | 20 | import pytest |
21 | 21 |
22 | | -import pytorch_lightning |
23 | 22 | from pytorch_lightning import Callback, Trainer |
24 | 23 | from pytorch_lightning.callbacks import ModelCheckpoint |
25 | 24 | from pytorch_lightning.demos.boring_classes import BoringDataModule, BoringModel |
30 | 29 | from pytorch_lightning.trainer.configuration_validator import _check_datamodule_checkpoint_hooks |
31 | 30 | from pytorch_lightning.trainer.states import RunningStage |
32 | 31 | from pytorch_lightning.utilities.rank_zero import rank_zero_only |
33 | | -from tests_pytorch.helpers.runif import RunIf |
34 | 32 |
35 | 33 |
36 | 34 | def test_v1_8_0_on_init_start_end(tmpdir): |
@@ -490,104 +488,6 @@ def on_load_checkpoint(self, checkpoint): |
490 | 488 | _check_datamodule_checkpoint_hooks(trainer) |
491 | 489 |
492 | 490 |
493 | | -def test_trainer_config_device_ids(): |
494 | | - trainer = Trainer(devices=2) |
495 | | - with pytest.deprecated_call( |
496 | | - match="`Trainer.devices` was deprecated in v1.6 and will be removed in v1.8." |
497 | | - " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead." |
498 | | - ): |
499 | | - trainer.devices == 2 |
500 | | - |
501 | | - |
502 | | -@pytest.mark.parametrize( |
503 | | - ["gpus", "expected_root_gpu", "strategy"], |
504 | | - [ |
505 | | - pytest.param(None, None, "ddp", id="None is None"), |
506 | | - pytest.param(0, None, "ddp", id="O gpus, expect gpu root device to be None."), |
507 | | - pytest.param(1, 0, "ddp", id="1 gpu, expect gpu root device to be 0."), |
508 | | - pytest.param(-1, 0, "ddp", id="-1 - use all gpus, expect gpu root device to be 0."), |
509 | | - pytest.param("-1", 0, "ddp", id="'-1' - use all gpus, expect gpu root device to be 0."), |
510 | | - pytest.param(3, 0, "ddp", id="3 gpus, expect gpu root device to be 0.(backend:ddp)"), |
511 | | - ], |
512 | | -) |
513 | | -def test_root_gpu_property(cuda_count_4, gpus, expected_root_gpu, strategy): |
514 | | - with pytest.deprecated_call( |
515 | | - match="`Trainer.root_gpu` is deprecated in v1.6 and will be removed in v1.8. " |
516 | | - "Please use `Trainer.strategy.root_device.index` instead." |
517 | | - ): |
518 | | - assert Trainer(gpus=gpus, strategy=strategy).root_gpu == expected_root_gpu |
519 | | - |
520 | | - |
521 | | -@pytest.mark.parametrize( |
522 | | - ["gpus", "expected_root_gpu", "strategy"], |
523 | | - [ |
524 | | - pytest.param(None, None, None, id="None is None"), |
525 | | - pytest.param(None, None, "ddp", id="None is None"), |
526 | | - pytest.param(0, None, "ddp", id="None is None"), |
527 | | - ], |
528 | | -) |
529 | | -def test_root_gpu_property_0_passing(cuda_count_0, gpus, expected_root_gpu, strategy): |
530 | | - with pytest.deprecated_call( |
531 | | - match="`Trainer.root_gpu` is deprecated in v1.6 and will be removed in v1.8. " |
532 | | - "Please use `Trainer.strategy.root_device.index` instead." |
533 | | - ): |
534 | | - assert Trainer(gpus=gpus, strategy=strategy).root_gpu == expected_root_gpu |
535 | | - |
536 | | - |
537 | | -@pytest.mark.parametrize( |
538 | | - ["gpus", "expected_num_gpus", "strategy"], |
539 | | - [ |
540 | | - pytest.param(None, 0, None, id="None - expect 0 gpu to use."), |
541 | | - pytest.param(0, 0, None, id="Oth gpu, expect 1 gpu to use."), |
542 | | - pytest.param(1, 1, None, id="1st gpu, expect 1 gpu to use."), |
543 | | - pytest.param(-1, 4, "ddp", id="-1 - use all gpus"), |
544 | | - pytest.param("-1", 4, "ddp", id="'-1' - use all gpus"), |
545 | | - pytest.param(3, 3, "ddp", id="3rd gpu - 1 gpu to use (backend:ddp)"), |
546 | | - ], |
547 | | -) |
548 | | -def test_trainer_gpu_parse(cuda_count_4, gpus, expected_num_gpus, strategy): |
549 | | - with pytest.deprecated_call( |
550 | | - match="`Trainer.num_gpus` was deprecated in v1.6 and will be removed in v1.8." |
551 | | - " Please use `Trainer.num_devices` instead." |
552 | | - ): |
553 | | - assert Trainer(gpus=gpus, strategy=strategy).num_gpus == expected_num_gpus |
554 | | - |
555 | | - |
556 | | -@pytest.mark.parametrize( |
557 | | - ["gpus", "expected_num_gpus", "strategy"], |
558 | | - [ |
559 | | - pytest.param(None, 0, None, id="None - expect 0 gpu to use."), |
560 | | - pytest.param(None, 0, "ddp", id="None - expect 0 gpu to use."), |
561 | | - ], |
562 | | -) |
563 | | -def test_trainer_num_gpu_0(cuda_count_0, gpus, expected_num_gpus, strategy): |
564 | | - with pytest.deprecated_call( |
565 | | - match="`Trainer.num_gpus` was deprecated in v1.6 and will be removed in v1.8." |
566 | | - " Please use `Trainer.num_devices` instead." |
567 | | - ): |
568 | | - assert Trainer(gpus=gpus, strategy=strategy).num_gpus == expected_num_gpus |
569 | | - |
570 | | - |
571 | | -@pytest.mark.parametrize( |
572 | | - ["trainer_kwargs", "expected_ipus"], |
573 | | - [ |
574 | | - ({}, 0), |
575 | | - ({"devices": 1}, 0), |
576 | | - ({"accelerator": "ipu", "devices": 1}, 1), |
577 | | - ({"accelerator": "ipu", "devices": 8}, 8), |
578 | | - ], |
579 | | -) |
580 | | -def test_trainer_config_ipus(monkeypatch, trainer_kwargs, expected_ipus): |
581 | | - monkeypatch.setattr(pytorch_lightning.accelerators.ipu.IPUAccelerator, "is_available", lambda _: True) |
582 | | - monkeypatch.setattr(pytorch_lightning.strategies.ipu, "_IPU_AVAILABLE", lambda: True) |
583 | | - trainer = Trainer(**trainer_kwargs) |
584 | | - with pytest.deprecated_call( |
585 | | - match="`Trainer.ipus` was deprecated in v1.6 and will be removed in v1.8." |
586 | | - " Please use `Trainer.num_devices` instead." |
587 | | - ): |
588 | | - trainer.ipus == expected_ipus |
589 | | - |
590 | | - |
591 | 491 | def test_v1_8_0_deprecated_lightning_ipu_module(): |
592 | 492 | with pytest.deprecated_call(match=r"has been deprecated in v1.7.0 and will be removed in v1.8."): |
593 | 493 | _ = LightningIPUModule(BoringModel(), 32) |
@@ -653,39 +553,3 @@ def on_save_checkpoint(self, trainer, pl_module, checkpoint): |
653 | 553 |
654 | 554 | trainer.callbacks = [TestCallbackSaveHookOverride()] |
655 | 555 | trainer.save_checkpoint(tmpdir + "/pathok.ckpt") |
656 | | - |
657 | | - |
658 | | -@pytest.mark.parametrize( |
659 | | - "trainer_kwargs", |
660 | | - [ |
661 | | - pytest.param({"accelerator": "gpu", "devices": 2}, marks=RunIf(mps=False)), |
662 | | - pytest.param({"accelerator": "gpu", "devices": [0, 2]}, marks=RunIf(mps=False)), |
663 | | - pytest.param({"accelerator": "gpu", "devices": "2"}, marks=RunIf(mps=False)), |
664 | | - pytest.param({"accelerator": "gpu", "devices": "0,"}, marks=RunIf(mps=False)), |
665 | | - pytest.param({"accelerator": "gpu", "devices": 1}, marks=RunIf(mps=True)), |
666 | | - pytest.param({"accelerator": "gpu", "devices": [0]}, marks=RunIf(mps=True)), |
667 | | - pytest.param({"accelerator": "gpu", "devices": "0,"}, marks=RunIf(mps=True)), |
668 | | - ], |
669 | | -) |
670 | | -def test_trainer_gpus(cuda_count_4, trainer_kwargs): |
671 | | - trainer = Trainer(**trainer_kwargs) |
672 | | - with pytest.deprecated_call( |
673 | | - match=( |
674 | | - "`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8." |
675 | | - " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead." |
676 | | - ) |
677 | | - ): |
678 | | - assert trainer.gpus == trainer_kwargs["devices"] |
679 | | - |
680 | | - |
681 | | -@RunIf(skip_windows=True) |
682 | | -def test_trainer_tpu_cores(monkeypatch): |
683 | | - monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "is_available", lambda _: True) |
684 | | - trainer = Trainer(accelerator="tpu", devices=8) |
685 | | - with pytest.deprecated_call( |
686 | | - match=( |
687 | | - "`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. " |
688 | | - "Please use `Trainer.num_devices` instead." |
689 | | - ) |
690 | | - ): |
691 | | - assert trainer.tpu_cores == 8 |