2 changes: 1 addition & 1 deletion requirements.txt
@@ -5,7 +5,7 @@ torch>=1.4
 future>=0.17.1 # required for builtins in setup.py
 tqdm>=4.41.0
 PyYAML>=5.1,<=5.4.1
-fsspec[http]>=2021.05.0, !=2021.06.0
+fsspec[http]>=2021.05.0
 tensorboard>=2.2.0, !=2.5.0 # 2.5.0 GPU CI error: 'Couldn't build proto file into descriptor pool!'
 torchmetrics>=0.2.0
 pyDeprecate==0.3.0
1 change: 1 addition & 0 deletions tests/callbacks/test_early_stopping.py
@@ -56,6 +56,7 @@ def test_resume_early_stopping_from_checkpoint(tmpdir):
     https://github.com/PyTorchLightning/pytorch-lightning/issues/1464
     https://github.com/PyTorchLightning/pytorch-lightning/issues/1463
     """
+    tmpdir = str(tmpdir)
Contributor: Can it be done in conftest.py directly?

Contributor: Tangentially related, what if we converted all tmpdir inputs to pathlib.Path in conftest?

Contributor: It already is a Path. It can't be converted to str in conftest because otherwise it will fail in places where we expect tmpdir to be a Path.
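
A minimal sketch of the failure mode described above, assuming conftest.py blanket-converted every tmpdir to str (the test below is hypothetical, not part of this PR):

```python
# Hypothetical test: with a global str(tmpdir) conversion in conftest.py,
# path-style usage like this would raise a TypeError on the "/" operator.
def test_uses_path_api(tmpdir):
    ckpt_dir = tmpdir / "checkpoints"  # fine on py.path.local, fails on str
    ckpt_dir.mkdir()                   # create the subdirectory
    assert ckpt_dir.exists()
```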

Contributor: It is a py.path.LocalPath, not a pathlib.Path. There are small differences, but the latter is much more common.
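
A rough sketch of the override the conftest.py suggestion points at (illustrative only, not part of this PR); as noted above, shadowing the built-in fixture like this would break tests that rely on py.path.local methods such as tmpdir.join:

```python
# conftest.py (hypothetical): expose pytest's tmpdir as a pathlib.Path.
import pathlib

import pytest


@pytest.fixture
def tmpdir(tmpdir):
    # Requesting the same-named fixture resolves to pytest's built-in
    # tmpdir (a py.path.local); wrap it in a stdlib pathlib.Path.
    return pathlib.Path(str(tmpdir))
```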

     seed_everything(42)
     model = ClassificationModel()
     dm = ClassifDataModule()
@@ -91,6 +91,7 @@ def training_step(self, batch, batch_idx):
             self.last_coeff *= 0.999
             return loss

+    tmpdir = str(tmpdir)
     model = TestModel()
     trainer = Trainer(
         callbacks=[callbacks.ModelCheckpoint(dirpath=tmpdir, monitor='my_loss', save_top_k=k)],
11 changes: 10 additions & 1 deletion tests/checkpointing/test_model_checkpoint.py
@@ -325,6 +325,7 @@ def test_model_checkpoint_with_non_string_input(tmpdir, save_top_k: int):
 @pytest.mark.parametrize('save_top_k', [-1, 0, 1, 2])
 def test_model_checkpoint_to_yaml(tmpdir, save_top_k: int):
     """ Test that None in checkpoint callback is valid and that chkp_path is set correctly """
+    tmpdir = str(tmpdir)
     tutils.reset_seed()
     model = LogInTwoMethods()

@@ -480,7 +481,7 @@ def test_model_checkpoint_file_extension(tmpdir):
     """
     Test ModelCheckpoint with different file extension.
     """
-
+    tmpdir = str(tmpdir)
     model = LogInTwoMethods()
     model_checkpoint = ModelCheckpointExtensionTest(
         monitor='early_stop_on',
@@ -535,6 +536,7 @@ def test_invalid_top_k(tmpdir):

 def test_none_monitor_top_k(tmpdir):
     """ Test that a warning appears for positive top_k with monitor=None. """
+    tmpdir = str(tmpdir)
     with pytest.raises(
         MisconfigurationException, match=r'ModelCheckpoint\(save_top_k=3, monitor=None\) is not a valid*'
     ):
@@ -657,6 +659,7 @@ def test_model_checkpoint_period(tmpdir, period: int):

 @pytest.mark.parametrize("every_n_val_epochs", list(range(4)))
 def test_model_checkpoint_every_n_val_epochs(tmpdir, every_n_val_epochs):
+    tmpdir = str(tmpdir)
     model = LogInTwoMethods()
     epochs = 5
     checkpoint_callback = ModelCheckpoint(
@@ -681,6 +684,7 @@ def test_model_checkpoint_every_n_val_epochs(tmpdir, every_n_val_epochs):
 @pytest.mark.parametrize("every_n_val_epochs", list(range(4)))
 def test_model_checkpoint_every_n_val_epochs_and_period(tmpdir, every_n_val_epochs):
     """ Tests that if period is set, it takes precedence over every_n_val_epochs for backwards compatibility. """
+    tmpdir = str(tmpdir)
     model = LogInTwoMethods()
     epochs = 5
     checkpoint_callback = ModelCheckpoint(
@@ -896,6 +900,7 @@ def test_model_checkpoint_save_last_warning(

 def test_model_checkpoint_save_last_checkpoint_contents(tmpdir):
     """ Tests that the save_last checkpoint contains the latest information. """
+    tmpdir = str(tmpdir)
     seed_everything(100)
     model = LogInTwoMethods()
     num_epochs = 3
@@ -931,6 +936,7 @@ def test_model_checkpoint_save_last_checkpoint_contents(tmpdir):
 @mock.patch.dict(os.environ, {"PL_DEV_DEBUG": "1"})
 @pytest.mark.parametrize('mode', ['min', 'max'])
 def test_checkpointing_with_nan_as_first(tmpdir, mode: int):
+    tmpdir = str(tmpdir)
     monitor = [float('nan')]
     monitor += [5, 7, 8] if mode == 'max' else [8, 7, 5]

@@ -1150,6 +1156,7 @@ def test_val_check_interval_checkpoint_files(tmpdir):

 def test_current_score(tmpdir):
     """ Check that the current_score value is correct and was saved """
+    tmpdir = str(tmpdir)

     class TestModel(BoringModel):

@@ -1183,6 +1190,7 @@ def training_step(self, *args):
 @pytest.mark.parametrize("mode", ["min", "max"])
 def test_current_score_when_nan(tmpdir, mode: str):
     """ Check that ModelCheckpoint handles NaN values correctly """
+    tmpdir = str(tmpdir)

     class TestModel(BoringModel):

@@ -1213,6 +1221,7 @@ def training_step(self, *args):

 @pytest.mark.parametrize("hparams_type", [dict, Container])
 def test_hparams_type(tmpdir, hparams_type):
+    tmpdir = str(tmpdir)

     class TestModel(BoringModel):

4 changes: 4 additions & 0 deletions tests/models/test_cpu.py
@@ -94,6 +94,7 @@ def on_train_epoch_start(self, trainer, model):


 def test_early_stopping_cpu_model(tmpdir):
+    tmpdir = str(tmpdir)

     class ModelTrainVal(BoringModel):

@@ -128,6 +129,7 @@ def validation_step(self, *args, **kwargs):
 def test_multi_cpu_model_ddp(tmpdir):
     """Make sure DDP works."""
     tutils.set_random_master_port()
+    tmpdir = str(tmpdir)

     trainer_options = dict(
         default_root_dir=tmpdir,
@@ -295,6 +297,7 @@ def test_simple_cpu(tmpdir):

 def test_cpu_model(tmpdir):
     """Make sure model trains on CPU."""
+    tmpdir = str(tmpdir)
     trainer_options = dict(
         default_root_dir=tmpdir, progress_bar_refresh_rate=0, max_epochs=1, limit_train_batches=4, limit_val_batches=4
     )
@@ -305,6 +308,7 @@ def test_cpu_model(tmpdir):

 def test_all_features_cpu_model(tmpdir):
     """Test each of the trainer options."""
+    tmpdir = str(tmpdir)
     trainer_options = dict(
         default_root_dir=tmpdir,
         gradient_clip_val=1.0,
2 changes: 2 additions & 0 deletions tests/trainer/test_trainer.py
@@ -318,6 +318,7 @@ def test_loading_yaml(tmpdir):
 )
 def test_model_checkpoint_options(tmpdir, save_top_k, save_last, expected_files):
     """Test ModelCheckpoint options."""
+    tmpdir = str(tmpdir)

     def mock_save_function(filepath, *args):
         open(filepath, "a").close()
@@ -1836,6 +1837,7 @@ def validation_epoch_end(self, outputs) -> None:
 @RunIf(skip_windows=True)
 def test_fit_test_synchronization(tmpdir):
     """Test that the trainer synchronizes processes before returning control back to the caller. """
+    tmpdir = str(tmpdir)
     tutils.set_random_master_port()
     model = TestDummyModelForCheckpoint()
     checkpoint = ModelCheckpoint(dirpath=tmpdir, monitor='x', mode='min', save_top_k=1)