File tree Expand file tree Collapse file tree 3 files changed +21
-0
lines changed Expand file tree Collapse file tree 3 files changed +21
-0
lines changed Original file line number Diff line number Diff line change 1717
1818
1919def pick_multiple_gpus (nb ):
20+ '''
21+ Raises:
22+ MisconfigurationException:
 22+ If ``gpus`` is set to 0 when ``auto_select_gpus=True``.
24+ '''
2025 if nb == 0 :
2126 raise MisconfigurationException (
2227 r"auto_select_gpus=True, gpus=0 is not a valid configuration.\
@@ -33,6 +38,11 @@ def pick_multiple_gpus(nb):
3338
3439
3540def pick_single_gpu (exclude_gpus : list ):
41+ '''
42+ Raises:
43+ RuntimeError:
 44+ If you try to allocate a GPU when no GPUs are available.
45+ '''
3646 for i in range (torch .cuda .device_count ()):
3747 if i in exclude_gpus :
3848 continue
Original file line number Diff line number Diff line change @@ -70,6 +70,13 @@ def scale_batch_size(
7070
7171 **fit_kwargs: remaining arguments to be passed to .fit(), e.g., dataloader
7272 or datamodule.
73+
74+ Raises:
75+ MisconfigurationException:
76+ If field ``batch_arg_name`` is not found in ``model`` and ``model.hparams``, or
77+ if batch scaling feature is used with dataloaders passed directly to ``.fit()``.
78+ ValueError:
79+ If mode in method ``scale_batch_size`` is neither ``power`` nor ``binsearch``.
7380 """
7481 if trainer .fast_dev_run :
7582 rank_zero_warn ('Skipping batch size scaler since fast_dev_run is enabled.' , UserWarning )
Original file line number Diff line number Diff line change @@ -106,6 +106,10 @@ def lr_find(
106106
107107 update_attr: Whether to update the learning rate attribute or not.
108108
109+ Raises:
110+ MisconfigurationException:
 111+ If learning rate/lr in ``model`` or ``model.hparams`` isn't overridden when ``auto_lr_find=True``, or
 112+ if you are using more than one optimizer with the learning rate finder.
109113
110114 Example::
111115
You can’t perform that action at this time.
0 commit comments