From f18ef2c7538c0b275e12333af5cb5c6bb534a8c9 Mon Sep 17 00:00:00 2001
From: Marco De Nadai
Date: Wed, 18 May 2022 13:51:11 +0200
Subject: [PATCH] Update adabelief.py

---
 tensorflow_addons/optimizers/adabelief.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_addons/optimizers/adabelief.py b/tensorflow_addons/optimizers/adabelief.py
index 1ecabb4448..5ee8f923b0 100644
--- a/tensorflow_addons/optimizers/adabelief.py
+++ b/tensorflow_addons/optimizers/adabelief.py
@@ -113,7 +113,7 @@ def __init__(
             rectify: boolean. Whether to apply learning rate rectification as
                 from RAdam.
             total_steps: An integer. Total number of training steps. Enable
-                warmup by setting a positive value.
+                warmup by setting a value greater than zero.
             warmup_proportion: A floating point value. The proportion of
                 increasing steps.
             min_lr: A floating point value. Minimum learning rate after warmup.
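
Note for reviewers: a minimal usage sketch of the warmup behavior the updated
docstring describes. The hyperparameter values below are illustrative and not
part of this patch; the only assumption is the documented behavior that a
total_steps value greater than zero enables warmup, with the learning rate
ramping up over roughly total_steps * warmup_proportion steps and then
decaying toward min_lr.

    import tensorflow_addons as tfa

    # total_steps > 0 enables warmup, per the updated docstring.
    # All values here are illustrative only.
    optimizer = tfa.optimizers.AdaBelief(
        learning_rate=1e-3,
        total_steps=10_000,       # total training steps; > 0 turns warmup on
        warmup_proportion=0.1,    # first 10% of steps ramp the learning rate up
        min_lr=1e-5,              # learning rate floor after warmup
        rectify=True,             # RAdam-style rectification
    )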