From 3bd380e16ae95f0697b40adedbed8d1c51586c90 Mon Sep 17 00:00:00 2001
From: Seungjae Ryan Lee
Date: Sat, 15 Aug 2020 19:45:02 +0000
Subject: [PATCH 1/4] Fix broken link in NovoGrad docstring

---
 tensorflow_addons/optimizers/novograd.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py
index bdea0a9079..ba1269f95f 100644
--- a/tensorflow_addons/optimizers/novograd.py
+++ b/tensorflow_addons/optimizers/novograd.py
@@ -23,14 +23,15 @@
 
 @tf.keras.utils.register_keras_serializable(package="Addons")
 class NovoGrad(tf.keras.optimizers.Optimizer):
-    """The NovoGrad Optimizer was first proposed in [Stochastic Gradient
-    Methods with Layerwise Adaptvie Moments for training of Deep
-    Networks](https://arxiv.org/pdf/1905.11286.pdf)
+    """Optimizer that implements NovoGrad.
 
-    NovoGrad is a first-order SGD-based algorithm, which computes second
-    moments per layer instead of per weight as in Adam. Compared to Adam,
-    NovoGrad takes less memory, and has been found to be more numerically
-    stable. More specifically we compute (for more information on the
+    The NovoGrad Optimizer was first proposed in [Stochastic Gradient
+    Methods with Layerwise Adaptvie Moments for training of Deep
+    Networks](https://arxiv.org/pdf/1905.11286.pdf) NovoGrad is a
+    first-order SGD-based algorithm, which computes second moments per
+    layer instead of per weight as in Adam. Compared to Adam, NovoGrad
+    takes less memory, and has been found to be more numerically stable.
+    More specifically we compute (for more information on the
     computation please refer to this
     [link](https://nvidia.github.io/OpenSeq2Seq/html/optimizers.html):
 

From 6f4b7f0f5620a3062e6ca77f5a95df8ec2bb3f9b Mon Sep 17 00:00:00 2001
From: Seungjae Ryan Lee
Date: Sat, 15 Aug 2020 19:45:35 +0000
Subject: [PATCH 2/4] Fix typo in NovoGrad paper title

---
 tensorflow_addons/optimizers/novograd.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py
index ba1269f95f..3149cfc0f4 100644
--- a/tensorflow_addons/optimizers/novograd.py
+++ b/tensorflow_addons/optimizers/novograd.py
@@ -26,7 +26,7 @@ class NovoGrad(tf.keras.optimizers.Optimizer):
     """Optimizer that implements NovoGrad.
 
     The NovoGrad Optimizer was first proposed in [Stochastic Gradient
-    Methods with Layerwise Adaptvie Moments for training of Deep
+    Methods with Layerwise Adaptive Moments for training of Deep
     Networks](https://arxiv.org/pdf/1905.11286.pdf) NovoGrad is a
     first-order SGD-based algorithm, which computes second moments per
     layer instead of per weight as in Adam. Compared to Adam, NovoGrad

From 360d8cc668bd6dd3eb92b69f45da6d62bb8612af Mon Sep 17 00:00:00 2001
From: Seungjae Ryan Lee
Date: Sat, 15 Aug 2020 19:46:56 +0000
Subject: [PATCH 3/4] Add missing parenthesis

---
 tensorflow_addons/optimizers/novograd.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py
index 3149cfc0f4..562c4246a9 100644
--- a/tensorflow_addons/optimizers/novograd.py
+++ b/tensorflow_addons/optimizers/novograd.py
@@ -33,7 +33,7 @@ class NovoGrad(tf.keras.optimizers.Optimizer):
     takes less memory, and has been found to be more numerically stable.
     More specifically we compute (for more information on the
     computation please refer to this
-    [link](https://nvidia.github.io/OpenSeq2Seq/html/optimizers.html):
+    [link](https://nvidia.github.io/OpenSeq2Seq/html/optimizers.html)):
 
     Second order moment = exponential moving average of Layer-wise square
     of grads:

From 3fbd1ee7af05a917f94e0a688686463868bd8062 Mon Sep 17 00:00:00 2001
From: Seungjae Ryan Lee
Date: Sun, 16 Aug 2020 05:34:56 +0000
Subject: [PATCH 4/4] Remove unneeded wordy phrase

---
 tensorflow_addons/optimizers/novograd.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py
index 562c4246a9..a089651488 100644
--- a/tensorflow_addons/optimizers/novograd.py
+++ b/tensorflow_addons/optimizers/novograd.py
@@ -31,9 +31,8 @@ class NovoGrad(tf.keras.optimizers.Optimizer):
     first-order SGD-based algorithm, which computes second moments per
     layer instead of per weight as in Adam. Compared to Adam, NovoGrad
     takes less memory, and has been found to be more numerically stable.
-    More specifically we compute (for more information on the
-    computation please refer to this
-    [link](https://nvidia.github.io/OpenSeq2Seq/html/optimizers.html)):
+    (For more information on the computation please refer to this
+    [link](https://nvidia.github.io/OpenSeq2Seq/html/optimizers.html))
 
     Second order moment = exponential moving average of Layer-wise square
     of grads:
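For context on what the reworded docstring is describing, here is a minimal NumPy sketch of the per-layer NovoGrad update, simplified from Algorithm 1 of the linked paper. It is illustrative only and is not the tensorflow_addons implementation; the function name `novograd_step` and the hyperparameter defaults are made up for the example.

```python
import numpy as np


def novograd_step(w, g, m, v, lr=1e-2, beta_1=0.95, beta_2=0.98,
                  eps=1e-7, weight_decay=0.0):
    """One NovoGrad update for a single layer with weights `w` and grads `g`."""
    # Second moment: EMA of the layer-wise squared gradient norm (a scalar),
    # instead of a per-weight tensor as in Adam.
    v = beta_2 * v + (1.0 - beta_2) * np.sum(g ** 2)
    # Normalize the gradient by the layer's second moment, then add
    # decoupled weight decay.
    g_hat = g / (np.sqrt(v) + eps) + weight_decay * w
    # First moment: momentum on the normalized gradient.
    m = beta_1 * m + g_hat
    # SGD-style step.
    w = w - lr * m
    return w, m, v


# Toy usage: one layer with 3 weights.
w = np.array([0.5, -0.3, 0.8])
m, v = np.zeros_like(w), 0.0
g = np.array([0.1, -0.2, 0.05])  # gradient from some loss
w, m, v = novograd_step(w, g, m, v)
```

The point the docstring makes about memory is visible in the sketch: `v` is a single scalar per layer (the moving average of the squared gradient norm) rather than a per-weight tensor, so NovoGrad carries less optimizer state than Adam.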