diff --git a/tensorflow_addons/optimizers/tests/conditional_gradient_test.py b/tensorflow_addons/optimizers/tests/conditional_gradient_test.py
index 694fab8164..d153cc0534 100644
--- a/tensorflow_addons/optimizers/tests/conditional_gradient_test.py
+++ b/tensorflow_addons/optimizers/tests/conditional_gradient_test.py
@@ -1457,3 +1457,15 @@ def test_sparse_nuclear():
         ),
         var1[2].numpy(),
     )
+
+
+def test_serialization():
+    learning_rate = 0.1
+    lambda_ = 0.1
+    ord = "nuclear"
+    optimizer = cg_lib.ConditionalGradient(
+        learning_rate=learning_rate, lambda_=lambda_, ord=ord
+    )
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert optimizer.get_config() == new_optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/lamb_test.py b/tensorflow_addons/optimizers/tests/lamb_test.py
index 253bedab92..06113d7ad3 100644
--- a/tensorflow_addons/optimizers/tests/lamb_test.py
+++ b/tensorflow_addons/optimizers/tests/lamb_test.py
@@ -345,3 +345,10 @@ def test_exclude_layer_adaptation():
     assert opt._do_layer_adaptation("var0")
     assert not opt._do_layer_adaptation("var1")
     assert not opt._do_layer_adaptation("var1_weight")
+
+
+def test_serialization():
+    optimizer = lamb.LAMB(1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/lazy_adam_test.py b/tensorflow_addons/optimizers/tests/lazy_adam_test.py
index 7560254262..16a5614219 100644
--- a/tensorflow_addons/optimizers/tests/lazy_adam_test.py
+++ b/tensorflow_addons/optimizers/tests/lazy_adam_test.py
@@ -253,3 +253,10 @@ def test_slots_unique_eager():
     # There should be iteration, and two unique slot variables for v1 and v2.
     assert 5 == len(opt.variables())
     assert opt.variables()[0] == opt.iterations
+
+
+def test_serialization():
+    optimizer = lazy_adam.LazyAdam()
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/lookahead_test.py b/tensorflow_addons/optimizers/tests/lookahead_test.py
index b27f0cfe71..257f32b63e 100644
--- a/tensorflow_addons/optimizers/tests/lookahead_test.py
+++ b/tensorflow_addons/optimizers/tests/lookahead_test.py
@@ -149,3 +149,10 @@ def test_get_config():
     config = opt.get_config()
     assert config["sync_period"] == 10
     assert config["slow_step_size"] == 0.4
+
+
+def test_serialization():
+    optimizer = Lookahead("adam", sync_period=10, slow_step_size=0.4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/moving_average_test.py b/tensorflow_addons/optimizers/tests/moving_average_test.py
index 25f7d752c9..f89d652a02 100644
--- a/tensorflow_addons/optimizers/tests/moving_average_test.py
+++ b/tensorflow_addons/optimizers/tests/moving_average_test.py
@@ -169,3 +169,13 @@ def test_fit_simple_linear_model():
 
     max_abs_diff = np.max(np.abs(predicted - y))
     assert max_abs_diff < 5e-3
+
+
+def test_serialization():
+    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
+    optimizer = MovingAverage(
+        sgd_opt, average_decay=0.5, num_updates=None, sequential_update=False
+    )
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/novograd_test.py b/tensorflow_addons/optimizers/tests/novograd_test.py
index 45d8ecb4f8..f822761954 100644
--- a/tensorflow_addons/optimizers/tests/novograd_test.py
+++ b/tensorflow_addons/optimizers/tests/novograd_test.py
@@ -139,3 +139,10 @@ def test_get_config():
     assert config["learning_rate"] == 1e-4
     assert config["weight_decay"] == 0.0
     assert config["grad_averaging"] is False
+
+
+def test_serialization():
+    optimizer = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py b/tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py
index 820c827d60..c4d09a7c6c 100644
--- a/tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py
+++ b/tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py
@@ -113,3 +113,13 @@ def test_fit_simple_linear_model():
 
     max_abs_diff = np.max(np.abs(predicted - y))
     assert max_abs_diff < 1e-3
+
+
+def test_serialization():
+    start_averaging = 0
+    average_period = 1
+    sgd = tf.keras.optimizers.SGD(lr=1.0)
+    optimizer = SWA(sgd, start_averaging, average_period)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py b/tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py
index c4b004e159..98f1379b2a 100644
--- a/tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py
+++ b/tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py
@@ -348,3 +348,10 @@ def test_optimizer_sparse(dtype, optimizer):
     do_test_sparse_repeated_indices(
         dtype, optimizer, learning_rate=0.001, momentum=0.9, weight_decay=WEIGHT_DECAY,
     )
+
+
+def test_serialization():
+    optimizer = weight_decay_optimizers.AdamW(learning_rate=1e-4, weight_decay=1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/tests/yogi_test.py b/tensorflow_addons/optimizers/tests/yogi_test.py
index daa8d52675..1cc0498eff 100644
--- a/tensorflow_addons/optimizers/tests/yogi_test.py
+++ b/tensorflow_addons/optimizers/tests/yogi_test.py
@@ -337,3 +337,10 @@ def test_get_config():
     opt = yogi.Yogi(1e-4)
     config = opt.get_config()
     assert config["learning_rate"] == 1e-4
+
+
+def test_serialization():
+    optimizer = yogi.Yogi(1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
diff --git a/tensorflow_addons/optimizers/yogi.py b/tensorflow_addons/optimizers/yogi.py
index 09db073d1d..bb4d264b02 100644
--- a/tensorflow_addons/optimizers/yogi.py
+++ b/tensorflow_addons/optimizers/yogi.py
@@ -320,8 +320,12 @@ def get_config(self):
                 "beta1": self._serialize_hyperparameter("beta_1"),
                 "beta2": self._serialize_hyperparameter("beta_2"),
                 "epsilon": self._serialize_hyperparameter("epsilon"),
-                "l1_t": self._serialize_hyperparameter("l1_regularization_strength"),
-                "l2_t": self._serialize_hyperparameter("l2_regularization_strength"),
+                "l1_regularization_strength": self._serialize_hyperparameter(
+                    "l1_regularization_strength"
+                ),
+                "l2_regularization_strength": self._serialize_hyperparameter(
+                    "l2_regularization_strength"
+                ),
                 "activation": self._activation,
                 "initial_accumulator_value": self._initial_accumulator_value,
             }