
Commit c91cf2d

Added serializability tests in all optimizers and fixed serializability bug in yogi (#1728)
1 parent 1e146df commit c91cf2d
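
Every new test follows the same round trip: serialize the optimizer with tf.keras.optimizers.serialize, rebuild it with tf.keras.optimizers.deserialize, and assert that the two get_config() results are equal. A minimal, self-contained sketch of that pattern, using LazyAdam and the public tensorflow_addons.optimizers import path (assumed here; the test files use their own module-level imports such as lazy_adam and cg_lib):

import tensorflow as tf
from tensorflow_addons.optimizers import LazyAdam

# Build an optimizer, round-trip it through Keras (de)serialization,
# and check that no hyperparameter is lost or renamed along the way.
optimizer = LazyAdam(learning_rate=1e-3)
config = tf.keras.optimizers.serialize(optimizer)
new_optimizer = tf.keras.optimizers.deserialize(config)
assert new_optimizer.get_config() == optimizer.get_config()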

File tree

10 files changed: 80 additions & 2 deletions


tensorflow_addons/optimizers/tests/conditional_gradient_test.py

Lines changed: 12 additions & 0 deletions
@@ -1457,3 +1457,15 @@ def test_sparse_nuclear():
         ),
         var1[2].numpy(),
     )
+
+
+def test_serialization():
+    learning_rate = 0.1
+    lambda_ = 0.1
+    ord = "nuclear"
+    optimizer = cg_lib.ConditionalGradient(
+        learning_rate=learning_rate, lambda_=lambda_, ord=ord
+    )
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert optimizer.get_config() == new_optimizer.get_config()

tensorflow_addons/optimizers/tests/lamb_test.py

Lines changed: 7 additions & 0 deletions
@@ -345,3 +345,10 @@ def test_exclude_layer_adaptation():
     assert opt._do_layer_adaptation("var0")
     assert not opt._do_layer_adaptation("var1")
     assert not opt._do_layer_adaptation("var1_weight")
+
+
+def test_serialization():
+    optimizer = lamb.LAMB(1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/lazy_adam_test.py

Lines changed: 7 additions & 0 deletions
@@ -253,3 +253,10 @@ def test_slots_unique_eager():
     # There should be iteration, and two unique slot variables for v1 and v2.
     assert 5 == len(opt.variables())
     assert opt.variables()[0] == opt.iterations
+
+
+def test_serialization():
+    optimizer = lazy_adam.LazyAdam()
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/lookahead_test.py

Lines changed: 7 additions & 0 deletions
@@ -149,3 +149,10 @@ def test_get_config():
     config = opt.get_config()
     assert config["sync_period"] == 10
     assert config["slow_step_size"] == 0.4
+
+
+def test_serialization():
+    optimizer = Lookahead("adam", sync_period=10, slow_step_size=0.4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/moving_average_test.py

Lines changed: 10 additions & 0 deletions
@@ -169,3 +169,13 @@ def test_fit_simple_linear_model():
 
     max_abs_diff = np.max(np.abs(predicted - y))
     assert max_abs_diff < 5e-3
+
+
+def test_serialization():
+    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
+    optimizer = MovingAverage(
+        sgd_opt, average_decay=0.5, num_updates=None, sequential_update=False
+    )
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
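
For the wrapper optimizers (Lookahead above, MovingAverage here, SWA below), get_config() also embeds the serialized inner optimizer, so the round trip exercises the wrapped SGD/Adam settings as well. A hedged sketch of that, assuming the wrapper keeps the nested config under an "optimizer" key, which is the usual layout for these wrappers but is not shown in this diff:

import tensorflow as tf
from tensorflow_addons.optimizers import MovingAverage

# Wrap a configured SGD and round-trip the wrapper through Keras
# (de)serialization; the nested SGD settings should survive as well.
inner = tf.keras.optimizers.SGD(learning_rate=2.0, momentum=0.3, nesterov=True)
wrapper = MovingAverage(inner, average_decay=0.5)

config = tf.keras.optimizers.serialize(wrapper)
restored = tf.keras.optimizers.deserialize(config)

assert restored.get_config() == wrapper.get_config()
# Assumed layout: the inner optimizer's serialized form under "optimizer".
assert restored.get_config()["optimizer"]["class_name"] == "SGD"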

tensorflow_addons/optimizers/tests/novograd_test.py

Lines changed: 7 additions & 0 deletions
@@ -139,3 +139,10 @@ def test_get_config():
     assert config["learning_rate"] == 1e-4
     assert config["weight_decay"] == 0.0
     assert config["grad_averaging"] is False
+
+
+def test_serialization():
+    optimizer = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py

Lines changed: 10 additions & 0 deletions
@@ -113,3 +113,13 @@ def test_fit_simple_linear_model():
 
     max_abs_diff = np.max(np.abs(predicted - y))
     assert max_abs_diff < 1e-3
+
+
+def test_serialization():
+    start_averaging = 0
+    average_period = 1
+    sgd = tf.keras.optimizers.SGD(lr=1.0)
+    optimizer = SWA(sgd, start_averaging, average_period)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py

Lines changed: 7 additions & 0 deletions
@@ -348,3 +348,10 @@ def test_optimizer_sparse(dtype, optimizer):
     do_test_sparse_repeated_indices(
         dtype, optimizer, learning_rate=0.001, momentum=0.9, weight_decay=WEIGHT_DECAY,
     )
+
+
+def test_serialization():
+    optimizer = weight_decay_optimizers.AdamW(learning_rate=1e-4, weight_decay=1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/tests/yogi_test.py

Lines changed: 7 additions & 0 deletions
@@ -337,3 +337,10 @@ def test_get_config():
     opt = yogi.Yogi(1e-4)
     config = opt.get_config()
     assert config["learning_rate"] == 1e-4
+
+
+def test_serialization():
+    optimizer = yogi.Yogi(1e-4)
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()

tensorflow_addons/optimizers/yogi.py

Lines changed: 6 additions & 2 deletions
@@ -320,8 +320,12 @@ def get_config(self):
                 "beta1": self._serialize_hyperparameter("beta_1"),
                 "beta2": self._serialize_hyperparameter("beta_2"),
                 "epsilon": self._serialize_hyperparameter("epsilon"),
-                "l1_t": self._serialize_hyperparameter("l1_regularization_strength"),
-                "l2_t": self._serialize_hyperparameter("l2_regularization_strength"),
+                "l1_regularization_strength": self._serialize_hyperparameter(
+                    "l1_regularization_strength"
+                ),
+                "l2_regularization_strength": self._serialize_hyperparameter(
+                    "l2_regularization_strength"
+                ),
                 "activation": self._activation,
                 "initial_accumulator_value": self._initial_accumulator_value,
             }
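
This yogi.py change is the bug the new tests catch: get_config() used to emit the keys "l1_t" and "l2_t", but Yogi's constructor takes l1_regularization_strength and l2_regularization_strength, so from_config() (which passes the config dict back to the constructor as keyword arguments) could not rebuild the optimizer from its own config. With the keys matching the constructor arguments, deserialization works. A brief sketch, assuming Yogi accepts these keyword arguments directly (the regularization values are illustrative):

import tensorflow as tf
from tensorflow_addons.optimizers import Yogi

# With the renamed keys, the config produced by get_config() lines up
# with Yogi's constructor arguments, so deserialize() can rebuild it.
opt = Yogi(
    learning_rate=1e-4,
    l1_regularization_strength=0.01,  # illustrative values
    l2_regularization_strength=0.02,
)
config = tf.keras.optimizers.serialize(opt)
restored = tf.keras.optimizers.deserialize(config)

assert restored.get_config() == opt.get_config()
assert "l1_regularization_strength" in restored.get_config()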
