12 changes: 12 additions & 0 deletions tensorflow_addons/optimizers/tests/conditional_gradient_test.py
@@ -1457,3 +1457,15 @@ def test_sparse_nuclear():
        ),
        var1[2].numpy(),
    )


def test_serialization():
    learning_rate = 0.1
    lambda_ = 0.1
    ord = "nuclear"
    optimizer = cg_lib.ConditionalGradient(
        learning_rate=learning_rate, lambda_=lambda_, ord=ord
    )
    # Serialize to a config dict, rebuild, and check that the restored
    # optimizer reports an identical config.
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert optimizer.get_config() == new_optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/lamb_test.py
@@ -345,3 +345,10 @@ def test_exclude_layer_adaptation():
    assert opt._do_layer_adaptation("var0")
    assert not opt._do_layer_adaptation("var1")
    assert not opt._do_layer_adaptation("var1_weight")


def test_serialization():
    optimizer = lamb.LAMB(1e-4)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/lazy_adam_test.py
@@ -253,3 +253,10 @@ def test_slots_unique_eager():
    # There should be iteration, and two unique slot variables for v1 and v2.
    assert 5 == len(opt.variables())
    assert opt.variables()[0] == opt.iterations


def test_serialization():
    optimizer = lazy_adam.LazyAdam()
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/lookahead_test.py
@@ -149,3 +149,10 @@ def test_get_config():
    config = opt.get_config()
    assert config["sync_period"] == 10
    assert config["slow_step_size"] == 0.4


def test_serialization():
    optimizer = Lookahead("adam", sync_period=10, slow_step_size=0.4)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
10 changes: 10 additions & 0 deletions tensorflow_addons/optimizers/tests/moving_average_test.py
@@ -169,3 +169,13 @@ def test_fit_simple_linear_model():

    max_abs_diff = np.max(np.abs(predicted - y))
    assert max_abs_diff < 5e-3


def test_serialization():
    sgd_opt = tf.keras.optimizers.SGD(lr=2.0, nesterov=True, momentum=0.3, decay=0.1)
    # MovingAverage wraps an inner optimizer, so its config must round-trip
    # the wrapped SGD settings as well.
    optimizer = MovingAverage(
        sgd_opt, average_decay=0.5, num_updates=None, sequential_update=False
    )
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/novograd_test.py
@@ -139,3 +139,10 @@ def test_get_config():
assert config["learning_rate"] == 1e-4
assert config["weight_decay"] == 0.0
assert config["grad_averaging"] is False


def test_serialization():
    optimizer = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
10 changes: 10 additions & 0 deletions tensorflow_addons/optimizers/tests/stochastic_weight_averaging_test.py
@@ -113,3 +113,13 @@ def test_fit_simple_linear_model():

    max_abs_diff = np.max(np.abs(predicted - y))
    assert max_abs_diff < 1e-3


def test_serialization():
    start_averaging = 0
    average_period = 1
    sgd = tf.keras.optimizers.SGD(lr=1.0)
    optimizer = SWA(sgd, start_averaging, average_period)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/weight_decay_optimizers_test.py
@@ -348,3 +348,10 @@ def test_optimizer_sparse(dtype, optimizer):
    do_test_sparse_repeated_indices(
        dtype, optimizer, learning_rate=0.001, momentum=0.9, weight_decay=WEIGHT_DECAY,
    )


def test_serialization():
    optimizer = weight_decay_optimizers.AdamW(learning_rate=1e-4, weight_decay=1e-4)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
7 changes: 7 additions & 0 deletions tensorflow_addons/optimizers/tests/yogi_test.py
@@ -337,3 +337,10 @@ def test_get_config():
    opt = yogi.Yogi(1e-4)
    config = opt.get_config()
    assert config["learning_rate"] == 1e-4


def test_serialization():
    optimizer = yogi.Yogi(1e-4)
    config = tf.keras.optimizers.serialize(optimizer)
    new_optimizer = tf.keras.optimizers.deserialize(config)
    assert new_optimizer.get_config() == optimizer.get_config()
8 changes: 6 additions & 2 deletions tensorflow_addons/optimizers/yogi.py
@@ -320,8 +320,12 @@ def get_config(self):
"beta1": self._serialize_hyperparameter("beta_1"),
"beta2": self._serialize_hyperparameter("beta_2"),
"epsilon": self._serialize_hyperparameter("epsilon"),
"l1_t": self._serialize_hyperparameter("l1_regularization_strength"),
"l2_t": self._serialize_hyperparameter("l2_regularization_strength"),
"l1_regularization_strength": self._serialize_hyperparameter(
"l1_regularization_strength"
),
"l2_regularization_strength": self._serialize_hyperparameter(
"l2_regularization_strength"
),
"activation": self._activation,
"initial_accumulator_value": self._initial_accumulator_value,
}
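
The key rename above is what the new test_serialization tests rely on: tf.keras.optimizers.deserialize rebuilds an optimizer by passing the config dict back into the class constructor, so every key emitted by get_config has to match an __init__ argument. A minimal sketch of that round trip (not part of the diff), assuming tensorflow and tensorflow_addons are installed:

import tensorflow as tf
from tensorflow_addons.optimizers import yogi

# Build an optimizer with a non-default hyperparameter, serialize it to a
# plain dict, and rebuild it; both objects should report the same config.
optimizer = yogi.Yogi(1e-4, l1_regularization_strength=0.01)
config = tf.keras.optimizers.serialize(optimizer)
new_optimizer = tf.keras.optimizers.deserialize(config)
assert new_optimizer.get_config() == optimizer.get_config()

With the old "l1_t"/"l2_t" keys, the rebuilt Yogi would receive keyword arguments its constructor does not accept, and the regularization strengths would not survive the round trip.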