@@ -54,18 +54,15 @@ def __init__(self, weight_decay, *args, **kwargs):
     Note: when applying a decay to the learning rate, be sure to manually apply
     the decay to the `weight_decay` as well. For example:
 
-    ```python
-    step = tf.Variable(0, trainable=False)
-    schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
-        [10000, 15000], [1e-0, 1e-1, 1e-2])
-    # lr and wd can be a function or a tensor
-    lr = 1e-1 * schedule(step)
-    wd = lambda: 1e-4 * schedule(step)
-
-    # ...
-
-    optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
-    ```
+    Usage:
+
+    >>> step = tf.Variable(0, trainable=False)
+    >>> schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
+    ... [10000, 15000], [1e-0, 1e-1, 1e-2])
+    >>> lr = 1e-1 * schedule(step)
+    >>> wd = lambda: 1e-4 * schedule(step)
+    >>> optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
+
     """
 
     def __init__(self, weight_decay, **kwargs):
@@ -213,18 +210,15 @@ def extend_with_decoupled_weight_decay(base_optimizer):
     Note: when applying a decay to the learning rate, be sure to manually apply
     the decay to the `weight_decay` as well. For example:
 
-    ```python
-    step = tf.Variable(0, trainable=False)
-    schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
-        [10000, 15000], [1e-0, 1e-1, 1e-2])
-    # lr and wd can be a function or a tensor
-    lr = 1e-1 * schedule(step)
-    wd = lambda: 1e-4 * schedule(step)
+    Usage:
 
-    # ...
+    >>> step = tf.Variable(0, trainable=False)
+    >>> schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
+    ... [10000, 15000], [1e-0, 1e-1, 1e-2])
+    >>> lr = 1e-1 * schedule(step)
+    >>> wd = lambda: 1e-4 * schedule(step)
+    >>> optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
 
-    optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
-    ```
 
     Note: you might want to register your own custom optimizer using
     `tf.keras.utils.get_custom_objects()`.
@@ -287,19 +281,16 @@ class SGDW(DecoupledWeightDecayExtension, tf.keras.optimizers.SGD):
     Note: when applying a decay to the learning rate, be sure to manually apply
     the decay to the `weight_decay` as well. For example:
 
-    ```python
-    step = tf.Variable(0, trainable=False)
-    schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
-        [10000, 15000], [1e-0, 1e-1, 1e-2])
-    # lr and wd can be a function or a tensor
-    lr = 1e-1 * schedule(step)
-    wd = lambda: 1e-4 * schedule(step)
-
-    # ...
+    Usage:
 
-    optimizer = tfa.optimizers.SGDW(
-        learning_rate=lr, weight_decay=wd, momentum=0.9)
-    ```
+    >>> step = tf.Variable(0, trainable=False)
+    >>> schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
+    ... [10000, 15000], [1e-0, 1e-1, 1e-2])
+    >>> lr = 1e-1 * schedule(step)
+    >>> wd = lambda: 1e-4 * schedule(step)
+    >>> optimizer = tfa.optimizers.SGDW(
+    ... learning_rate=lr, weight_decay=wd, momentum=0.9)
+
     """
 
     def __init__(self,
@@ -362,18 +353,15 @@ class AdamW(DecoupledWeightDecayExtension, tf.keras.optimizers.Adam):
     Note: when applying a decay to the learning rate, be sure to manually apply
     the decay to the `weight_decay` as well. For example:
 
-    ```python
-    step = tf.Variable(0, trainable=False)
-    schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
-        [10000, 15000], [1e-0, 1e-1, 1e-2])
-    # lr and wd can be a function or a tensor
-    lr = 1e-1 * schedule(step)
-    wd = lambda: 1e-4 * schedule(step)
-
-    # ...
+    Usage:
 
-    optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
-    ```
+    >>> step = tf.Variable(0, trainable=False)
+    >>> schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
+    ... [10000, 15000], [1e-0, 1e-1, 1e-2])
+    >>> lr = 1e-1 * schedule(step)
+    >>> wd = lambda: 1e-4 * schedule(step)
+    >>> optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)
+
     """
 
     def __init__(self,
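For reference, a minimal end-to-end sketch of the pattern these docstrings describe, assuming `tensorflow_addons` is installed and imported as `tfa`. The variable names mirror the docstring example; the toy quadratic loss, the training loop, and the manual `step.assign_add(1)` are illustrative assumptions, not part of this patch.

```python
import tensorflow as tf
import tensorflow_addons as tfa

step = tf.Variable(0, trainable=False)
schedule = tf.optimizers.schedules.PiecewiseConstantDecay(
    [10000, 15000], [1e-0, 1e-1, 1e-2])

# Both the learning rate and the weight decay follow the same schedule,
# so passing callables keeps them decaying in sync as `step` advances.
lr = lambda: 1e-1 * schedule(step)
wd = lambda: 1e-4 * schedule(step)

optimizer = tfa.optimizers.AdamW(learning_rate=lr, weight_decay=wd)

# Toy training loop on a single variable, just to show the pieces fit together.
var = tf.Variable([1.0, 2.0])
for _ in range(3):
    with tf.GradientTape() as tape:
        loss = tf.reduce_sum(tf.square(var))  # toy quadratic loss
    grads = tape.gradient(loss, [var])
    optimizer.apply_gradients(zip(grads, [var]))
    step.assign_add(1)  # advance the shared schedule step
```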