Merged
4 changes: 2 additions & 2 deletions tensorflow_addons/layers/__init__.py
@@ -18,11 +18,11 @@
 from __future__ import division
 from __future__ import print_function
 
-from tensorflow_addons.layers.gelu import GeLU
+from tensorflow_addons.layers.gelu import GELU
 from tensorflow_addons.layers.maxout import Maxout
 from tensorflow_addons.layers.normalizations import GroupNormalization
 from tensorflow_addons.layers.normalizations import InstanceNormalization
 from tensorflow_addons.layers.optical_flow import CorrelationCost
 from tensorflow_addons.layers.poincare import PoincareNormalize
 from tensorflow_addons.layers.sparsemax import Sparsemax
-from tensorflow_addons.layers.wrappers import WeightNormalization
+from tensorflow_addons.layers.wrappers import WeightNormalization
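After this rename, downstream code imports the layer under its new, all-caps name. A minimal usage sketch (the model architecture here is illustrative, not part of the PR):

    import tensorflow as tf
    from tensorflow_addons.layers import GELU  # was: GeLU

    # GELU is a drop-in Keras activation layer; approximate=True is the default.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(64, input_shape=(32,)),
        GELU(),
        tf.keras.layers.Dense(10),
    ])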
8 changes: 4 additions & 4 deletions tensorflow_addons/layers/gelu.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-"""Implements GeLU activation."""
+"""Implements GELU activation."""
 
 from __future__ import absolute_import
 from __future__ import division
@@ -23,7 +23,7 @@
 
 
 @tf.keras.utils.register_keras_serializable(package='Addons')
-class GeLU(tf.keras.layers.Layer):
+class GELU(tf.keras.layers.Layer):
     """Gaussian Error Linear Unit.
 
     A smoother version of ReLU generally used
@@ -40,7 +40,7 @@ class GeLU(tf.keras.layers.Layer):
     """
 
     def __init__(self, approximate=True, **kwargs):
-        super(GeLU, self).__init__(**kwargs)
+        super(GELU, self).__init__(**kwargs)
         self.approximate = approximate
         self.supports_masking = True
 
@@ -49,7 +49,7 @@ def call(self, inputs):
 
     def get_config(self):
         config = {'approximate': self.approximate}
-        base_config = super(GeLU, self).get_config()
+        base_config = super(GELU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
     def compute_output_shape(self, input_shape):
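For reference, the layer computes the Gaussian Error Linear Unit, GELU(x) = x * Φ(x), where Φ is the standard normal CDF; with approximate=True it uses the tanh approximation from Hendrycks & Gimpel. A NumPy sketch of both modes (my summary of the math, not code from this PR; the exact branch assumes SciPy is available):

    import numpy as np
    from scipy.special import erf  # assumption: SciPy is installed

    def gelu_reference(x, approximate=True):
        if approximate:
            # tanh approximation: 0.5*x*(1 + tanh(sqrt(2/pi)*(x + 0.044715*x^3)))
            return 0.5 * x * (1.0 + np.tanh(
                np.sqrt(2.0 / np.pi) * (x + 0.044715 * np.power(x, 3))))
        # exact form: x * Phi(x) = 0.5*x*(1 + erf(x / sqrt(2)))
        return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))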
8 changes: 4 additions & 4 deletions tensorflow_addons/layers/gelu_test.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-"""Tests for GeLU activation."""
+"""Tests for GELU activation."""
 
 from __future__ import absolute_import
 from __future__ import division
@@ -21,18 +21,18 @@
 import numpy as np
 import tensorflow as tf
 from absl.testing import parameterized
-from tensorflow_addons.layers.gelu import GeLU
+from tensorflow_addons.layers.gelu import GELU
 from tensorflow_addons.utils import test_utils
 
 
 @parameterized.parameters([np.float16, np.float32, np.float64])
 @test_utils.run_all_in_graph_and_eager_modes
-class TestGeLU(tf.test.TestCase):
+class TestGELU(tf.test.TestCase):
     def test_random(self, dtype):
         x = np.array([[0.5, 1.2, -0.3]]).astype(dtype)
         val = np.array([[0.345714, 1.0617027, -0.11462909]]).astype(dtype)
         test_utils.layer_test(
-            GeLU, kwargs={'dtype': dtype}, input_data=x, expected_output=val)
+            GELU, kwargs={'dtype': dtype}, input_data=x, expected_output=val)
 
 
 if __name__ == '__main__':
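As a sanity check, the expected_output values in test_random follow directly from the tanh approximation (a NumPy-only sketch, not part of the test suite):

    import numpy as np

    x = np.array([[0.5, 1.2, -0.3]], dtype=np.float32)
    out = 0.5 * x * (1.0 + np.tanh(
        np.sqrt(2.0 / np.pi) * (x + 0.044715 * x**3)))
    print(out)  # approx. [[0.345714, 1.0617027, -0.11462909]]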