From 56fb4f3c0692fa2751308dedbf3ba5d8c0e0b1a6 Mon Sep 17 00:00:00 2001
From: Sefik Ilkin Serengil
Date: Tue, 15 Oct 2019 10:32:25 +0300
Subject: [PATCH] mish activation

---
 keras/activations.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/keras/activations.py b/keras/activations.py
index 127c8af9f881..071bbabb6d00 100644
--- a/keras/activations.py
+++ b/keras/activations.py
@@ -35,6 +35,15 @@ def softmax(x, axis=-1):
         raise ValueError('Cannot apply softmax to a tensor that is 1D. '
                          'Received input: %s' % x)

+def mish(x):
+    """Mish: A Self Regularized Non-Monotonic Neural Activation Function (https://arxiv.org/abs/1908.08681v2).
+    Outperforms both ReLU and Swish.
+    # Arguments
+        x: Input tensor.
+    # Returns
+        The mish activation: `x * tanh(softplus(x))`.
+    """
+    return x * K.tanh(K.softplus(x))

 def elu(x, alpha=1.0):
     """Exponential linear unit.
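
For reviewers who want to sanity-check the change locally, below is a minimal sketch (not part of the patch) that compares the new activation against a NumPy reference implementation of x * tanh(softplus(x)). It assumes the patch has been applied so that keras.activations.mish is importable; the sample values and the layer size in the comment are illustrative only.

# Minimal verification sketch (assumes this patch is applied).
import numpy as np
from keras import backend as K
from keras import activations

# Evaluate mish on a small batch of sample values.
x_np = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype='float32')
y = K.eval(activations.mish(K.constant(x_np)))

# NumPy reference: softplus(x) = log(1 + exp(x)), mish(x) = x * tanh(softplus(x)).
expected = x_np * np.tanh(np.log1p(np.exp(x_np)))
assert np.allclose(y, expected, atol=1e-6)

# The function object can also be passed to a layer directly, e.g.:
#     from keras.layers import Dense
#     layer = Dense(64, activation=activations.mish)  # 64 units is illustrative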