From 9505628eb0dfbc9c744823da5931fb676faa62c7 Mon Sep 17 00:00:00 2001
From: Nikhil Kilari <36819773+kilarinikhil@users.noreply.github.com>
Date: Sun, 12 Apr 2020 09:06:02 +0530
Subject: [PATCH] Fix output layer activation, add fc2 in call

The softmax activation was applied to the output layer even during the
training phase, although the cross-entropy loss expects raw logits, and
the fc2 layer was defined but never used in the forward pass.
---
 tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb b/tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb
index 2bcd1860..77926535 100644
--- a/tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb
+++ b/tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb
@@ -111,11 +111,12 @@
     "        # First fully-connected hidden layer.\n",
     "        self.fc2 = layers.Dense(n_hidden_2, activation=tf.nn.relu)\n",
     "        # Second fully-connecter hidden layer.\n",
-    "        self.out = layers.Dense(num_classes, activation=tf.nn.softmax)\n",
+    "        self.out = layers.Dense(num_classes)\n",
     "\n",
     "    # Set forward pass.\n",
     "    def call(self, x, is_training=False):\n",
     "        x = self.fc1(x)\n",
+    "        x = self.fc2(x)\n",
     "        x = self.out(x)\n",
     "        if not is_training:\n",
     "            # tf cross entropy expect logits without softmax, so only\n",
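
For reference, below is a minimal sketch of how the model class reads once this
patch is applied. The layer sizes (n_hidden_1 = 128, n_hidden_2 = 256,
num_classes = 10) are assumed from the notebook's defaults and are not part of
this diff.

    import tensorflow as tf
    from tensorflow.keras import Model, layers

    # Assumed hyperparameters, matching the notebook's typical MNIST setup.
    n_hidden_1 = 128
    n_hidden_2 = 256
    num_classes = 10

    class NeuralNet(Model):
        def __init__(self):
            super(NeuralNet, self).__init__()
            # First fully-connected hidden layer.
            self.fc1 = layers.Dense(n_hidden_1, activation=tf.nn.relu)
            # Second fully-connected hidden layer.
            self.fc2 = layers.Dense(n_hidden_2, activation=tf.nn.relu)
            # Output layer now emits raw logits: no softmax here, since the
            # cross-entropy loss expects logits during training.
            self.out = layers.Dense(num_classes)

        def call(self, x, is_training=False):
            x = self.fc1(x)
            x = self.fc2(x)  # previously defined but never used in the forward pass
            x = self.out(x)
            if not is_training:
                # Apply softmax only at inference time to get class probabilities.
                x = tf.nn.softmax(x)
            return x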