From dc262e15ac78021fa54c73daa45e74cd2575f0d4 Mon Sep 17 00:00:00 2001
From: Wojciech Rzadkowski
Date: Tue, 22 Jan 2019 00:18:30 +0100
Subject: [PATCH] Added ReLU activations to multilayer_perceptron.py

---
 examples/3_NeuralNetworks/multilayer_perceptron.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/3_NeuralNetworks/multilayer_perceptron.py b/examples/3_NeuralNetworks/multilayer_perceptron.py
index cf04b015..f802d2d6 100644
--- a/examples/3_NeuralNetworks/multilayer_perceptron.py
+++ b/examples/3_NeuralNetworks/multilayer_perceptron.py
@@ -58,11 +58,11 @@
 # Create model
 def multilayer_perceptron(x):
     # Hidden fully connected layer with 256 neurons
-    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
+    layer_1 = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['b1']))
     # Hidden fully connected layer with 256 neurons
-    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
+    layer_2 = tf.nn.relu(tf.add(tf.matmul(layer_1, weights['h2']), biases['b2']))
     # Output fully connected layer with a neuron for each class
     out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
     return out_layer
 
 # Construct model
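
For context, below is a minimal self-contained sketch of how the patched function slots into the example's TF1 training graph. The ReLU belongs on the hidden layers only: out_layer is consumed as raw logits by the softmax cross-entropy loss, and clamping logits at zero would discard the negative half of their range and degrade the gradient. The variable shapes and the loss wiring are assumptions based on the surrounding example script, not part of this diff.

import tensorflow as tf

# Assumed network shape from the example script: 784 MNIST inputs,
# two 256-unit hidden layers, 10 output classes.
n_input, n_hidden_1, n_hidden_2, n_classes = 784, 256, 256, 10

weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes])),
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes])),
}

def multilayer_perceptron(x):
    # ReLU on the hidden layers; tf.nn.relu(v) computes tf.maximum(0., v)
    layer_1 = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['b1']))
    layer_2 = tf.nn.relu(tf.add(tf.matmul(layer_1, weights['h2']), biases['b2']))
    # The output layer stays linear: it produces raw logits, and
    # softmax_cross_entropy_with_logits applies the softmax itself.
    return tf.matmul(layer_2, weights['out']) + biases['out']

X = tf.placeholder("float", [None, n_input])
Y = tf.placeholder("float", [None, n_classes])
logits = multilayer_perceptron(X)
loss_op = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=Y))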