Neural Network Activation Functions With Tensorflow

Activation functions shape a neural network's behavior by determining whether, and how strongly, each neuron fires. ReLU is currently the most commonly used. The visualizations below build intuition for each function, and the accompanying code samples let you reproduce the plots on your own machine.

Dropout

Note: strictly speaking, dropout is a regularization technique rather than an activation function — it randomly zeroes elements and rescales the survivors — but it is included here because its API lives alongside the activations in tf.nn.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# TF 1.x dropout: the second positional argument is keep_prob, so each
# element is kept with probability 0.5 and scaled by 1/0.5; the rest are
# zeroed. The output is random per run. NOTE(review): dropout is a
# regularization op, not a true activation function.
output = tf.nn.dropout(x, 0.5)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('DropOut Activation Function')
    plot.plot(x, y)
    plot.show()

Figure 1 : DropOut Activation Function

Elu

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# ELU: identity for x > 0, exp(x) - 1 for x <= 0.
output = tf.nn.elu(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('Elu Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate ELU eagerly over a dense sweep of inputs and plot the curve;
# no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.elu(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('Elu Activation Function')
plot.show()

Figure 2 : Elu Activation Function

ReLu

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# ReLU: max(x, 0).
output = tf.nn.relu(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('ReLu Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate ReLU eagerly over a dense sweep of inputs and plot the curve;
# no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.relu(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('Relu Activation Function')
plot.show()

Figure 3 : ReLu Activation Function

ReLu6

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# ReLU6: min(max(x, 0), 6) — ReLU clipped at 6.
output = tf.nn.relu6(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('ReLu6 Activation Function')
    plot.plot(x, y)
    plot.show()

Figure 4 : ReLu6 Activation Function

SeLu

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# SELU: scaled ELU (self-normalizing activation).
output = tf.nn.selu(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('SeLu Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate SELU eagerly over a dense sweep of inputs and plot the curve;
# no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.selu(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('Selu Activation Function')
plot.show()

Figure 5 : SeLu Activation Function

Sigmoid

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# Sigmoid: 1 / (1 + exp(-x)), mapping inputs into (0, 1).
output = tf.nn.sigmoid(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('Sigmoid Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate the sigmoid eagerly over a dense sweep of inputs and plot the
# curve; no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.sigmoid(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('Sigmoid Activation Function')
plot.show()

Figure 6 : Sigmoid Activation Function

SoftPlus

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# Softplus: log(1 + exp(x)) — a smooth approximation of ReLU.
output = tf.nn.softplus(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('SoftPlus Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate softplus eagerly over a dense sweep of inputs and plot the
# curve; no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.softplus(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('SoftPlus Activation Function')
plot.show()

Figure 7 : SoftPlus Activation Function

SoftSign

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# Softsign: x / (1 + |x|), mapping inputs into (-1, 1).
output = tf.nn.softsign(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('SoftSign Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate softsign eagerly over a dense sweep of inputs and plot the
# curve; no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.softsign(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('SoftSign Activation Function')
plot.show()

Figure 8 : SoftSign Activation Function

TanH

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
#Tensorflow 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# 50 evenly spaced sample inputs spanning negative and positive values.
x = np.linspace(-10, 10, 50)
# Tanh: hyperbolic tangent, mapping inputs into (-1, 1).
output = tf.nn.tanh(x)
# No tf.Variable exists in this graph, so this initializer is a no-op;
# kept for parity with the other TF 1.x samples.
initialization = tf.global_variables_initializer()

with tf.Session() as session:
    # Fix: these statements must be indented inside the `with` block —
    # the original flush-left layout is a SyntaxError.
    session.run(initialization)
    y = session.run(output)
    plot.xlabel('Neuron Activity')
    plot.ylabel('Neuron Output')
    plot.title('TanH Activation Function')
    plot.plot(x, y)
    plot.show()

#Tensorflow 2.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plot

# Evaluate tanh eagerly over a dense sweep of inputs and plot the curve;
# no Session is needed in TF 2.x.
inputs = np.linspace(-10, 10, 50)
activations = tf.keras.activations.tanh(inputs)
plot.plot(inputs, activations)
plot.xlabel('Neuron Activity')
plot.ylabel('Neuron Output')
plot.title('TanH Activation Function')
plot.show()

Figure 9 : TanH Activation Function