import torch
import matplotlib.pyplot as plt
Activation functions
Sigmoid function
sigmoid = torch.nn.Sigmoid()
x = torch.arange(-10, 10, step=1.0)

plt.plot(x, sigmoid(x), 'k-', label='sigmoid')
plt.legend()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.show()
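A quick sanity check: sigmoid can also be computed directly from its definition, f(x) = 1 / (1 + e^(-x)); the manual variable below is just an illustrative name.
# sigmoid from its definition, 1 / (1 + exp(-x))
manual = 1 / (1 + torch.exp(-x))
torch.allclose(sigmoid(x), manual)  # expected: True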
ReLU function
relu = torch.nn.ReLU()
x = torch.arange(-10, 10, step=1.0)

plt.plot(x, relu(x), 'k-', label='ReLU')
plt.legend()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.show()
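ReLU has an equally simple closed form, f(x) = max(0, x), which we can reproduce with torch.clamp as a sketch:
# ReLU from its definition, max(0, x)
manual = torch.clamp(x, min=0)
torch.allclose(relu(x), manual)  # expected: True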
Leaky ReLU
f = torch.nn.LeakyReLU(negative_slope=0.1)
x = torch.arange(-10, 10, step=1.0)

plt.plot(x, f(x), 'k-', label='Leaky ReLU')
plt.legend()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.show()
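Leaky ReLU keeps positive inputs unchanged and scales negative inputs by negative_slope; a minimal sketch of that definition with torch.where:
# Leaky ReLU from its definition: x if x >= 0, else negative_slope * x
manual = torch.where(x >= 0, x, 0.1 * x)
torch.allclose(f(x), manual)  # expected: True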
Softmax function
# create a softmax module to apply to the last dimension of the input tensor
f = torch.nn.Softmax(dim=-1)
x = torch.arange(-10, 10, step=1.0)

plt.plot(x, f(x), 'k-', label='Softmax')
plt.legend()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.show()
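Softmax is exp(x) divided by the sum of exp(x) over the chosen dimension; a minimal sketch of that definition:
# softmax from its definition, exp(x) / sum(exp(x))
manual = torch.exp(x) / torch.exp(x).sum()
torch.allclose(f(x), manual)  # expected: True
PyTorch's implementation subtracts the maximum value before exponentiating for numerical stability, but the result is mathematically the same.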
Softmax is useful for a vector input: it rescales the elements so that they are non-negative and sum to 1.0, which lets them be read as probabilities.
# create a row vector as a rank-2 tensor of shape (1, 3)
x = torch.randn(1, 3)
x
tensor([[-0.4259, 1.3028, -1.4466]])
# apply softmax along the last dim (column)
f = torch.nn.Softmax(dim=1)
y = f(x)
y
tensor([[0.1430, 0.8055, 0.0515]])
# verify the sum of the softmax values
torch.sum(y, dim=1)
tensor([1.])
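The dim argument matters once the input holds more than one vector: with a batch of rows, dim=1 normalizes each row independently. A brief sketch (the 2x3 batch shape is just for illustration):
# each of the 2 rows sums to 1.0 when softmax runs along dim=1
batch = torch.randn(2, 3)
probs = torch.nn.Softmax(dim=1)(batch)
torch.sum(probs, dim=1)  # expected: tensor([1., 1.])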