PyTorch activation functions

The snippet below plots four common activation functions (ReLU, sigmoid, tanh, softplus) over the range [-5, 5].

import torch
import torch.nn.functional as F
import matplotlib.pyplot as plt

# fake data
x = torch.linspace(-5, 5, 200)  # x data (tensor), shape=(200,)
x_np = x.numpy()                # numpy array for plotting
# note: torch.autograd.Variable is deprecated since PyTorch 0.4; plain tensors work directly

# following are popular activation functions
y_relu = torch.relu(x).numpy()
y_sigmoid = torch.sigmoid(x).numpy()
y_tanh = torch.tanh(x).numpy()
y_softplus = F.softplus(x).numpy()  # softplus is not in the top-level torch namespace
# y_softmax = torch.softmax(x, dim=0).numpy()  # softmax is a special kind of activation function: it produces a probability distribution, so it is not plotted here
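
# a quick illustration of that claim (an added demo, not part of the original plot):
# softmax rescales a vector into probabilities that sum to 1
probs = torch.softmax(torch.tensor([1.0, 2.0, 3.0]), dim=0)
print(probs)        # tensor([0.0900, 0.2447, 0.6652])
print(probs.sum())  # tensor(1.)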

# use matplotlib to visualize these activation functions
plt.figure(1, figsize=(8, 6))
plt.subplot(221)
plt.plot(x_np, y_relu, c='red', label='relu')
plt.ylim((-1, 5))
plt.legend(loc='best')

plt.subplot(222)
plt.plot(x_np, y_sigmoid, c='red', label='sigmoid')
plt.ylim((-0.2, 1.2))
plt.legend(loc='best')

plt.subplot(223)
plt.plot(x_np, y_tanh, c='red', label='tanh')
plt.ylim((-1.2, 1.2))
plt.legend(loc='best')

plt.subplot(224)
plt.plot(x_np, y_softplus, c='red', label='softplus')
plt.ylim((-0.2, 6))
plt.legend(loc='best')

plt.show()
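
In practice these activations sit between the layers of a network rather than being plotted. Here is a minimal sketch of how the same functions are used in a model's forward pass (the layer sizes below are arbitrary, picked only for illustration):

import torch
import torch.nn.functional as F

class Net(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.hidden = torch.nn.Linear(1, 10)  # hidden layer (sizes are arbitrary)
        self.out = torch.nn.Linear(10, 1)     # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))  # activation applied between layers
        return self.out(x)

net = Net()
print(net(torch.randn(4, 1)).shape)  # torch.Size([4, 1])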

 

Source: www.cnblogs.com/dhName/p/11742877.html