Plotting three common activation functions in Python

 The complete code and the resulting diagram are provided directly below.

import matplotlib.pyplot as plt
import numpy as np
import math
# Use the SimHei font so matplotlib can render CJK (Chinese) text in labels.
plt.rcParams['font.sans-serif']=['SimHei']
# SimHei lacks the Unicode minus glyph; fall back to the ASCII hyphen so
# negative axis tick labels render correctly instead of as empty boxes.
plt.rcParams['axes.unicode_minus']=False

#sigmoid
# The sigmoid activation is now rarely used in hidden layers, but it still
# serves as the output layer for binary classification problems.
# It suffers from vanishing gradients: as x tends to +/- infinity, its
# derivative approaches 0.

def sigmoid(x):
    """Logistic sigmoid, 1 / (1 + e^-x); accepts scalars or NumPy arrays."""
    denominator = 1 + np.exp(-x)
    return 1 / denominator
# Sample [-10, 10) at 0.1 steps and draw the sigmoid curve in red.
x1 = np.arange(-10.0, 10.0, 0.1)
y1 = sigmoid(x1)  # fix: was sigmoid(x) — `x` is undefined here (NameError)
plt.plot(x1, y1, color='red', label='sigmoid')
plt.legend()

#ReLU
# ReLU is the usual default choice of activation function.
def ReLU(x):
    """Rectified linear unit: max(0, x), element-wise for NumPy arrays.

    Fix: the original returned np.maximum(0, 0.1*x), i.e. 0.1*ReLU(x),
    which is neither ReLU nor leaky ReLU (leaky would be max(0.1*x, x))
    and contradicted its own "ReLU definition" comment.
    """
    return np.maximum(0, x)
# Sample [-10, 10) at 0.1 steps and draw the ReLU curve in blue.
x2 = np.arange(-10.0, 10.0, 0.1)
y2 = ReLU(x2)  # fix: was ReLU(x) — `x` is undefined here (NameError)
plt.plot(x2, y2, color='blue', label='ReLU')
plt.legend()

#tanh
def tanh(x):
    """Hyperbolic tangent: (e^x - e^-x) / (e^x + e^-x), element-wise."""
    grow = np.exp(x)
    decay = np.exp(-x)
    return (grow - decay) / (grow + decay)
# Sample [-10, 10) at 0.1 steps and draw the tanh curve in green.
x3 = np.arange(-10.0, 10.0, 0.1)
y3 = tanh(x3)  # fix: was tanh(x) — `x` is undefined here (NameError)
plt.plot(x3, y3, color='green', label='tanh')
plt.legend()
# NOTE(review): the script never calls plt.show() or savefig(); in a plain
# (non-interactive) interpreter no window will appear — confirm intent.

 

Related posts

Origin blog.csdn.net/codeshits/article/details/127165468