Neural Networks: Several Basic Activation Functions


Function plots (Sigmoid, Tanh, ReLU, Leaky ReLU):
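For reference, the four functions plotted by this first script are:

$$\mathrm{sigmoid}(x)=\frac{1}{1+e^{-x}},\qquad \tanh(x)=\frac{e^{x}-e^{-x}}{e^{x}+e^{-x}}$$

$$\mathrm{ReLU}(x)=\max(0,x),\qquad \mathrm{LeakyReLU}(x)=\begin{cases}x & x\ge 0\\ 0.2\,x & x<0\end{cases}$$

(The Leaky ReLU slope of 0.2 matches the coefficient used in the code below.)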

Implementation code:

import matplotlib.pyplot as plt
import numpy as np

x = np.linspace(-10, 10)   # 50 evenly spaced points in [-10, 10] (num defaults to 50)
y_sigmoid = 1/(1+np.exp(-x))
y_tanh = (np.exp(x)-np.exp(-x))/(np.exp(x)+np.exp(-x))

fig = plt.figure()
# plot sigmoid
ax = fig.add_subplot(221)
ax.plot(x,y_sigmoid,label="Sigmoid", color="blue")
ax.set_ylim((-2,2))
ax.grid()
ax.set_title('(a) Sigmoid')

# plot tanh
ax = fig.add_subplot(222)
ax.plot(x,y_tanh,label="Tanh", color="red")
ax.set_ylim((-2,2))
ax.grid()
ax.set_title('(b) Tanh')

# plot relu
ax = fig.add_subplot(223)
y_relu = np.array([0 if item < 0 else item for item in x])   # ReLU: max(0, x)
ax.plot(x, y_relu, label="ReLU", color="blue")
ax.grid()
ax.set_title('(c) ReLU')

# plot leaky relu
ax = fig.add_subplot(224)
y_leaky_relu = np.array([0.2 * item if item < 0 else item for item in x])   # slope 0.2 for x < 0
ax.plot(x, y_leaky_relu, label="Leaky ReLU", color="red")
ax.grid()
ax.set_title('(d) Leaky ReLU')

plt.tight_layout()
plt.show()
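As an aside (my addition, not from the original post): the list comprehensions above work, but NumPy can produce the same curves without Python-level loops. A minimal sketch using vectorized primitives:

import numpy as np

x = np.linspace(-10, 10, 100)
relu_vec = np.maximum(0, x)              # ReLU as an elementwise maximum
leaky_vec = np.where(x < 0, 0.2 * x, x)  # Leaky ReLU as a vectorized branch
tanh_vec = np.tanh(x)                    # NumPy ships tanh directly
sigmoid_vec = 1.0 / (1.0 + np.exp(-x))   # sigmoid is already vectorized

On large arrays the vectorized forms are both faster and easier to read than per-element loops.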

Function plots (Sigmoid, ELU, Leaky ReLU, ReLU, Softplus, Softsign, Tanh):
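For reference, the definitions of the three functions not already given above (with $a$ the ELU/Leaky-ReLU coefficient, set to 0.25 in the code):

$$\mathrm{ELU}(x)=\begin{cases}x & x\ge 0\\ a\,(e^{x}-1) & x<0\end{cases},\qquad \mathrm{softplus}(x)=\ln(1+e^{x}),\qquad \mathrm{softsign}(x)=\frac{x}{1+|x|}$$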

Implementation code:

import numpy as np
import matplotlib.pyplot as plt


def sigmoid(x):
    # Logistic sigmoid: 1 / (1 + e^-x), output in (0, 1)
    y = 1.0 / (1.0 + np.exp(-x))
    return y


def elu(x, a):
    # ELU: x for x >= 0, a * (e^x - 1) for x < 0
    y = x.copy()
    for i in range(y.shape[0]):
        if y[i] < 0:
            y[i] = a * (np.exp(y[i]) - 1)
    return y


def lrelu(x, a):
    # Leaky ReLU: x for x >= 0, a * x for x < 0
    y = x.copy()
    for i in range(y.shape[0]):
        if y[i] < 0:
            y[i] = a * y[i]
    return y


def relu(x):
    # ReLU: max(0, x), applied elementwise
    y = x.copy()
    y[y < 0] = 0
    return y


def softplus(x):
    # Softplus: ln(1 + e^x), a smooth approximation of ReLU
    y = np.log(np.exp(x) + 1)
    return y


def softsign(x):
    # Softsign: x / (1 + |x|), output in (-1, 1)
    y = x / (np.abs(x) + 1)
    return y


def tanh(x):
    # Hyperbolic tangent via (1 - e^-2x) / (1 + e^-2x); equivalent to np.tanh(x)
    y = (1.0 - np.exp(-2 * x)) / (1.0 + np.exp(-2 * x))
    return y


x = np.linspace(start=-10, stop=10, num=100)
y_sigmoid = sigmoid(x)
y_elu = elu(x, 0.25)
y_lrelu = lrelu(x, 0.25)
y_relu = relu(x)
y_softplus = softplus(x)
y_softsign = softsign(x)
y_tanh = tanh(x)

plt.subplot(331)
plt.title('sigmoid')
plt.plot(x, y_sigmoid)
plt.grid(True)

plt.subplot(332)
plt.title('elu')
plt.plot(x, y_elu)
plt.grid(True)

plt.subplot(333)
plt.title('lrelu')
plt.plot(x, y_lrelu)
plt.grid(True)

plt.subplot(334)
plt.title('relu')
plt.plot(x, y_relu)
plt.grid(True)

plt.subplot(335)
plt.title('softplus')
plt.plot(x, y_softplus)
plt.grid(True)

plt.subplot(336)
plt.title('softsign')
plt.plot(x, y_softsign)
plt.grid(True)

plt.subplot(337)
plt.title('tanh')
plt.plot(x, y_tanh)
plt.grid(True)
plt.tight_layout()
plt.show()
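One caveat worth flagging (my addition, not from the original post): on much wider input ranges than [-10, 10], the direct formula 1.0 / (1.0 + np.exp(-x)) overflows float64 once x drops below roughly -710, and NumPy emits a RuntimeWarning. A common remedy is to branch on the sign so the argument passed to exp is never positive; a minimal sketch:

import numpy as np

def stable_sigmoid(x):
    # Branch on sign so np.exp never sees a positive argument:
    #   x >= 0:  1 / (1 + e^-x)
    #   x <  0:  e^x / (1 + e^x)   (the same value, rewritten)
    out = np.empty_like(x, dtype=float)
    pos = x >= 0
    out[pos] = 1.0 / (1.0 + np.exp(-x[pos]))
    ex = np.exp(x[~pos])
    out[~pos] = ex / (1.0 + ex)
    return out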

I hope this helps.
