四种激励函数图像

import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Fake data: 200 evenly spaced points on [-5, 5].
    # NOTE: the original comment claimed shape=(100, 1); linspace actually
    # yields a 1-D tensor of shape (200,).
    x = torch.linspace(-5, 5, 200)
    # matplotlib wants plain numpy arrays, not torch tensors.
    # (.numpy() is preferred over the deprecated .data.numpy() — these
    # tensors carry no grad, so the direct conversion is safe.)
    x_np = x.numpy()

    # Popular activation functions evaluated over x.
    y_relu = torch.relu(x).numpy()
    y_sigmoid = torch.sigmoid(x).numpy()
    y_tanh = torch.tanh(x).numpy()
    y_softplus = F.softplus(x).numpy()  # softplus lives in torch.nn.functional
    # y_softmax = torch.softmax(x, dim=0).numpy()
    # softmax is excluded: it produces a probability distribution over the
    # whole input rather than an element-wise curve, so plotting it here
    # would be misleading.

    # (label, curve, y-axis limits) for each subplot of the 2x2 grid.
    curves = [
        ('relu', y_relu, (-1, 5)),
        ('sigmoid', y_sigmoid, (-0.2, 1.2)),
        ('tanh', y_tanh, (-1.2, 1.2)),
        ('softplus', y_softplus, (-0.2, 6)),
    ]

    # Visualize each activation function in its own subplot.
    plt.figure(1, figsize=(8, 6))
    for i, (label, y, ylim) in enumerate(curves, start=1):
        plt.subplot(2, 2, i)
        plt.plot(x_np, y, c='red', label=label)
        plt.ylim(ylim)
        plt.legend(loc='best')

    plt.show()

四种激励函数图像

在这里插入图片描述

发布了41 篇原创文章 · 获赞 44 · 访问量 7645

猜你喜欢

转载自blog.csdn.net/tailonh/article/details/105351717
今日推荐