def softmax(logits, axis=None, name=None):
    """Compute softmax activations of ``logits`` along ``axis``.

    Performs the equivalent of::

        softmax = tf.exp(logits) / tf.reduce_sum(tf.exp(logits), axis, keepdims=True)

    Args:
        logits: A float Tensor (or anything convertible to one).
        axis: Dimension the softmax is computed over. Defaults to the
            last axis when ``None``.
        name: Optional name for the resulting tensor.

    Returns:
        A Tensor with the same shape as ``logits``; the slices along
        ``axis`` are non-negative and sum to 1.
    """
    # Local import keeps the module importable when TensorFlow is absent.
    import tensorflow as tf

    if axis is None:
        axis = -1
    # Subtract the per-slice maximum before exponentiating for numerical
    # stability; softmax is shift-invariant, so the result is unchanged.
    shifted = logits - tf.reduce_max(logits, axis=axis, keepdims=True)
    exp = tf.exp(shifted)
    return tf.divide(exp, tf.reduce_sum(exp, axis=axis, keepdims=True), name=name)


def _demo():
    """Reproduce the worked softmax example from the original notes.

    A fixed 2x3 tensor stands in for per-class prediction scores; 100
    MNIST-like samples could instead be simulated with
    ``tf.random.normal([100, 10])``.
    """
    import tensorflow as tf

    out = tf.constant([[1., 2, 3],
                       [2, 0, 1]])
    # Row-wise exponentials; expected:
    # [[ 2.7182817  7.389056  20.085537 ]
    #  [ 7.389056   1.         2.7182817]]
    exp = tf.exp(out)
    print(exp)
    # Row sums; expected: [30.192875 11.107338]
    row_sum = tf.reduce_sum(exp, axis=1)
    print(row_sum)
    # Library softmax, matching exp / row_sum per row; expected:
    # [[0.09003057 0.24472848 0.66524094]
    #  [0.6652409  0.09003057 0.24472846]]
    print(tf.nn.softmax(out, axis=1))
    # Our implementation agrees with tf.nn.softmax.
    print(softmax(out, axis=1))


if __name__ == "__main__":
    _demo()