pytorch-统计属性

统计属性

▪ norm

▪ mean sum

▪ prod

▪ max, min, argmin, argmax

▪ kthvalue, topk

# norm-p
import torch
a=torch.full([8], 1.0)
a
tensor([1., 1., 1., 1., 1., 1., 1., 1.])
b=a.view(2,4)
print(b)
c=a.view(2,2,2)
print(c)
tensor([[1., 1., 1., 1.],
        [1., 1., 1., 1.]])
tensor([[[1., 1.],
         [1., 1.]],

        [[1., 1.],
         [1., 1.]]])
a.norm(1),b.norm(1),c.norm(1)
(tensor(8.), tensor(8.), tensor(8.))
a.norm(2),b.norm(2),c.norm(2)
(tensor(2.8284), tensor(2.8284), tensor(2.8284))
b.norm(1, dim=1)
tensor([4., 4.])
b.norm(2, dim=1)
tensor([2., 2.])
c.norm(1, dim=0)
tensor([[2., 2.],
        [2., 2.]])
c.norm(2, dim=0)
tensor([[1.4142, 1.4142],
        [1.4142, 1.4142]])

mean, sum, min, max, prod(累乘)

a=torch.arange(8).view(2,4).float()
a
tensor([[0., 1., 2., 3.],
        [4., 5., 6., 7.]])
a.min(),a.max(),a.mean(),a.prod()
(tensor(0.), tensor(7.), tensor(3.5000), tensor(0.))
a.sum()
tensor(28.)

argmin, argmax

# 将数据打平
# 0   1   2   3   4   5   6   7
# 0., 1., 2., 3. ,4., 5., 6., 7.
a.argmax()
tensor(7)
# 将数据打平
# 0   1   2   3   4   5   6   7
# 0., 1., 2., 3. ,4., 5., 6., 7.
a.argmin()
tensor(0)
a =  torch.rand(2,3,4)   # 2x3x4=24
a
tensor([[[0.8881, 0.8049, 0.6173, 0.6132],
         [0.4918, 0.4614, 0.3723, 0.7169],
         [0.7246, 0.9947, 0.5305, 0.1393]],

        [[0.0236, 0.2242, 0.6364, 0.2610],
         [0.6931, 0.3401, 0.4342, 0.1042],
         [0.8220, 0.9793, 0.0228, 0.5665]]])
a.argmax()
tensor(9)    # 最大值 0.9947 位于 [0,2,1]，打平后的索引为 0*12 + 2*4 + 1 = 9
a=torch.rand(4,10)
a
tensor([[0.0622, 0.8281, 0.5229, 0.0179, 0.1282, 0.2806, 0.9035, 0.0344, 0.8047,
         0.3573],
        [0.2241, 0.9326, 0.1820, 0.5662, 0.3079, 0.7449, 0.9265, 0.6096, 0.6232,
         0.1947],
        [0.7625, 0.3807, 0.3237, 0.4953, 0.5655, 0.2867, 0.2200, 0.0215, 0.6170,
         0.3477],
        [0.5033, 0.1363, 0.0296, 0.7398, 0.9477, 0.2766, 0.5090, 0.0175, 0.4481,
         0.8575]])
a[0]
tensor([0.0622, 0.8281, 0.5229, 0.0179, 0.1282, 0.2806, 0.9035, 0.0344, 0.8047,
        0.3573])
a.argmax()
tensor(34)
a.argmax(dim=1)
tensor([6, 1, 0, 4])

dim, keepdim

a=torch.rand(4,10)
a
tensor([[0.1202, 0.0744, 0.7729, 0.6513, 0.5008, 0.7367, 0.0984, 0.9881, 0.9041,
         0.7258],
        [0.8578, 0.1581, 0.5646, 0.1224, 0.6212, 0.3750, 0.0776, 0.4940, 0.5347,
         0.6968],
        [0.1681, 0.5017, 0.1138, 0.4363, 0.8306, 0.1492, 0.9907, 0.5928, 0.7162,
         0.8629],
        [0.8479, 0.2474, 0.5249, 0.5171, 0.0968, 0.0684, 0.1510, 0.7171, 0.8209,
         0.4611]])
print(a.argmax(dim=1))
print("-"*50)
print(a.max(dim=1))
tensor([7, 0, 6, 0])
--------------------------------------------------
torch.return_types.max(
values=tensor([0.9881, 0.8578, 0.9907, 0.8479]),
indices=tensor([7, 0, 6, 0]))
a.max(dim=1, keepdim=True)
torch.return_types.max(
values=tensor([[0.9881],
        [0.8578],
        [0.9907],
        [0.8479]]),
indices=tensor([[7],
        [0],
        [6],
        [0]]))
a.argmax(dim=1, keepdim=True)
tensor([[7],
        [0],
        [6],
        [0]])

Top-k or k-th ==> 论文中的top-1 / top-5

pre1 = torch.randn(2,10)
print(pre1)
pre1.dim()
tensor([[ 0.6806, -0.7074,  2.7420, -1.9766,  1.4887,  1.1216, -0.2368, -1.1325,
         -1.0066,  1.6367],
        [ 0.6096, -0.5251, -0.5601, -1.5806,  0.2537,  1.5675, -1.0789, -1.5864,
         -0.0223,  0.4532]])

2    # pre1.dim() 的输出：pre1 是 2 维张量
pre1.topk(3,dim=1)
torch.return_types.topk(
values=tensor([[2.7420, 1.6367, 1.4887],
        [1.5675, 0.6096, 0.4532]]),
indices=tensor([[2, 9, 4],
        [5, 0, 9]]))
pre = torch.arange(10)*0.1
print(pre)
pre.dim()
tensor([0.0000, 0.1000, 0.2000, 0.3000, 0.4000, 0.5000, 0.6000, 0.7000, 0.8000,
        0.9000])
1    # pre.dim() 的输出：pre 是 1 维张量
pre.topk(3, dim=0)  # 前三个最大值及其索引（默认从大到小排列）
torch.return_types.topk(
values=tensor([0.9000, 0.8000, 0.7000]),
indices=tensor([9, 8, 7]))
pre.topk(3, dim=0, largest=False) # largest=False：前三个最小值及其索引（从小到大排列）
torch.return_types.topk(
values=tensor([0.0000, 0.1000, 0.2000]),
indices=tensor([0, 1, 2]))

compare

>, >=, <, <=, !=, ==

torch.eq(a, b)

▪ torch.equal(a, b)

a = torch.randn(3,6)-0.5
a
tensor([[-0.9635, -1.6708, -1.0660, -1.6179,  0.5257,  0.7999],
        [-0.7183, -2.1876, -0.3822, -1.2526, -1.1457, -1.0905],
        [-0.8699,  0.1701,  1.7849, -0.9955,  0.1741,  0.8979]])
a>0
tensor([[False, False, False, False,  True,  True],
        [False, False, False, False, False, False],
        [False,  True,  True, False,  True,  True]])
torch.gt(a, 0)
tensor([[False, False, False, False,  True,  True],
        [False, False, False, False, False, False],
        [False,  True,  True, False,  True,  True]])
a != 0
tensor([[True, True, True, True, True, True],
        [True, True, True, True, True, True],
        [True, True, True, True, True, True]])
a=torch.ones(2,3)
print(a)
b=torch.randn(2,3)
print(b)
tensor([[1., 1., 1.],
        [1., 1., 1.]])
tensor([[ 1.0654,  0.8177,  0.3416],
        [ 0.2996, -0.7102,  0.8026]])
torch.eq(a,b)
tensor([[False, False, False],
        [False, False, False]])
torch.eq(a,a)
tensor([[True, True, True],
        [True, True, True]])
torch.equal(a,a)
True

猜你喜欢

转载自blog.csdn.net/MasterCayman/article/details/109407667