PyTorch中张量的创建和维度的操作

一:张量的数据类型

PyTorch的不同类型之间,通过调用to方法进行转化,该方法传入的参数为转换的目标类型

import numpy as np
import torch  # import the torch package

# Convert a Python list into a PyTorch tensor
list_tensor = torch.tensor([1, 2, 3, 4])
print(list_tensor)
print(list_tensor.dtype)  # inspect the resulting dtype

# Convert a numpy array into a PyTorch tensor
np_arr = np.array([1, 2, 3, 4])
print(np_arr.dtype)
np_tensor = torch.tensor(np.array([1, 2, 3, 4]))
print(np_tensor)
print(np_tensor.dtype)

# Cast from an integer dtype to torch.float with the .to() method
torch.randint(0, 5, (3, 3)).to(torch.float)
tensor([1, 2, 3, 4])
torch.int64
int32
tensor([1, 2, 3, 4], dtype=torch.int32)
torch.int32





tensor([[4., 1., 2.],
        [0., 4., 1.],
        [1., 3., 4.]])

[外链图片转存失败,源站可能有防盗链机制,建议将图片保存下来直接上传(img-aiDzBMm9-1683018484487)(attachment:AFEF618A0BF3F77C779399B89C952525.jpg)]

二:张量的创建方式

Pytorch中有三种张量创建方式

1:通过torch.tensor函数创建

2:通过Pytorch内置函数创建

3:通过已知张量创建形状相同的张量

1:通过torch.tensor函数创建

例子见数据类型一节(上面)

2:通过Pytorch内置函数创建

torch.rand函数

torch.randn

torch.zeros

torch.ones函数

torch.eye函数

import torch  # import the torch package

# torch.rand: 3x3 matrix, entries uniform on (0, 1)
print(torch.rand(3, 3))
# torch.randn: 2x3x4 tensor, entries from the standard normal distribution
print(torch.randn(2, 3, 4))

# torch.zeros: 2x2x2 tensor filled with zeros
print(torch.zeros(2, 2, 2))

# torch.ones: 1x2x3 tensor filled with ones
print(torch.ones(1, 2, 3))

# torch.eye: 3x3 identity matrix
print(torch.eye(3))
tensor([[0.9391, 0.8454, 0.9455],
        [0.6574, 0.3585, 0.1600],
        [0.7382, 0.1731, 0.9842]])
tensor([[[-2.2154, -0.7502, -0.0475, -0.1608],
         [ 0.4444,  0.3133,  1.3741,  0.9021],
         [ 0.1571, -0.3610, -0.7061,  1.3664]],

        [[ 0.3917, -0.5090, -0.0340, -0.3178],
         [-1.3516,  0.1065, -0.0273, -0.6073],
         [-0.5911,  0.3819,  0.4520, -0.3360]]])
tensor([[[0., 0.],
         [0., 0.]],

        [[0., 0.],
         [0., 0.]]])
tensor([[[1., 1., 1.],
         [1., 1., 1.]]])
tensor([[1., 0., 0.],
        [0., 1., 0.],
        [0., 0., 1.]])

3:通过已知张量创建形状相同的张量

torch.zeros_like(t) #t大小,元素全为0的张量

torch.ones_like(t) #t大小,元素全为1的张量

torch.rand_like(t) #t大小,元素服从(0,1)上的均匀分布的张量

torch.randn_like(t) #t大小,元素服从标准正态分布的张量

import torch  # import the torch package

base = torch.randn(3, 3)
print(base)
# Each *_like helper returns a new tensor matching base's shape
print(torch.zeros_like(base))  # all zeros
print(torch.ones_like(base))   # all ones
print(torch.rand_like(base))   # uniform on (0, 1)
print(torch.randn_like(base))  # standard normal
tensor([[ 0.0520,  1.2636,  0.2217],
        [-0.7801, -0.4239, -0.4820],
        [-1.1670, -0.7687, -0.7751]])
tensor([[0., 0., 0.],
        [0., 0., 0.],
        [0., 0., 0.]])
tensor([[1., 1., 1.],
        [1., 1., 1.],
        [1., 1., 1.]])
tensor([[0.8698, 0.3951, 0.9457],
        [0.3843, 0.4096, 0.2056],
        [0.2966, 0.0818, 0.3750]])
tensor([[ 0.3153,  1.1999, -0.0310],
        [-0.5127,  0.3654, -0.8379],
        [-1.0135, -0.9964, -1.3940]])

三:和张量维度相关的方法

1:PyTorch张量形状相关的一些函数

主要包含获取张量的某一特定的维度元素的数目及张量包含的所有元素数目的一些方法

import torch  # import the torch package

sample = torch.randn(3, 4, 5)
print(sample)
print(sample.ndimension())  # number of dimensions (rank)
print(sample.nelement())    # total number of elements
print(sample.size())        # size of every dimension (method call)
print(sample.shape)         # same information exposed as an attribute, no parentheses
print(sample.size(0))       # size of dimension 0 only
tensor([[[ 3.1128e-01,  4.1240e-02, -5.4987e-01, -1.5764e+00, -7.5734e-01],
         [-1.4109e+00, -6.3453e-01, -1.2514e-01, -4.3276e-01,  7.6700e-01],
         [ 5.6799e-01,  9.8255e-01,  7.2142e-01,  1.0887e-01,  1.1115e+00],
         [ 1.8265e-03, -8.6204e-01,  8.3635e-01, -6.2222e-02, -3.3022e-01]],

        [[ 9.4435e-01, -5.6807e-01,  9.3470e-01, -1.0746e+00, -1.6739e+00],
         [-3.1463e-01, -6.1035e-01, -3.5705e-01, -2.7637e+00,  1.6379e-01],
         [-4.6570e-01, -2.5795e-01,  9.7071e-01,  3.5631e-01, -3.5840e-01],
         [ 2.0780e-01, -6.8428e-01, -1.2434e-01, -1.4971e-01, -1.4490e+00]],

        [[-2.5788e+00,  1.5449e+00,  8.2536e-01,  7.9389e-01,  6.5723e-01],
         [-6.2999e-02, -9.0702e-02, -2.1474e-01, -2.2358e-02, -6.5677e-02],
         [ 2.2985e+00,  7.8995e-01,  7.8908e-01,  1.0437e+00,  8.5898e-01],
         [-1.9019e-01, -1.2103e+00, -4.3281e-03,  5.0496e-01,  3.0169e-01]]])
3
60
torch.Size([3, 4, 5])
torch.Size([3, 4, 5])
3

2:改变张量的形状大小

t.view

t.reshape

import torch  # import the torch package

vec = torch.randn(12)
print(vec)
print(vec.view(3, 4))  # reinterpret the vector as a 3x4 matrix
print(vec.view(4, 3))
print(vec)  # view returns a new tensor; vec itself is unchanged

print(vec.reshape(3, 4))
print(vec)  # reshape does not modify vec either
tensor([ 1.1530, -0.2666,  0.2820, -0.5278, -1.1823, -1.4835, -0.3273,  0.1467,
         1.4569,  0.2755, -0.2063,  0.2273])
tensor([[ 1.1530, -0.2666,  0.2820, -0.5278],
        [-1.1823, -1.4835, -0.3273,  0.1467],
        [ 1.4569,  0.2755, -0.2063,  0.2273]])
tensor([[ 1.1530, -0.2666,  0.2820],
        [-0.5278, -1.1823, -1.4835],
        [-0.3273,  0.1467,  1.4569],
        [ 0.2755, -0.2063,  0.2273]])
tensor([ 1.1530, -0.2666,  0.2820, -0.5278, -1.1823, -1.4835, -0.3273,  0.1467,
         1.4569,  0.2755, -0.2063,  0.2273])
tensor([[ 1.1530, -0.2666,  0.2820, -0.5278],
        [-1.1823, -1.4835, -0.3273,  0.1467],
        [ 1.4569,  0.2755, -0.2063,  0.2273]])
tensor([ 1.1530, -0.2666,  0.2820, -0.5278, -1.1823, -1.4835, -0.3273,  0.1467,
         1.4569,  0.2755, -0.2063,  0.2273])

四:张量的索引和切片

import torch  # import the torch package

data = torch.randn(2, 3, 4)
print(data)
# Single element: index 1 in dim 0, index 2 in dim 1, index 3 in dim 2 (0-based)
print(data[1, 2, 3])
# A lone colon keeps a whole dimension; -1 refers to the last element
print(data[:, 1:-1, 1:3])
# Assigning through an index writes straight into the original tensor
data[1, 2, 3] = -10
print(data)
print(data > 0)        # boolean mask of the positive entries
print(data[data > 0])  # mask selection always yields a flat 1-D tensor
tensor([[[ 0.8539, -0.0756, -0.0093,  1.1649],
         [ 1.4486, -0.2762, -0.9639, -0.5436],
         [-1.5586,  0.9819,  1.8594, -0.1665]],

        [[-0.3079,  0.1786, -0.1279, -0.6654],
         [ 2.5747, -0.7501,  2.0875,  0.0110],
         [ 0.1397,  0.1322, -0.4082, -0.1392]]])
tensor(-0.1392)
tensor([[[-0.2762, -0.9639]],

        [[-0.7501,  2.0875]]])
tensor([[[ 8.5389e-01, -7.5556e-02, -9.2739e-03,  1.1649e+00],
         [ 1.4486e+00, -2.7617e-01, -9.6386e-01, -5.4355e-01],
         [-1.5586e+00,  9.8193e-01,  1.8594e+00, -1.6649e-01]],

        [[-3.0790e-01,  1.7862e-01, -1.2788e-01, -6.6541e-01],
         [ 2.5747e+00, -7.5011e-01,  2.0875e+00,  1.0968e-02],
         [ 1.3971e-01,  1.3219e-01, -4.0819e-01, -1.0000e+01]]])
tensor([[[ True, False, False,  True],
         [ True, False, False, False],
         [False,  True,  True, False]],

        [[False,  True, False, False],
         [ True, False,  True,  True],
         [ True,  True, False, False]]])
tensor([0.8539, 1.1649, 1.4486, 0.9819, 1.8594, 0.1786, 2.5747, 2.0875, 0.0110,
        0.1397, 0.1322])

猜你喜欢

转载自blog.csdn.net/qq_44425179/article/details/130465117