Reducing a tensor's dimensions with [0]: the slicing perspective
阿新 • Published: 2021-02-01
Tags: pytorch

Indexing a tensor with an integer, e.g. a[0], removes that dimension, while slicing, e.g. a[0:], keeps it. The code below demonstrates the difference.
import torch
a = torch.tensor([[[ 0.0402, -0.1184, -0.7499, -0.3347, -0.7184, -0.7137],
                   [ 0.0829, -0.3118, -0.2069, -0.8267, -0.6034, -0.1528],
                   [ 0.2082, -0.1497, -0.3795, -0.2937, -0.5613, -0.0673],
                   [ 0.3715, -0.0893, -0.0470, -0.3137, -0.4161, -0.0860],
                   [ 0.2035,  0.0389,  0.1358, -0.0482, -0.6119, -0.1137],
                   [-0.1657,  0.0381,  0.2353,  0.1406, -0.3886, -0.4558]]])
print(a.shape)
# torch.Size([1, 6, 6])
print("a[1:]",a[1:],a[1:].shape)
#a[1:] tensor([], size=(0, 6, 6)) torch.Size([0, 6, 6])
print("a[0:]",a[0:],a[0:].shape)
#a[0:] tensor([[[ 0.0402, -0.1184, -0.7499, -0.3347, -0.7184, -0.7137],
# [ 0.0829, -0.3118, -0.2069, -0.8267, -0.6034, -0.1528],
# [ 0.2082, -0.1497, -0.3795, -0.2937, -0.5613, -0.0673],
# [ 0.3715, -0.0893, -0.0470, -0.3137, -0.4161, -0.0860],
# [ 0.2035, 0.0389, 0.1358, -0.0482, -0.6119, -0.1137],
# [-0.1657, 0.0381, 0.2353, 0.1406, -0.3886, -0.4558]]])
#torch.Size([1, 6, 6])
print("a[0]",a[0],a[0].shape)
#a[0] tensor([[ 0.0402, -0.1184, -0.7499, -0.3347, -0.7184, -0.7137],
#[ 0.0829, -0.3118, -0.2069, -0.8267, -0.6034, -0.1528],
#[ 0.2082, -0.1497, -0.3795, -0.2937, -0.5613, -0.0673],
#[ 0.3715, -0.0893, -0.0470, -0.3137, -0.4161, -0.0860],
#[ 0.2035, 0.0389, 0.1358, -0.0482, -0.6119, -0.1137],
#[-0.1657, 0.0381, 0.2353, 0.1406, -0.3886, -0.4558]])
#torch.Size([6, 6])
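To summarize the first example: an integer index (a[0]) removes the indexed dimension, while a slice (a[0:], a[1:]) always keeps the number of dimensions, even when the result is empty. As a minimal sketch (the variable name x below is illustrative and not from the original post), torch.squeeze(0) is an equivalent way to drop a size-1 leading dimension:

import torch

x = torch.randn(1, 6, 6)                # same layout as a above: one group of 6x6
print(x[0].shape)                        # torch.Size([6, 6])    integer index drops dim 0
print(x[0:].shape)                       # torch.Size([1, 6, 6]) slice keeps dim 0
print(x.squeeze(0).shape)                # torch.Size([6, 6])    removes the size-1 dim 0
print(torch.equal(x[0], x.squeeze(0)))   # True: both contain the same 6x6 data

The second example below uses a tensor with two groups of three rows, i.e. shape (2, 3, 6).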
a = torch.tensor([[[ 0.0402, -0.1184, -0.7499, -0.3347, -0.7184, -0.7137],
                   [ 0.0829, -0.3118, -0.2069, -0.8267, -0.6034, -0.1528],
                   [ 0.2082, -0.1497, -0.3795, -0.2937, -0.5613, -0.0673]],
                  [[ 0.3715, -0.0893, -0.0470, -0.3137, -0.4161, -0.0860],
                   [ 0.2035,  0.0389,  0.1358, -0.0482, -0.6119, -0.1137],
                   [-0.1657,  0.0381,  0.2353,  0.1406, -0.3886, -0.4558]]])
print("a[0]",a[0,2],a[0,2].shape)# 提取第1組資料的第三行資料
# a[0,2] tensor([ 0.2082, -0.1497, -0.3795, -0.2937, -0.5613, -0.0673]) torch.Size([6])
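As a short follow-up sketch (using the (2, 3, 6) tensor a defined above): multi-dimensional indexing a[0, 2] is equivalent to chained indexing a[0][2], and each integer index removes one dimension, so indexing all three dimensions leaves a 0-dim scalar tensor.

print(a[0][2].shape)                  # torch.Size([6]), same result as a[0, 2]
print(torch.equal(a[0, 2], a[0][2]))  # True
print(a[0, 2, 5])                     # tensor(-0.0673), a 0-dim (scalar) tensor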