Getting Started with PyTorch (1): Tensor Basics


Straight to the code:
import torch as t
print(t.__version__)  # print the installed PyTorch version

# ============================ Tensor basics ==============================
# Creating tensors
x = t.Tensor(2, 3)  # allocate a 2x3 matrix; memory is reserved but the values are uninitialized
x = t.Tensor([[1, 2, 1], [3, 4, 1]])  # build a Tensor from a Python list
y = t.rand(2, 3)  # 2x3 matrix of random values drawn uniformly from [0, 1)
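# For reference, a few other common constructors (all standard torch APIs,
# not part of the original walkthrough):
a = t.zeros(2, 3)        # 2x3 matrix of zeros
b = t.eye(3)             # 3x3 identity matrix
c = t.arange(0, 6)       # 1-D tensor [0, 1, 2, 3, 4, 5]
d = t.linspace(0, 1, 5)  # 5 evenly spaced values from 0 to 1
e = t.randn(2, 3)        # 2x3 matrix sampled from a standard normal distribution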

# Shape
Shape = x.size()  # get the size of x; returns a torch.Size object
# torch.Size subclasses tuple, so it supports all tuple operations
columns_n = Shape[1]
line_n = Shape[0]
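# The same information is exposed as x.shape, and since torch.Size is a tuple
# it can be unpacked directly (a quick illustration with the 2x3 x from above):
rows_n, cols_n = x.shape  # x.shape is equivalent to x.size()
print(rows_n, cols_n)     # 2 3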

# Addition: several equivalent forms
z = x + y
z = t.add(x, y)
y.add(x)  # returns a new tensor; y itself is unchanged
y.add_(x)  # in-place addition; y is modified
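# t.add can also write its result into a pre-allocated tensor via the `out`
# keyword (standard torch API); a small sketch:
result = t.Tensor(2, 3)
t.add(x, y, out=result)  # result now holds x + y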

# Indexing: works like NumPy
z = x[:1, 1:]  # slicing returns a sub-tensor (a view of x)
z = x[1]
z = x[1][1]  # indexing down to a single element yields a 0-dim scalar tensor
x = t.tensor([2])
print(type(z), type(x))  # both are torch.Tensor
print(z, x)  # tensor(4.) tensor([2])
Shape = z.size()  # torch.Size([])
Shape1 = x.size()  # torch.Size([1])
z = z.item()  # scalar.item() converts a 0-dim scalar tensor to a plain Python number
x = x.item()  # a tensor with exactly one element can also call `tensor.item()`, with the same result
z = t.tensor(100)  # construct a 0-dim scalar tensor directly
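# The scalar / one-element distinction also shows up in dim(), and .item() only
# works for tensors with exactly one element; larger tensors can use .tolist()
# instead (both are standard torch APIs):
print(t.tensor(100).dim(), t.tensor([100]).dim())  # 0 1
print(t.tensor([1, 2, 3]).tolist())  # [1, 2, 3]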

# Tensors support many more operations, including mathematical operations,
# linear algebra, selection, and slicing, and the usage is very similar to NumPy.
# Converting between Tensors and NumPy arrays is easy and fast: for operations
# a Tensor does not support, convert to a NumPy array, process it there, and
# convert back to a Tensor.
z = t.ones(5)  # create a Tensor filled with ones
x = z.numpy()  # Tensor -> NumPy array
y = t.from_numpy(x)  # NumPy array -> Tensor
# A Tensor and the NumPy array converted from/to it share the same memory, so
# the conversion is fast and costs almost nothing. It also means that modifying
# one of them changes the other as well.
z.add_(1)  # in-place add on the Tensor
x += 1  # in-place add on the NumPy array
y.add_(1)  # in-place add on the Tensor built from x
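# Because all three share the same underlying storage, each of the operations
# above touched the same data, and every element is now 4; a quick check:
print(z, x, y)  # tensor([4., 4., 4., 4., 4.]) [4. 4. 4. 4. 4.] tensor([4., 4., 4., 4., 4.])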

# Copying
tensor = t.tensor([3, 4])
old_tensor = tensor  # same object; shares memory with tensor
new_tensor = old_tensor.clone().detach()  # a real copy; does not share memory
new_tensor1 = old_tensor.detach()  # still shares memory with old_tensor
new_tensor[0] = 1111  # only affects new_tensor
old_tensor[1] = 222  # also changes tensor and new_tensor1
tensor[0] = 3333  # also changes old_tensor and new_tensor1
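# After these assignments, tensor, old_tensor and new_tensor1 all see the same
# storage and hold [3333, 222], while the cloned new_tensor is [1111, 4]:
print(tensor, old_tensor, new_tensor1)  # tensor([3333,  222]) printed three times
print(new_tensor)  # tensor([1111,    4])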


# ============================ autograd: automatic differentiation ==============================
# Setting requires_grad on a tensor tells PyTorch that gradients are needed for it;
# autograd then records every operation performed on that tensor automatically.
x = t.ones(2, 2, requires_grad=True)
y = x.sum()  # y = x.sum() = (x[0][0] + x[0][1] + x[1][0] + x[1][1])
y.backward()  # backward pass; compute the gradients
G = x.grad  # every element's gradient is 1
# Note: grad is accumulated during backpropagation, i.e. each call to backward()
# adds the new gradients on top of the ones already stored, so the gradients
# should be zeroed before every backward pass.
y.backward()
G = x.grad  # every element's gradient is now 2 (1 + 1, accumulated)
# Functions whose names end with an underscore are in-place operations that
# modify the tensor itself, just like add_
x.grad.data.zero_()  # clear the accumulated gradients in place
G = x.grad  # the gradients are now all zero
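# A slightly fuller sketch of the same pattern, assuming the function
# y = (x ** 2).sum(), whose analytical gradient with respect to x is 2 * x:
x = t.ones(2, 2, requires_grad=True)
y = (x ** 2).sum()
y.backward()
print(x.grad)  # tensor([[2., 2.], [2., 2.]])
x.grad.zero_()  # zero the gradients before the next backward pass
y = (x ** 2).sum()  # rebuild the graph
y.backward()
print(x.grad)  # still tensor([[2., 2.], [2., 2.]]) because the gradients were zeroed in between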