#!/usr/bin/env python3
import torch # PyTorch needs to be installed
dim = 2
eps = 0.1
x = torch.ones(dim, requires_grad=True) # leaf of computational graph
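# requires_grad=True makes x a leaf tensor: autograd records every operation
# involving x so gradients can later flow back to it via backward().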
print("x : ",x)
print("x : ",x.data)
y = x + 2
out = torch.dot(y,y) # scalar product
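# For x = [1., 1.]: y = [3., 3.] and out = <y, y> = 3**2 + 3**2 = 18.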
print("y : ",y)
print("out : ",out)
print()
out.backward() # backward pass: populates .grad on the leaf tensors (here x)
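# Analytically d(out)/dx_i = 2*(x_i + 2), so x.grad should be [6., 6.].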
print("x.grad : ",x.grad)
with torch.no_grad(): # suspend gradient tracking for the in-place update
    x -= eps*x.grad # gradient-descent step on the parameter tensor
    x.grad = None # flush the gradient so the next backward() starts fresh
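# After the update: x = [1., 1.] - 0.1*[6., 6.] = [0.4, 0.4].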
print("x : ",x.data)
torch.dot(x+2,x+2).backward() # second forward/backward pass with the updated x
print("x.grad : ",x.grad)