@ikhlestov
Created September 11, 2017 20:08
pytorch: from tensors to variables
import torch
from torch.autograd import Variable
# define the inputs (no gradients needed for the data)
x_tensor = torch.randn(10, 20)
y_tensor = torch.randn(10, 5)
x = Variable(x_tensor, requires_grad=False)
y = Variable(y_tensor, requires_grad=False)
# define some weights
w = Variable(torch.randn(20, 5), requires_grad=True)
# get the tensor wrapped by the variable
print(type(w.data)) # torch.FloatTensor
# the gradient is not populated until backward() is called
print(w.grad) # None
# define a loss: mean squared error between the prediction x @ w and the target y
loss = torch.mean((y - x @ w) ** 2)
# calculate the gradients
loss.backward()
print(w.grad) # some gradients
# manually apply a gradient descent update (learning rate 0.01)
w.data -= 0.01 * w.grad.data
# manually zero gradients after update
w.grad.data.zero_()
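For reference, a minimal sketch of the same steps in the modern PyTorch API (0.4 and later), where Variable has been merged into Tensor and requires_grad is set on tensors directly; it assumes the same shapes as above.
import torch
# define inputs and weights directly as tensors
x = torch.randn(10, 20)
y = torch.randn(10, 5)
w = torch.randn(20, 5, requires_grad=True)
# mean squared error loss
loss = torch.mean((y - x @ w) ** 2)
# calculate the gradients
loss.backward()
# manually apply gradients without tracking the update in autograd
with torch.no_grad():
    w -= 0.01 * w.grad
# manually zero gradients after update
w.grad.zero_()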