@zou3519
Last active November 20, 2017 21:57
import torch
from torch.autograd import Variable
import torch.autograd as autograd

v_in = Variable(torch.Tensor([0.1, 0.1]).view(2, 1), requires_grad=True)

def forward(v_in):
    # Compose an affine function: scale by 2, then a 1 -> 1 linear layer.
    # NB: a fresh, randomly initialized Linear is created on every call,
    # so repeated calls (e.g. by gradcheck) evaluate different functions.
    f1 = lambda x: x * 2
    f2 = torch.nn.Linear(1, 1)
    grad_out = Variable(torch.ones(2, 1))
    # Differentiate f2(f1(v_in)) w.r.t. v_in, keeping the graph so the
    # gradient itself can be differentiated (double backward).
    gradient = torch.autograd.grad(outputs=f2(f1(v_in)), inputs=v_in,
                                   grad_outputs=grad_out,
                                   create_graph=True, retain_graph=True,
                                   only_inputs=True)[0]
    out = gradient.sum()
    return out

# Returns False: each gradcheck evaluation re-randomizes f2, and since
# f2(f1(v_in)) is affine in v_in, the gradient is the constant 2 * f2.weight,
# which does not depend on v_in at all.
autograd.gradcheck(forward, [v_in])

forward(v_in).backward()
v_in.grad  # these are non-existent? (stays None: out does not depend on v_in)
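
For contrast, here is a minimal sketch of the same double-backward pattern with a nonlinear function, so the gradient genuinely depends on the input. It assumes a modern PyTorch (0.4+, where Tensor subsumed Variable) and double precision, which gradcheck's finite-difference estimates need; the name forward_nonlinear is illustrative, not from the gist.

import torch
from torch import autograd

# Double precision keeps gradcheck's numerical Jacobian accurate.
v = torch.tensor([[0.1], [0.2]], dtype=torch.double, requires_grad=True)

def forward_nonlinear(v):
    out = (v ** 2).sum()                    # d(out)/dv = 2v, depends on v
    grad, = autograd.grad(out, v, create_graph=True)
    return grad.sum()                       # its derivative is 2 per element

print(autograd.gradcheck(forward_nonlinear, (v,)))  # True

forward_nonlinear(v).backward()
print(v.grad)  # tensor([[2.], [2.]]) -- populated, unlike the affine case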