@mrdrozdov
Created December 16, 2018 23:40
torch-inplace.txt
In [1]: print('hello world')
hello world
In [2]: print('hello world!')
hello world!
In [3]: import torch
In [4]: print(torch.__version__)
0.4.1
In [5]: l = torch.nn.Linear(1, 1)
In [6]: chart = torch.FloatTensor(10, 5)
In [7]: x = torch.FloatTensor(10, 1)
In [8]: chart.fill_(0)
Out[8]:
tensor([[0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0.]])
In [9]: x.normal_()
Out[9]:
tensor([[ 0.5913],
        [-1.0685],
        [-0.1202],
        [-0.9325],
        [-0.8090],
        [-1.5605],
        [-0.1662],
        [-0.2653],
        [ 0.5720],
        [ 0.0766]])
In [10]: chart[:, 0:1] = x
In [11]: chart
Out[11]:
tensor([[ 0.5913,  0.0000,  0.0000,  0.0000,  0.0000],
        [-1.0685,  0.0000,  0.0000,  0.0000,  0.0000],
        [-0.1202,  0.0000,  0.0000,  0.0000,  0.0000],
        [-0.9325,  0.0000,  0.0000,  0.0000,  0.0000],
        [-0.8090,  0.0000,  0.0000,  0.0000,  0.0000],
        [-1.5605,  0.0000,  0.0000,  0.0000,  0.0000],
        [-0.1662,  0.0000,  0.0000,  0.0000,  0.0000],
        [-0.2653,  0.0000,  0.0000,  0.0000,  0.0000],
        [ 0.5720,  0.0000,  0.0000,  0.0000,  0.0000],
        [ 0.0766,  0.0000,  0.0000,  0.0000,  0.0000]])
In [12]: chart[:, 1:2] = l(chart[:, 0:1])
In [13]: chart
Out[13]:
tensor([[ 0.5913,  1.3746,  0.0000,  0.0000,  0.0000],
        [-1.0685, -0.2462,  0.0000,  0.0000,  0.0000],
        [-0.1202,  0.6798,  0.0000,  0.0000,  0.0000],
        [-0.9325, -0.1135,  0.0000,  0.0000,  0.0000],
        [-0.8090,  0.0071,  0.0000,  0.0000,  0.0000],
        [-1.5605, -0.7267,  0.0000,  0.0000,  0.0000],
        [-0.1662,  0.6349,  0.0000,  0.0000,  0.0000],
        [-0.2653,  0.5381,  0.0000,  0.0000,  0.0000],
        [ 0.5720,  1.3558,  0.0000,  0.0000,  0.0000],
        [ 0.0766,  0.8720,  0.0000,  0.0000,  0.0000]], grad_fn=<CopySlices>)
In [14]: chart[:, 3:4] = l(chart[:, 1:2])
In [15]: chart
Out[15]:
tensor([[ 0.5913,  1.3746,  0.0000,  2.1396,  0.0000],
        [-1.0685, -0.2462,  0.0000,  0.5568,  0.0000],
        [-0.1202,  0.6798,  0.0000,  1.4611,  0.0000],
        [-0.9325, -0.1135,  0.0000,  0.6864,  0.0000],
        [-0.8090,  0.0071,  0.0000,  0.8042,  0.0000],
        [-1.5605, -0.7267,  0.0000,  0.0875,  0.0000],
        [-0.1662,  0.6349,  0.0000,  1.4172,  0.0000],
        [-0.2653,  0.5381,  0.0000,  1.3226,  0.0000],
        [ 0.5720,  1.3558,  0.0000,  2.1212,  0.0000],
        [ 0.0766,  0.8720,  0.0000,  1.6487,  0.0000]], grad_fn=<CopySlices>)
In [16]: loss = chart.norm()
In [17]: loss.backward()
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-17-52a0569421b1> in <module>
----> 1 loss.backward()

~/anaconda2/envs/diora-1/lib/python3.6/site-packages/torch/tensor.py in backward(self, gradient, retain_graph, create_graph)
     91                 products. Defaults to ``False``.
     92         """
---> 93         torch.autograd.backward(self, gradient, retain_graph, create_graph)
     94
     95     def register_hook(self, hook):

~/anaconda2/envs/diora-1/lib/python3.6/site-packages/torch/autograd/__init__.py in backward(tensors, grad_tensors, retain_graph, create_graph, grad_variables)
     88     Variable._execution_engine.run_backward(
     89         tensors, grad_tensors, retain_graph, create_graph,
---> 90         allow_unreachable=True)  # allow_unreachable flag
     91
     92

RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation
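
Why this fails: nn.Linear saves its input for the backward pass, and here that input is a slice of chart, i.e. a view sharing chart's storage. The later slice assignments (chart[:, 1:2] = ..., chart[:, 3:4] = ...) overwrite that storage in place, so autograd's version check rejects the graph when backward() runs.

A minimal workaround sketch, assuming the goal is the same 10x5 chart layout as above (the names col0/col1/col3 are just illustrative): compute each column out of place and concatenate once at the end, so nothing already recorded in the graph gets overwritten.

import torch

l = torch.nn.Linear(1, 1)
x = torch.FloatTensor(10, 1).normal_()

col0 = x                      # column 0: the random input
col1 = l(col0)                # column 1: first application of the linear layer
col3 = l(col1)                # column 3: second application
zeros = torch.zeros(10, 1)    # placeholder for the untouched columns 2 and 4

# Same layout as the chart above, built without any slice assignment.
chart = torch.cat([col0, col1, zeros, col3, zeros], dim=1)

loss = chart.norm()
loss.backward()               # succeeds; gradients reach l.weight and l.bias

If the chart really has to be filled incrementally (e.g. cell by cell in a loop), the same idea applies: collect the pieces in a Python list and call torch.cat or torch.stack once at the end, instead of writing into slices of a tensor that is already part of the graph.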