Created July 7, 2017 01:18
Traceback (most recent call last):
  File "main.py", line 173, in <module>
    train()
  File "main.py", line 140, in train
    output, hidden = model(data, hidden)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 225, in __call__
    result = self.forward(*input, **kwargs)
  File "/home/conner/programming/learned_activations/experiments/word_language_model/model.py", line 57, in forward
    output, hidden = self.rnn(emb, hidden)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 225, in __call__
    result = self.forward(*input, **kwargs)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/learned_activations/recurrent.py", line 206, in forward
    cell=self.cells[layer], input_=input_, length=length, hx=hx)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/learned_activations/recurrent.py", line 180, in _forward_rnn
    h_next, c_next = cell(input_=input_[time], hx=hx)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 225, in __call__
    result = self.forward(*input, **kwargs)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/learned_activations/recurrent.py", line 135, in forward
    c_1 = self.sigmoid_1(f) * c_0 + self.sigmoid_1(i) * self.tahn_1(g)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 225, in __call__
    result = self.forward(*input, **kwargs)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/learned_activations/activations.py", line 44, in forward
    x = self.normalize_1(x)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 225, in __call__
    result = self.forward(*input, **kwargs)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/learned_activations/activations.py", line 20, in forward
    return self.gamma * (x - mean) / (std + self.eps) + self.beta
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/autograd/variable.py", line 793, in __sub__
    return self.sub(other)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/autograd/variable.py", line 304, in sub
    return self._sub(other, False)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/autograd/variable.py", line 298, in _sub
    return Sub.apply(self, other, inplace)
  File "/home/conner/anaconda3/lib/python3.6/site-packages/torch/autograd/_functions/basic_ops.py", line 34, in forward
    return a.sub(b)
RuntimeError: inconsistent tensor size, expected r_ [4000 x 25], t [4000 x 25] and src [4000] to have the same number of elements, but got 100000, 100000 and 4000 elements respectively at /home/conner/programming/pytorch/torch/lib/TH/generic/THTensorMath.c:887
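
The failing line is the layer-normalization step in activations.py, self.gamma * (x - mean) / (std + self.eps) + self.beta. The error reports x and the result buffer as [4000 x 25] but the subtracted tensor as [4000], i.e. mean and std were reduced over the feature dimension without keeping it, and the PyTorch build in use (mid-2017, before broadcasting landed in 0.2) cannot subtract a [4000] tensor from a [4000 x 25] one. The sketch below is a hypothetical reconstruction of such a normalization module, not the gist author's actual activations.py (which is not included here); it shows the usual fix of reducing with keepdim=True so mean and std come out as [4000, 1] and line up with x. On a pre-broadcasting PyTorch the equivalent workaround would be mean.unsqueeze(1).expand_as(x) and likewise for std.

import torch
import torch.nn as nn

class LayerNorm(nn.Module):
    # Hypothetical stand-in for the normalize_1 module used in activations.py.
    def __init__(self, features, eps=1e-6):
        super().__init__()
        self.gamma = nn.Parameter(torch.ones(features))
        self.beta = nn.Parameter(torch.zeros(features))
        self.eps = eps

    def forward(self, x):
        # Reducing without keepdim yields shape [4000] for a [4000, 25] input,
        # which is exactly the "src [4000]" in the error above. Keeping the
        # reduced dimension gives [4000, 1], which subtracts and divides cleanly.
        mean = x.mean(dim=-1, keepdim=True)
        std = x.std(dim=-1, keepdim=True)
        return self.gamma * (x - mean) / (std + self.eps) + self.beta

# Quick check with the shapes from the traceback:
x = torch.randn(4000, 25)
print(LayerNorm(25)(x).shape)  # torch.Size([4000, 25])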