Created November 10, 2017 14:46
allennlp test run
----------------------------- Captured stderr call -----------------------------
2017-11-09 22:23:15,113 - INFO - allennlp.common.checks - Pytorch version: 0.3.0b0+1f694e9
_________________________ EntropyTest.test_masked_case _________________________

self = <tests.training.metrics.entropy_test.EntropyTest testMethod=test_masked_case>

    def test_masked_case(self):
        metric = Entropy()
        # This would have non-zero entropy without the mask.
        logits = torch.Tensor([[1, 1, 1, 1],
                               [10000, -10000, -10000, -1000]])
        mask = torch.Tensor([0, 1])
>       metric(logits, mask)

tests/training/metrics/entropy_test.py:39:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
allennlp/training/metrics/entropy.py:34: in __call__
    log_probs = torch.nn.functional.log_softmax(logits).data
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

input =
     1      1      1      1
 10000 -10000 -10000  -1000
[torch.FloatTensor of size 2x4]

dim = 1, _stacklevel = 3

    def log_softmax(input, dim=None, _stacklevel=3):
        r"""Applies a softmax followed by a logarithm.

        While mathematically equivalent to log(softmax(x)), doing these two
        operations separately is slower, and numerically unstable. This function
        uses an alternative formulation to compute the output and gradient correctly.

        See :class:`~torch.nn.LogSoftmax` for more details.

        Arguments:
            input (Variable): input
            dim (int): A dimension along which log_softmax will be computed.
        """
        if dim is None:
            dim = _get_softmax_dim('log_softmax', input.dim(), _stacklevel)
>       return torch._C._nn.log_softmax(input, dim)
E       RuntimeError: log_softmax(): argument 'input' (position 1) must be Variable, not torch.FloatTensor

/opt/conda/envs/pytorch-py3.6/lib/python3.6/site-packages/torch/nn/functional.py:786: RuntimeError
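
For context, the failure comes from the PyTorch 0.3-era split between Tensors and autograd Variables: torch.nn.functional.log_softmax rejects a raw torch.FloatTensor. Below is a minimal sketch of the workaround, assuming the caller can simply wrap its input in a Variable before the functional call; this is illustrative only, not necessarily the fix allennlp adopted.

    import torch
    from torch.autograd import Variable  # a no-op wrapper from PyTorch 0.4 onward

    # Same logits as in the failing test above.
    logits = torch.Tensor([[1, 1, 1, 1],
                           [10000, -10000, -10000, -1000]])

    # Fails on 0.3: RuntimeError: log_softmax(): argument 'input' must be Variable
    # log_probs = torch.nn.functional.log_softmax(logits, dim=1).data

    # Works on 0.3: wrap the raw tensor in a Variable before calling the functional API.
    log_probs = torch.nn.functional.log_softmax(Variable(logits), dim=1).data

On PyTorch 0.4 and later, Tensors and Variables were merged, so the original call in allennlp/training/metrics/entropy.py works as written there.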