Created July 16, 2017 19:26
Save awni/675bedbda43e7aa3ad06dfd7f5ce7205 to your computer and use it in GitHub Desktop.
Variable ByteTensor Sum Bug
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
"""Reproduce a historical PyTorch bug: summing a ByteTensor (uint8) wrapped
in an autograd.Variable returned a wrong (overflowed) result for tensors
longer than ~500 elements, while summing the raw tensor was correct.

For each size from 1 to 1999 the script compares numpy's sum, the sum of the
raw tensor, and the sum through the Variable wrapper, printing a line per
size and flagging mismatches.
"""
import torch
import torch.autograd as autograd
import numpy as np

np.random.seed(11)  # deterministic 0/1 data so runs are reproducible

for size in range(1, 2000, 1):
    # Random 0/1 vector of the given length, stored as uint8 so that a
    # uint8-accumulating sum would overflow once the true sum exceeds 255.
    a = np.random.randint(0, 2, size).astype(np.uint8)
    av = autograd.Variable(torch.from_numpy(a))

    numpy_val = np.sum(a)
    # .item() extracts a Python scalar; the old `.data[0]` indexing raises
    # IndexError on the 0-dim tensor that torch.sum returns in PyTorch >= 0.4.
    torch_dat = torch.sum(av.data).item()
    # When summing the values wrapped in a Variable, affected PyTorch
    # versions gave the wrong answer for sizes > 500 or so.
    torch_var = torch.sum(av).item()

    # The raw-tensor sum must always agree with numpy.
    assert torch_dat == numpy_val
    if torch_var != torch_dat:
        print("Size {}: Incorrect: Expected {}, Got {}".format(
            size, torch_dat, torch_var))
    else:
        print("Size {}: Correct".format(size))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.