James Reed (jamesr66a) - GitHub gists
import torch

class TracedModule(torch.nn.Module):
    def forward(self, x):
        x = x.type(torch.float32)
        return torch.floor(torch.sqrt(x) / 5.)

# Trace the module with an example input.
tm = torch.jit.trace(TracedModule(), torch.rand(5))
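Not part of the original gist: a quick sanity check that prints the IR recorded during tracing and runs the traced module.

print(tm.graph)          # TorchScript IR captured by the tracer
print(tm(torch.rand(5)))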
class ScriptModule(torch.jit.ScriptModule):
    @torch.jit.script_method
    def forward(self, x):
        ...  # the gist is truncated here; the method body is missing
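The scripted snippet above is cut off. As a hedged sketch (hypothetical class name, written against the current torch.jit.script API rather than the gist-era ScriptModule subclassing), a scripted module with data-dependent control flow, which tracing cannot capture, might look like:

import torch

class MyScriptModule(torch.nn.Module):
    def forward(self, x):
        # script preserves value-dependent branches; trace would bake in
        # whichever branch the example input happened to take.
        if x.sum() > 0:
            return x * 2.0
        return -x

sm = torch.jit.script(MyScriptModule())
print(sm(torch.rand(5)))
print(sm.graph)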
Build error from compiling PyTorch: the generated VariableType.h declares storageWithAllocator taking an at::Allocator*, while the pure virtual in ATen's Type.h takes a std::unique_ptr<Allocator>, so the 'override' does not match any base method and the final class is left abstract.

In file included from torch/csrc/torch.cpp:2:
/Users/jamesreed/onnx-fairseq/pytorch/torch/csrc/autograd/generated/VariableType.h:41:36: error: 'storageWithAllocator' marked 'override' but does not override any member functions
virtual std::unique_ptr<Storage> storageWithAllocator(int64_t size,at::Allocator* allocator) const override;
^
/Users/jamesreed/onnx-fairseq/pytorch/torch/csrc/autograd/generated/VariableType.h:31:8: warning: abstract class is marked 'final' [-Wabstract-final-class]
struct VariableType final : public at::Type {
^
/Users/jamesreed/onnx-fairseq/pytorch/torch/lib/tmp_install/include/ATen/Type.h:90:36: note: unimplemented pure virtual method 'storageWithAllocator' in 'VariableType'
virtual std::unique_ptr<Storage> storageWithAllocator(int64_t size,std::unique_ptr<Allocator> allocator) const = 0;
^
def test_call_python_fn_from_script_module(self):
    def python_fn(x):
        return torch.neg(x)

    class ScriptMod(torch.jit.ScriptModule):
        def __init__(self):
            super(ScriptMod, self).__init__()
            self.param = torch.nn.Parameter(torch.rand(4, 3))

        @torch.jit.script_method
        def forward(self, x):
            # The gist is truncated here; a plausible body, given the test
            # name, calls the Python function from the scripted method.
            return python_fn(torch.mm(x, self.param))
import torch

x = torch.randn(3, 3, requires_grad=True)
print(x)

# Probe how clamp (and its backward) behaves with a NaN lower bound.
min = float('NaN')
max = 0.0
y = torch.clamp(x, min, max)
print('y', y)
y.sum().backward()
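For contrast, a minimal sketch (not in the original gist) of the one-sided case, where no NaN bound is involved and the gradient is well defined:

import torch

x = torch.randn(3, 3, requires_grad=True)
y = torch.clamp(x, max=0.0)   # upper bound only
y.sum().backward()
print(x.grad)                 # 1.0 where x < 0, 0.0 where the clamp was active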
======================================================================
ERROR: test_python_call_annotation (__main__.TestScript)
----------------------------------------------------------------------
Traceback (most recent call last):
File "test/test_jit.py", line 1932, in test_python_call_annotation
@torch.jit.script
File "/Users/jamesreed/onnx-fairseq/pytorch/torch/jit/__init__.py", line 373, in script
graph = _jit_script_compile(torch._C.TypedDef(ast, schema), rcb)
TypeError: _jit_script_compile(): incompatible function arguments. The following argument types are supported:
1. (arg0: torch._C.TypedDef, arg1: Callable[[str], function]) -> torch._C.Graph
import torch

class ClampMod(torch.nn.Module):
    def forward(self, x):
        # The bounds are inverted (min=4.13 > max=3.14), exercising
        # clamp's edge case when min exceeds max.
        return x.clamp(max=3.14, min=4.13)

import io
f = io.BytesIO()
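The gist breaks off after creating the buffer. A plausible continuation (an assumption; the original is truncated) would export the module to ONNX into that buffer:

# Hypothetical continuation: serialize ClampMod to the in-memory buffer.
torch.onnx.export(ClampMod(), (torch.randn(3, 4),), f)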
commit bd1b4dbe99e6040ded6c4ab2a59798536a2d6a45
Author: James Reed <jamesreed@fb.com>
Date: Mon Jul 23 13:59:34 2018 -0700
Fix zipfile export
diff --git a/test/expect/TestScript.test_script_module_file_export.expect b/test/expect/TestScript.test_script_module_file_export.expect
new file mode 100644
index 000000000..1d7ce966b
--- /dev/null
+++ b/test/expect/TestScript.test_script_module_file_export.expect
commit 96d6beb5d300da71c2e5eee0eec9a012480af8f0
Author: James Reed <jamesreed@fb.com>
Date: Mon Jul 23 15:47:43 2018 -0700
Bugfix for stateful module export
diff --git a/test/test_jit.py b/test/test_jit.py
index 9363c0954..39deb0f74 100644
--- a/test/test_jit.py
+++ b/test/test_jit.py
import torch

@torch.jit.script
def foo(x, y):
    for i in range(100):
        # _to_tensor converts the int loop index to a tensor for fmod;
        # it is referenced but not defined in this gist.
        if torch.fmod(_to_tensor(i), 3) == 0:
            y += x
        else:
            x += y
    return x, y
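For foo to compile, _to_tensor must already be in scope when foo is scripted. The gist never shows it; a hypothetical definition consistent with its usage above:

import torch

def _to_tensor(i):
    # Hypothetical helper: wrap a Python int in a float tensor so that
    # torch.fmod can operate on it inside the scripted loop.
    return torch.tensor(float(i))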
diff --git a/torch/onnx/symbolic.py b/torch/onnx/symbolic.py
index 43fff76a1..ca5702837 100644
--- a/torch/onnx/symbolic.py
+++ b/torch/onnx/symbolic.py
@@ -710,6 +710,12 @@ def type_as(g, self, other):
     return g.op("ATen", self, other, operator_s="type_as")
+
+@parse_args('v', 'is', 'v', 'v', 'f', 'i')
+def layer_norm(g, self, normalized_shape, weight, bias, eps, cudnn_enable):
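The diff is truncated at the signature. Following the ATen-fallback pattern of type_as directly above, the body plausibly resembles the following (an assumption; the actual commit may differ):

+    # Hypothetical body: fall back to the ATen op, as type_as does above.
+    return g.op("ATen", self, weight, bias,
+                normalized_shape_i=normalized_shape, eps_f=eps,
+                cudnn_enable_i=cudnn_enable, operator_s="layer_norm")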