Double backpropagation with Chainer (v3.0.0RC1), PyTorch (0.2.0_4), and TensorFlow (1.3.0-dev20170822)
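The Chainer, PyTorch, and graph-mode TensorFlow snippets below all minimize the same objective by gradient descent. The x² term of z is produced by differentiating y, so computing dz/dx requires backpropagating through a gradient (double backprop):

    y = \frac{x^3}{3}, \qquad g_x = \frac{dy}{dx} = x^2, \qquad z = g_x - 2x + 1 = (x - 1)^2

Since z = (x - 1)², gradient descent with learning rate 0.1 drives x toward the minimizer x = 1, so each snippet should print values converging to 1.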
# Chainer: minimize z = (x - 1) ** 2 by plain gradient descent,
# updating the Variable x manually.
import chainer
from chainer import Variable
import numpy as np


def f(x):
    y = x * x * x / 3
    # First backprop: gx = dy/dx = x ** 2. enable_double_backprop=True
    # keeps the graph so that z below can be differentiated through gx.
    gx, = chainer.grad([y], [x], enable_double_backprop=True)
    z = gx - 2 * x + 1  # z = (x - 1) ** 2
    return z


x = Variable(np.random.uniform(size=()))
for _ in range(30):
    x = Variable(x.data)  # detach x from the previous iteration's graph
    z = f(x)
    z.backward()  # second backprop, through the gradient gx
    x -= 0.1 * x.grad
    print(x.data)
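A quick sanity check (an illustrative addition, reusing f from above): if the inner chainer.grad really returns gx = x², then f(x) must equal (x - 1)².

from chainer import Variable
import numpy as np

# f(0.5) = 0.5 ** 2 - 2 * 0.5 + 1 = 0.25, i.e. (0.5 - 1) ** 2
print(f(Variable(np.array(0.5))).data)  # -> 0.25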
# Chainer: the same objective, with x held as a Parameter of a Link
# and updated by the SGD optimizer.
import chainer
from chainer.initializers import Uniform
from chainer.optimizers import SGD


class F(chainer.Link):
    def __init__(self):
        super(F, self).__init__()
        with self.init_scope():
            self.x = chainer.Parameter(Uniform(), ())

    def __call__(self):
        x = self.x
        y = x * x * x / 3
        gx, = chainer.grad([y], [x], enable_double_backprop=True)
        z = gx - 2 * x + 1  # z = (x - 1) ** 2
        return z


opt = SGD(0.1)
f = F()
opt.setup(f)
for _ in range(30):
    opt.update(f)  # computes z = f(), backprops, and applies the SGD step
    print(f.x.data)
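For reference, a sketch of roughly what opt.update(f) does each iteration (Chainer's update-with-loss-function convention; see GradientMethod.update for the actual implementation):

f.cleargrads()  # clear accumulated parameter gradients
z = f()         # forward pass, including the inner chainer.grad call
z.backward()    # double backprop through gx
opt.update()    # SGD step: f.x.data -= 0.1 * f.x.grad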
# PyTorch: the same optimization; create_graph=True makes the gradient
# gx differentiable so that z can be backpropagated through it.
import torch
from torch.autograd import grad
from torch.autograd import Variable


def f(x):
    y = x * x * x / 3
    gx, = grad([y], [x], create_graph=True)
    z = gx - 2 * x + 1  # z = (x - 1) ** 2
    return z


x = Variable(torch.rand(1))
for _ in range(30):
    x = Variable(x.data, requires_grad=True)  # fresh leaf each iteration
    z = f(x)
    z.backward()
    x = x - 0.1 * x.grad
    print(x.data.numpy())
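The same loop on a recent PyTorch, where Variable is merged into Tensor (a sketch assuming the >= 0.4 API; not part of the original gist):

import torch

x = torch.rand(1, requires_grad=True)
for _ in range(30):
    y = x * x * x / 3
    gx, = torch.autograd.grad(y, x, create_graph=True)
    z = gx - 2 * x + 1  # z = (x - 1) ** 2
    z.backward()
    with torch.no_grad():
        x -= 0.1 * x.grad  # SGD step on the leaf tensor
        x.grad = None      # reset the gradient for the next iteration
    print(x.detach().numpy())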
# TensorFlow (graph mode): tf.gradients builds a symbolic gradient
# graph, so the optimizer can differentiate z through gx.
import tensorflow as tf

x = tf.Variable(tf.random_uniform(()))
y = x * x * x / 3
gx, = tf.gradients([y], [x])
z = gx - 2 * x + 1  # z = (x - 1) ** 2
step = tf.train.GradientDescentOptimizer(0.1).minimize(z)

sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
for _ in range(30):
    step.run()
    print(x.value().eval())
sess.close()
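The minimize(z) call hides the second differentiation; it is equivalent to taking the gradient of z explicitly and applying it (a sketch using the same x and z as above):

# dz/dx = 2 * (x - 1); tf.gradients differentiates through gx
dz, = tf.gradients([z], [x])
step = tf.train.GradientDescentOptimizer(0.1).apply_gradients([(dz, x)])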
# Manual gradient descent optimization with TF eager mode.
# Did not work with the nightly build of 8/22/2017.
import tensorflow as tf
from tensorflow.python.eager import context

with context.eager_mode():
    x = tf.random_uniform(())
    for _ in range(30):
        y = x * x
        dx, = tf.gradients([y], [x])  # raises: tf.gradients is graph-only
        x -= 0.1 * dx
        print(x)

"""
Traceback (most recent call last):
  File "tf_test2.py", line 11, in <module>
    dx, = tf.gradients([y], [x])
  File "/Users/oonokenta/.pyenv/versions/anaconda3-4.3.0/envs/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/gradients_impl.py", line 433, in gradients
    grad_ys = _DefaultGradYs(grad_ys, ys, colocate_gradients_with_ops)
  File "/Users/oonokenta/.pyenv/versions/anaconda3-4.3.0/envs/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/gradients_impl.py", line 233, in _DefaultGradYs
    with _maybe_colocate_with(y.op, colocate_gradients_with_ops):
  File "/Users/oonokenta/.pyenv/versions/anaconda3-4.3.0/envs/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 845, in op
    raise NotImplementedError("op not supported for Eager Tensors.")
NotImplementedError: op not supported for Eager Tensors.
"""