@jackhftang
Last active June 27, 2017 03:26
A demonstration of autograd: guess the linear recurrence relation of the Fibonacci numbers. I.e. given f(0) = a0, f(1) = a1, f(x) = w0*f(x-1) + w1*f(x-2), find a0, a1, and w that best approximate the Fibonacci sequence fib(i) for i = 5..9.
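As a quick sanity check (plain Python, separate from the gist's code): with a0 = a1 = 1 and w = [1, 1] the recurrence reproduces the Fibonacci sequence exactly, which is why those values are marked as optimal in the comments below.

f = [1, 1]
for _ in range(8):
    f.append(1 * f[-1] + 1 * f[-2])  # w0 = w1 = 1
print(f)  # -> [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
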
import torch as th
from torch.autograd import Variable
## helpers
def var(t):
    return Variable(t, requires_grad=True)
## training data: the first 10 Fibonacci numbers
fibs = [1, 1]
for i in range(8):
    fibs.append(fibs[i] + fibs[i + 1])
## variables
a0 = var(th.randn(1)) # optimal = 1
a1 = var(th.randn(1)) # optimal = 1
w = var(th.randn(2)) # optimal = [1,1]
## model
def model(x):
    if x == 0: return a0
    if x == 1: return a1
    a = model(x - 1)
    b = model(x - 2)
    return w[0] * a + w[1] * b
def report(name):
    print(f'==== {name} ====')
    print(f'a0={a0.data[0]}')
    print(f'a1={a1.data[0]}')
    print(f'w0={w.data[0]}')
    print(f'w1={w.data[1]}')
    loss = [(model(i) - fibs[i]).abs().data[0] for i in range(len(fibs))]
    print(f'loss={loss}')
## training
report('initial')
rate = 0.001
for i in range(10000):
    print(f'running iteration {i}...', end='\r')
    ## approximate last 5 values
    for j in range(len(fibs) - 5, len(fibs)):
        ## absolute diff. as loss
        loss = (model(j) - fibs[j]).abs()
        ## calculate gradient
        loss.backward()
        ## update variables
        for v in [a0, a1, w]:
            v.data -= rate * v.grad.data
            v.grad.data.zero_()
report('final')
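
The gist is written against the pre-0.4 PyTorch API, where autograd required wrapping tensors in Variable. Below is a minimal sketch of the same experiment on a recent PyTorch version, assuming tensors created with requires_grad=True and torch.optim.SGD swapped in for the manual update (neither is part of the original):

import torch as th

# parameters as plain tensors with gradient tracking
# (Variable is no longer needed on PyTorch >= 0.4)
a0 = th.randn(1, requires_grad=True)  # optimal = 1
a1 = th.randn(1, requires_grad=True)  # optimal = 1
w = th.randn(2, requires_grad=True)   # optimal = [1, 1]

# training data: the first 10 Fibonacci numbers
fibs = [1, 1]
for i in range(8):
    fibs.append(fibs[-1] + fibs[-2])

# same recursive model as the gist
def model(x):
    if x == 0: return a0
    if x == 1: return a1
    return w[0] * model(x - 1) + w[1] * model(x - 2)

opt = th.optim.SGD([a0, a1, w], lr=0.001)
for i in range(10000):
    # approximate the last 5 values
    for j in range(len(fibs) - 5, len(fibs)):
        loss = (model(j) - fibs[j]).abs()
        opt.zero_grad()
        loss.backward()
        opt.step()

print(a0.item(), a1.item(), w.tolist())

torch.optim.SGD with lr=0.001 performs exactly the v -= rate * v.grad update the gist does by hand, and opt.zero_grad() replaces the explicit v.grad.data.zero_().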