
@maedoc
Created March 11, 2020 08:11
Simple autograd odeint model
import time
from autograd import numpy as np, grad
from autograd.scipy.integrate import odeint
from autograd.builtins import tuple
from autograd.scipy.stats import norm
from autograd.misc.optimizers import adam
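# vdpo below defines a network of van der Pol-like oscillators with mean-field coupling:
#   dx/dt = tau * (x - x**3/3 + y)
#   dy/dt = (a - x + k*mean(x)) / tau
# where a is a per-node excitability parameter and k couples each node
# to the network mean of x.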
def vdpo(y, t0, a, k, tau=3.0):
    x, y = np.reshape(y, (2, -1))
    return np.hstack([
        tau * (x - x**3/3 + y),
        (1.0/tau) * (a - x + k*np.mean(x))
    ])
def ode(p, t=np.r_[:3.0:10j]):
    # integrate the network from initial conditions p['ic'] over time points t
    return odeint(vdpo, p['ic'], t, tuple((p['a'], p['k'])), rtol=0.001, atol=0.01)
def loss(p, true_y, gain):
    # negative log posterior: Gaussian priors on the initial conditions and a,
    # plus a Gaussian likelihood of the data under the projected trajectory
    l2_ic = norm.logpdf(p['ic'], 0.0, 2.0).sum()
    l2_a = norm.logpdf(p['a'], 1.0, 1.0).sum()
    l2_y = norm.logpdf(true_y, np.dot(ode(p), gain.T), 1.0).sum()
    return -(l2_ic + l2_a + l2_y)
# set up test data
n = 128
true_p = {
    'k': 0.1,
    'a': np.random.randn(n),
    'ic': np.random.randn(2*n)
}
gain = np.random.randn(64, 2*n)  # observation matrix: 2*n state variables onto 64 channels
true_y = np.dot(ode(true_p), gain.T)
# time a single gradient evaluation
tic = time.time()
gloss = grad(loss)
gloss(true_p, true_y, gain)
toc = time.time()
print('gradient took', toc - tic, 's')
# try optimizing
opt_p = {
    'k': 0.2,
    'a': np.ones((n, )),
    'ic': np.ones((2*n, )),
}
def cb(x, i, g):
    # report the log posterior every 10 iterations
    if i % 10 == 0:
        print('iter', i, 'lp', -loss(x, true_y, gain))
for step_size in (0.2, 0.02):
    print('step', step_size)
    opt_p = adam(
        lambda x, i: gloss(x, true_y, gain),
        opt_p, callback=cb, step_size=step_size)
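# A possible sanity check (a sketch added here, not part of the original gist;
# report_recovery is a hypothetical helper): compare the recovered per-node
# parameters and initial conditions against the ground-truth values used to
# simulate the data, e.g. via correlation.
def report_recovery(opt_p, true_p):
    for key in ('a', 'ic'):
        r = np.corrcoef(true_p[key], opt_p[key])[0, 1]
        print('recovery of', key, 'corr =', r)
    print('k true', true_p['k'], 'vs estimated', opt_p['k'])

report_recovery(opt_p, true_p)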