A recursive least squares (RLS) algorithm implemented in closed form with CasADi, used here to estimate the parameters of a constant-acceleration motion model from noisy position measurements.
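For reference, the recursion implemented below is the standard covariance-form RLS update (a Kalman filter with a static parameter state), with the covariance propagated in Joseph form. The symbols match the code: $r$ is the measurement-noise standard deviation and $H_k = [1,\; t_k,\; t_k^2/2]$ is the regression row.

$$
\begin{aligned}
R_k &= r^2, \\
L_k &= R_k + H_k P_{k-1} H_k^\top, \\
K_k &= P_{k-1} H_k^\top L_k^{-1}, \\
\hat{x}_k &= \hat{x}_{k-1} + K_k\,(y_k - H_k \hat{x}_{k-1}), \\
P_k &= (I - K_k H_k)\,P_{k-1}\,(I - K_k H_k)^\top + K_k R_k K_k^\top.
\end{aligned}
$$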
from casadi import SX, inv, Function, jacobian, vertcat
import numpy as np
import matplotlib.pyplot as plt
# true values of the parameters to be estimated
initialPosition = 10
acceleration = 5
initialVelocity = -2
# measurement noise standard deviation
noiseStd = 1
simulationTime = np.linspace(0, 15, 2000)
position = np.zeros(np.size(simulationTime))
for i in np.arange(np.size(simulationTime)):
    position[i] = initialPosition + initialVelocity*simulationTime[i] + (acceleration*simulationTime[i]**2)/2
# add the measurement noise
positionNoisy = position + noiseStd*np.random.randn(np.size(simulationTime))
# verify the position vector by plotting the results
plotStep = 300
plt.plot(simulationTime[0:plotStep], position[0:plotStep], linewidth=4, label="Ideal Position")
plt.plot(simulationTime[0:plotStep], positionNoisy[0:plotStep], 'r', label="Observed Position")
plt.xlabel('time')
plt.ylabel('position')
plt.legend()
plt.show()  # display the verification plot when running as a plain script
# recursive least squares implementation in discrete time
# symbolic parameters and measurement model
pos = SX.sym('pos', 1, 1)
vel = SX.sym('vel', 1, 1)
acc = SX.sym('acc', 1, 1)
sim_t = SX.sym('sim_t', 1, 1)
predict = pos + vel*sim_t + (acc*sim_t**2)/2
# parameter vector to be estimated
x = vertcat(pos, vel, acc)
# observation matrix
H = jacobian(predict, x)
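# note: predict is linear in (pos, vel, acc), so the Jacobian above is simply the
# regression row H = [1, sim_t, sim_t**2/2]; jacobian() is just a convenient way
# to extract it symbolically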
# measurement
yk = SX.sym('yk', 1, 1)
# noise and covariance terms
rqk = SX.sym('rqk', 1, 1)  # measurement noise standard deviation
Rk = rqk@rqk.T  # measurement noise covariance
Pk_1 = SX.sym('pk_1', 3, 3)  # previous estimation error covariance
# innovation covariance
L_k = Rk + H@Pk_1@H.T
L_k_inv = inv(L_k)
# compute gain matrix
K = Pk_1@H.T@L_k_inv
# compute correction (innovation)
err_k = yk - H@x
# new estimate
estimate = x + K@err_k
# propagate the estimation error covariance (Joseph form)
Pk = (SX.eye(x.size1()) - K@H)@Pk_1@(SX.eye(x.size1()) - K@H).T + K@Rk@K.T
rls_predict = Function('RLS', [x, yk, sim_t, rqk, Pk_1], [estimate, err_k, Pk])
# run the recursive estimator over the noisy measurements
x0 = np.random.randn(3, 1)      # initial parameter guess
P0 = 100*np.eye(3, 3)           # initial estimation error covariance
R = np.sqrt(0.5)*np.eye(1, 1)   # assumed measurement noise standard deviation
rqk = R
Pk = P0
estimate = x0
for i in np.arange(np.size(simulationTime)):
    estimate, err_k, Pk = rls_predict(estimate, positionNoisy[i], simulationTime[i], rqk, Pk)
print(estimate)
# DM([9.97971, -1.99602, 4.99921])  (close to the true values [10, -2, 5])
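As a quick follow-up check, the converged estimate can be compared against the true parameters and used to reconstruct the trajectory. This is a minimal sketch, assuming the variables from the listing above (estimate, simulationTime, positionNoisy and the true parameter values) are still in scope:

theta_hat = np.array(estimate).flatten()   # CasADi DM -> numpy array, ordered [pos, vel, acc]
theta_true = np.array([initialPosition, initialVelocity, acceleration])
print("estimation error:", theta_hat - theta_true)

# reconstruct the fitted trajectory and plot it against the noisy measurements
fitted = theta_hat[0] + theta_hat[1]*simulationTime + 0.5*theta_hat[2]*simulationTime**2
plt.figure()
plt.plot(simulationTime, positionNoisy, 'r.', markersize=2, label="Observed Position")
plt.plot(simulationTime, fitted, 'k', linewidth=2, label="RLS Fit")
plt.xlabel('time')
plt.ylabel('position')
plt.legend()
plt.show()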