Skip to content

Instantly share code, notes, and snippets.

@edxmorgan
Created February 28, 2025 01:54
Show Gist options
  • Save edxmorgan/a2025103ade2e61a6ab8dbd4908ee1ba to your computer and use it in GitHub Desktop.
simple Pytensor Op for casadi model
import numpy as np
from pytensor.graph.op import Op
from pytensor.graph.basic import Apply
import casadi as cs
import pytensor.tensor as at
import pytensor.compile.function as pt_function
from pytensor import grad
class CasadiJacOp(Op):
    """
    Helper PyTensor Op that wraps a CasADi Jacobian function.

    Evaluating the Jacobian numerically inside its own Op lets it appear
    in gradient graphs built by ``pytensor.grad`` (see
    ``CasadiOpWithJac.grad``).

    Parameters
    ----------
    casadi_jac : cs.Function
        CasADi function mapping the input vector to its Jacobian values.
    """

    itypes = [at.dvector]  # 1D double vector input.
    otypes = [at.dvector]  # 1D double vector output.

    def __init__(self, casadi_jac):
        self.casadi_jac = casadi_jac

    def make_node(self, x):
        # Output has the same type as the input (1D double vector).
        x = at.as_tensor_variable(x)
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        (x_val,) = inputs
        x_val = np.asarray(x_val)
        result = self.casadi_jac(x_val)
        # A single-output CasADi Function call returns a DM directly;
        # multi-output calls return a tuple.  The original code indexed
        # the DM with [0], which silently kept only the first element
        # for vector-valued results.
        if isinstance(result, (tuple, list)):
            result = result[0]
        # PyTensor requires a 1D float64 ndarray matching ``otypes``;
        # CasADi DM objects are (n, 1) matrices, so convert and flatten.
        output_storage[0][0] = np.asarray(result, dtype=np.float64).ravel()
class CasadiOpWithJac(Op):
    """
    PyTensor Op that wraps a CasADi function along with its Jacobian.

    Parameters
    ----------
    casadi_func : cs.Function
        CasADi function to evaluate in ``perform``.
    casadi_jac : cs.Function
        CasADi function returning the derivative values used by ``grad``.

    Notes
    -----
    ``grad`` multiplies the Jacobian values elementwise with the output
    gradient, which is only a correct vector-Jacobian product when the
    wrapped function is elementwise (diagonal Jacobian) — TODO confirm
    for non-elementwise functions.
    """

    itypes = [at.dvector]  # 1D double vector input.
    otypes = [at.dvector]  # 1D double vector output.

    def __init__(self, casadi_func, casadi_jac):
        self.casadi_func = casadi_func
        self.casadi_jac = casadi_jac

    def make_node(self, x):
        # Output has the same type as the input (1D double vector).
        x = at.as_tensor_variable(x)
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        (x_val,) = inputs
        x_val = np.asarray(x_val)
        result = self.casadi_func(x_val)
        # A single-output CasADi Function call returns a DM directly;
        # multi-output calls return a tuple.  The original code indexed
        # the DM with [0], which silently kept only the first element
        # for vector-valued results.
        if isinstance(result, (tuple, list)):
            result = result[0]
        # PyTensor requires a 1D float64 ndarray matching ``otypes``;
        # CasADi DM objects are (n, 1) matrices, so convert and flatten.
        output_storage[0][0] = np.asarray(result, dtype=np.float64).ravel()

    def grad(self, inputs, output_grads):
        (x_val,) = inputs
        (g_out,) = output_grads
        # Use the helper op to compute the Jacobian at evaluation time;
        # the product stays symbolic so PyTensor can compile it.
        jacobian_sym = CasadiJacOp(self.casadi_jac)(x_val)
        return [jacobian_sym * g_out]
# Demo: f(x) = x^3 with analytic Jacobian f'(x) = 3*x^2, built in CasADi.
sym_x = cs.MX.sym('x')
cubic = sym_x**3
f_func = cs.Function('f', [sym_x], [cubic])
jac_func = cs.Function('jac_f', [sym_x], [cs.jacobian(cubic, sym_x)])

# Expose the CasADi function to PyTensor through the custom Op.
casadi_op = CasadiOpWithJac(f_func, jac_func)
x_pytensor = at.dvector('x')
y_pytensor = casadi_op(x_pytensor)
f_pytensor = pt_function.function([x_pytensor], y_pytensor)

# Differentiate a scalar cost (sum of the outputs) to exercise Op.grad.
grad_y = grad(at.sum(y_pytensor), x_pytensor)
f_grad = pt_function.function([x_pytensor], grad_y)

# Sanity-check the forward value and the gradient at x = 2.
input_val = np.array([2.0])
print("Output f(x):", f_pytensor(input_val))  # Expected: [8.0] since 2^3 = 8.
print("Gradient f'(x):", f_grad(input_val))  # Expected: [12.0] since 3*2^2 = 12.
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment