@Muhammad-Yunus
Created April 15, 2020 04:44
MLP ODE Timeseries
import numpy as np

# sigmoid activation function and its derivative
def f_sigmoid(value):
    return 1.0 / (1.0 + np.exp(-value))

def f_sigmoid_derivation(value):
    return f_sigmoid(value) * (1 - f_sigmoid(value))

# linear (identity) activation function and its derivative
def f_linear(value):
    return value

def f_linear_derivation(value):
    return np.ones(value.shape, np.float32)

# hidden layer activation function wrapper (sigmoid)
def f_activation(value):
    return f_sigmoid(value)

def f_activation_derivation(value):
    return f_sigmoid_derivation(value)

# output layer activation function wrapper (linear)
def f_activation_output(value):
    return f_linear(value)

def f_activation_derivation_output(value):
    return f_linear_derivation(value)
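The wrappers let the hidden and output layers swap activation functions without touching the rest of the network code. As a quick sanity check, the sketch below (an assumed usage example, not part of the original gist) evaluates both wrappers on a sample array and compares the sigmoid derivative against a central finite-difference approximation.

# assumed usage example: evaluate the wrappers and verify the sigmoid derivative
x = np.array([-2.0, 0.0, 1.5])

print(f_activation(x))         # sigmoid applied element-wise (hidden layer)
print(f_activation_output(x))  # identity, returns x unchanged (output layer)

# central finite difference should match the analytic derivative closely
eps = 1e-4
numeric = (f_sigmoid(x + eps) - f_sigmoid(x - eps)) / (2 * eps)
print(np.allclose(f_activation_derivation(x), numeric, atol=1e-6))  # True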