Created April 11, 2018 13:31
activation.py
# -*- coding: utf-8 -*-
import numpy as np


class Activation:
    """Activation function with a forward pass and a backward (gradient) pass."""

    def __init__(self, name="sigmoid"):
        self.name = name

    def forward_prop(self, Z):
        """Apply the activation element-wise to the pre-activation Z."""
        if self.name == "sigmoid":
            return self.sigmoid(Z)
        elif self.name == "tanh":
            return self.tanh(Z)
        elif self.name == "relu":
            return self.relu(Z)
        elif self.name == "linear":
            return self.linear(Z)
        raise ValueError("Unknown activation: %s" % self.name)

    def back_prop(self, Z, dPhi):
        """Chain the upstream gradient dPhi through the activation's derivative at Z."""
        if self.name == "sigmoid":
            return self.grad_sigmoid(Z) * dPhi
        elif self.name == "tanh":
            return self.grad_tanh(Z) * dPhi
        elif self.name == "relu":
            return self.grad_relu(Z) * dPhi
        elif self.name == "linear":
            return self.grad_linear(Z) * dPhi
        raise ValueError("Unknown activation: %s" % self.name)

    def sigmoid(self, Z):
        return 1.0 / (1.0 + np.exp(-Z))

    def grad_sigmoid(self, Z):
        # sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)); compute sigmoid once and reuse it
        s = self.sigmoid(Z)
        return s * (1.0 - s)

    def tanh(self, Z):
        return np.tanh(Z)

    def grad_tanh(self, Z):
        # tanh'(z) = 1 - tanh(z)**2
        return 1.0 - self.tanh(Z) ** 2

    def relu(self, Z):
        return Z * (Z > 0)

    def grad_relu(self, Z):
        # relu'(z) = 1 for z > 0, else 0
        return 1.0 * (Z > 0)

    def linear(self, Z):
        return Z

    def grad_linear(self, Z):
        return np.ones(Z.shape)
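A minimal usage sketch (not part of the gist): it shows how the Activation class above could drive one forward and one backward step. The array values and the upstream gradient dPhi are purely illustrative assumptions.

import numpy as np
# from activation import Activation  # assuming the gist is saved as activation.py

act = Activation(name="relu")

Z = np.array([[-1.0, 0.5],
              [2.0, -0.3]])          # pre-activation values (illustrative)
Phi = act.forward_prop(Z)            # element-wise relu(Z)

dPhi = np.ones_like(Z)               # pretend upstream gradient dL/dPhi
dZ = act.back_prop(Z, dPhi)          # dL/dZ = relu'(Z) * dPhi

print(Phi)  # zeros where Z <= 0, Z itself where Z > 0
print(dZ)   # 1.0 where Z > 0, 0.0 elsewhere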