A configurable activation layer for a from-scratch NumPy neural network: it blends ReLU, sigmoid, tanh, and their antiderivatives with tunable weights.
# Adapted from Jovian Lin's code (hyperlink unavailable)

import numpy as np
from scipy.special import expit
from enum import Enum
from typing import Union

np.seterr(divide='raise', over='raise', under='warn', invalid='raise')

# Largest/smallest arguments for which np.exp() stays within float64 range.
logmax = np.log(np.finfo(np.float64).max)
logtiny = np.log(np.finfo(np.float64).tiny)
def safeFunc(funcName, x, y=None):
    """Numerically safe softplus and log-cosh; fall back to asymptotic forms on overflow."""
    if funcName == 'softplus':
        def softplus(v):
            return np.log1p(np.exp(v)) - 1.0
        try:
            return softplus(x)
        except FloatingPointError:
            # softplus(v) ~ v for large v and ~ 0 for very negative v
            # (both shifted by the -1.0 offset used above).
            z = x - 1.0
            z[(x < logmax) & (x > logtiny)] = softplus(x[(x < logmax) & (x > logtiny)])
            z[x < logtiny] = -1.0
            return z
    if funcName == 'logcosh':
        try:
            return np.log(np.cosh(x))
        except FloatingPointError:
            # log(cosh(v)) ~ |v| - log(2) once cosh(v) overflows.
            z = np.empty(x.shape)
            z[np.abs(x) > logmax] = np.abs(x[np.abs(x) > logmax]) - np.log(2)
            z[np.abs(x) <= logmax] = np.log(np.cosh(x[np.abs(x) <= logmax]))
            return z
class Calculus(Enum):
    DERIVE = 1
    NONE = 0
    INTEGRATE = -1
def ReLU_(c, x: Union[int, float], calculus: Calculus = Calculus.NONE):
    # Derivative and antiderivative are taken with respect to the scaled input y = c * x.
    if c == 0.0:
        return 0.0
    y = c * x
    if calculus == Calculus.DERIVE:
        return y > 0.0
    relu = np.maximum(y, 0.0)
    if calculus == Calculus.INTEGRATE:
        return 0.5 * np.square(relu)
    return relu
def Sigmoid(c, x: Union[int, float], calculus: Calculus = Calculus.NONE):
    if c == 0.0:
        return 0.0
    y = c * x
    if calculus == Calculus.INTEGRATE:
        return safeFunc('softplus', y)  # antiderivative of the logistic function
    sigm = expit(y)
    if calculus == Calculus.DERIVE:
        return sigm * (1.0 - sigm)
    return sigm
def Tanh(c, x: Union[int, float], calculus: Calculus = Calculus.NONE):
    if c == 0.0:
        return 0.0
    y = c * x
    if calculus == Calculus.INTEGRATE:
        return safeFunc('logcosh', y)  # antiderivative of tanh
    if calculus == Calculus.DERIVE:
        return 1.0 - np.square(np.tanh(y))
    return np.tanh(y)
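# Illustrative sanity check (not part of the original gist): DERIVE and INTEGRATE
# are taken with respect to the scaled argument y = c * x, so with c = 1.0 the
# DERIVE output matches a central finite difference of the plain activation, and
# the plain activation matches a finite difference of INTEGRATE. The test points
# `_y` and the step `_h` below are arbitrary choices used only for this check.
_y = np.linspace(-3.0, 3.0, 7)
_h = 1e-6
assert np.allclose(Sigmoid(1.0, _y, Calculus.DERIVE),
                   (Sigmoid(1.0, _y + _h) - Sigmoid(1.0, _y - _h)) / (2 * _h))
assert np.allclose(Sigmoid(1.0, _y),
                   (Sigmoid(1.0, _y + _h, Calculus.INTEGRATE)
                    - Sigmoid(1.0, _y - _h, Calculus.INTEGRATE)) / (2 * _h))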
class Layer:
    # Minimal stand-in for the Layer base class from the original code,
    # which is not included in this gist.
    def forward(self, input, is_train):
        return input

    def backward(self, input, grad_output):
        return grad_output


class ReLUp(Layer):
    """Activation layer blending ReLU, sigmoid, tanh and their antiderivatives, weighted by cs."""

    def __init__(self, cs):
        self.cs = cs / np.linalg.norm(cs, ord=1)  # normalise the six weights to unit L1 norm
        self.ReLUness = self.cs[0] + self.cs[1]   # share of weight carried by the ReLU terms

    def forward(self, input, is_train):
        c1, c2, c3, c4, c5, c6 = self.cs
        return np.sum([ReLU_(c1, input, Calculus.INTEGRATE),
                       ReLU_(c2, input),
                       Sigmoid(c3, input, Calculus.INTEGRATE),
                       Sigmoid(c4, input),
                       Tanh(c5, input, Calculus.INTEGRATE),
                       Tanh(c6, input)
                       ], axis=0) / np.sum(self.cs)

    def backward(self, input, grad_output):
        # Each term is the derivative (w.r.t. its scaled argument) of the matching forward term.
        c1, c2, c3, c4, c5, c6 = self.cs
        return grad_output * np.sum([ReLU_(c1, input),
                                     ReLU_(c2, input, Calculus.DERIVE),
                                     Sigmoid(c3, input),
                                     Sigmoid(c4, input, Calculus.DERIVE),
                                     Tanh(c5, input),
                                     Tanh(c6, input, Calculus.DERIVE)
                                     ], axis=0) / np.sum(self.cs)
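A minimal usage sketch (not part of the original gist; the all-ones coefficient vector and the random input below are arbitrary assumptions): ReLUp blends the six activation terms in forward and returns the matching gradient with respect to its input in backward.

rng = np.random.default_rng(0)
x = rng.standard_normal((4, 3))                          # batch of 4 samples, 3 features
act = ReLUp(np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0]))    # equal weight on all six terms
out = act.forward(x, is_train=True)                      # blended activation, same shape as x
grad_in = act.backward(x, np.ones_like(out))             # gradient w.r.t. the layer input
print(out.shape, grad_in.shape)                          # (4, 3) (4, 3)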