Last active
December 9, 2016 13:41
-
-
Save travishsu/2245be204df144fd0f4e9432564f0f81 to your computer and use it in GitHub Desktop.
Only the linear-transformation part; to build up to full linear regression you would also need sum and square nodes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np | |
class Add:
    """Computation-graph node that sums a list of numbers."""

    def __init__(self):
        # Stateless except for the values cached during forward().
        pass

    def forward(self, numlist):
        """Cache the inputs and return their sum."""
        self.numlist = numlist
        return np.sum(numlist)

    def backward(self, loss):
        """Propagate the upstream gradient to every input.

        d(sum)/dx_i == 1, so each input receives the upstream gradient
        unchanged.  Returns the gradient so nodes can be chained
        (bug fix: the original returned None, which breaks
        `dfun = node.backward(dfun)` in Sequential.fit_single).
        """
        self.dnumlist = loss * np.ones(len(self.numlist))
        return self.dnumlist

    def update(self, stepsize):
        """No trainable parameters; nothing to update."""
        pass
class Multiply:
    """Computation-graph node that multiplies a list of numbers."""

    def __init__(self):
        # Stateless except for the values cached during forward().
        pass

    def forward(self, numlist):
        """Cache the inputs and return their product."""
        self.numlist = numlist
        return np.prod(numlist)

    def backward(self, loss):
        """Gradient of the product w.r.t. each input.

        d(prod)/dx_i is the product of all the OTHER inputs.  Computed
        directly (instead of the original prod/x_i) so inputs equal to
        zero no longer yield inf/nan.  Returns the gradient so nodes can
        be chained (bug fix: the original returned None, which breaks
        `dfun = node.backward(dfun)` in Sequential.fit_single).
        """
        values = np.asarray(self.numlist, dtype=float)
        others = np.array(
            [np.prod(np.delete(values, i)) for i in range(values.size)]
        )
        self.dnumlist = loss * others
        return self.dnumlist

    def update(self, stepsize):
        """No trainable parameters; nothing to update."""
        pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np | |
class Dense:
    """Fully connected layer: y = W x + b for one column-vector sample."""

    def __init__(self, output_dim, input_dim):
        # Gaussian-initialised weight matrix and bias column.
        self.W = np.random.randn(output_dim, input_dim)
        self.bias = np.random.randn(output_dim, 1)

    def forward(self, inputs):
        """Apply the affine map, remembering the input for backprop."""
        self.prev_in = inputs
        return self.W @ inputs + self.bias

    def backward(self, loss):
        """Accumulate parameter gradients; return the gradient w.r.t. the input.

        `loss` is the upstream gradient, shaped like the layer output
        (output_dim, 1).
        """
        self.dW = loss @ self.prev_in.T
        self.dbias = loss
        return self.W.T @ loss

    def update(self, stepsize):
        """Take one plain gradient-descent step on both parameters."""
        self.W -= stepsize * self.dW
        self.bias -= stepsize * self.dbias
class Sequential:
    """Minimal feed-forward container that chains node objects.

    Each node must expose forward(x), backward(grad), and update(stepsize).
    """

    def __init__(self):
        # Nodes are applied in insertion order on the forward pass.
        self.nodes = []

    def add(self, node):
        """Append a node to the end of the pipeline."""
        self.nodes.append(node)

    def evaluate_single(self, sample):
        """Run one sample through every node and return the final output."""
        out = sample
        for node in self.nodes:
            out = node.forward(out)
        return out

    def fit_single(self, sample, stepsize):
        """One forward/backward/update sweep for a single sample.

        NOTE(review): the backward pass is seeded with the raw network
        output rather than a loss derivative, which drives the output
        toward zero — presumably a placeholder until loss nodes exist.
        """
        out = sample
        for node in self.nodes:
            out = node.forward(out)
        grad = out
        # Walk the pipeline back-to-front without mutating self.nodes.
        for node in reversed(self.nodes):
            grad = node.backward(grad)
            node.update(stepsize)
# Smoke test for the layers and the Sequential container.
# (RNG calls happen in the same order as before: sample, then the
# 5x10 layer, then the 10x32 layer.)
sample = np.random.randn(32, 1)
head = Dense(5, 10)      # second layer: 10 -> 5
hidden = Dense(10, 32)   # first layer: 32 -> 10

# Exercise the node API directly.
head.forward(hidden.forward(sample))
out = head.forward(hidden.forward(sample))
hidden.backward(head.backward(out))

# Exercise the model API.
model = Sequential()
model.add(hidden)
model.add(head)
for _ in range(1000):
    model.fit_single(sample, 0.0001)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment