Skip to content

Instantly share code, notes, and snippets.

@zhenghaoz
Created November 13, 2017 07:33
Show Gist options
  • Save zhenghaoz/f9ef67a61f3f6169aec378e2bac20146 to your computer and use it in GitHub Desktop.
import pandas as pd
import numpy as np
# Model
class SingleHiddenBP:
    """Single-hidden-layer feed-forward network trained by back-propagation.

    Weights include bias terms: ``v`` is ``(input_number+1, hidden_number)``
    and ``w`` is ``(hidden_number+1, output_number)``; the extra row carries
    the bias, fed by a constant 1 appended to each layer's activations.
    Supports per-sample (online) or accumulated (batch) weight updates.
    """

    def __init__(self, input_number, hidden_number, output_number,
                 learning_rate=1, learning_round=100, batch_learning=False):
        """Initialize the network with uniform-random weights in [0, 1).

        Args:
            input_number: Number of input features.
            hidden_number: Number of hidden units.
            output_number: Number of output units.
            learning_rate: Step size multiplier for weight updates.
            learning_round: Number of passes over the training set.
            batch_learning: If True, accumulate updates over a full pass
                and apply them once per round; otherwise update per sample.
        """
        self.input_number = input_number
        self.hidden_number = hidden_number
        self.output_number = output_number
        # +1 row in each matrix holds the bias weights.
        self.w = np.random.rand(hidden_number + 1, output_number)
        self.v = np.random.rand(input_number + 1, hidden_number)
        self.learning_rate = learning_rate
        self.learning_round = learning_round
        self.batch_learning = batch_learning

    def fit(self, X, Y):
        """Train on samples X (rows) against targets Y (rows) with BP.

        Runs ``learning_round`` epochs of standard back-propagation with a
        sigmoid activation on both layers.
        """
        for _ in range(self.learning_round):
            batch_delta_w = batch_delta_v = 0
            for i in range(len(X)):
                x, b, temp_y = self.__predict__(X[i])
                # Output-layer gradient: g_j = y_j (1 - y_j) (t_j - y_j).
                g = temp_y * (1 - temp_y) * (Y[i] - temp_y)
                delta_w = np.outer(b, g) * self.learning_rate
                # Hidden-layer error must be computed from the *current*
                # output weights, i.e. before delta_w is applied.  (The
                # original online path applied delta_w first, which biased
                # the hidden-layer gradient; batch mode was unaffected.)
                e = b * (1 - b) * np.matmul(self.w, g)
                e = e[:-1]  # drop the bias unit: it has no incoming weights in v
                delta_v = np.outer(x, e) * self.learning_rate
                if self.batch_learning:
                    batch_delta_w += delta_w
                    batch_delta_v += delta_v
                else:
                    self.w += delta_w
                    self.v += delta_v
            if self.batch_learning:
                self.w += batch_delta_w
                self.v += batch_delta_v

    def predict(self, x):
        """Return the network's output vector for a single sample ``x``."""
        return self.__predict__(x)[2]

    def __predict__(self, x):
        """Forward pass for one sample.

        Returns:
            Tuple ``(x, b, y)``: the bias-augmented input, the
            bias-augmented hidden activations, and the output activations.
        """
        # Input layer: append the constant bias input.
        x = np.append(x, [1])
        # Hidden layer, then append the bias unit for the output layer.
        b = self.__sigmoid__(np.matmul(x, self.v))
        b = np.append(b, [1])
        # Output layer.
        y = self.__sigmoid__(np.matmul(b, self.w))
        return x, b, y

    @staticmethod
    def __sigmoid__(x):
        """Logistic activation 1 / (1 + exp(-x)), applied element-wise."""
        return 1 / (1 + np.exp(-x))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment