Skip to content

Instantly share code, notes, and snippets.

@abhishekpratapa
Created December 28, 2016 10:26
Show Gist options
  • Save abhishekpratapa/e8382322ff8a0ee0bf35d4fbc6afc2a1 to your computer and use it in GitHub Desktop.
Save abhishekpratapa/e8382322ff8a0ee0bf35d4fbc6afc2a1 to your computer and use it in GitHub Desktop.
# Code for Part 2 of the video series
# Special thanks to Siraj Raval for his excellent work in the community, inspired me to do this
# Special thanks to Andrej Karpathy and his blog-post "http://karpathy.github.io/neuralnets/"
# Special thanks to iamtrask and his blog-post "http://iamtrask.github.io/2015/07/12/basic-python-network/"
import math
import random
# A SINGLE INPUT OPERAND (-, exp)
# _____________
# | |
# | |
# value of x1 | | value of y1 ->
# ----------------| op |------------------
# deravitive x1 | | deravitive y1 <-
# | |
# |___________|
#
# A TWO INPUT OPERAND (-, +, *, /)
# _____________
# value of x1 | |
# ----------------| |
# deravitive x1 | | value of y1 ->
# | op |------------------
# value of x2 | | deravitive y1 <-
# ----------------| |
# deravitive x2 |___________|
#
# ... extrapolate for more gates
# exponential gate
# _____________
# | |
# | |
# x1 | | y1
# --------| exp |--------
# dx1 | | dy1
# | |
# |___________|
#
class exponentiate:
    """Single-input gate computing y1 = e^x1."""
    def __init__(self, x1):
        # input value
        self.x1 = x1
        # forward output
        self.y1 = 0
        # gradient w.r.t. the input
        self.dx1 = 0
        # gradient flowing in from downstream
        self.dy1 = 0
    def activate(self):
        # forward pass: e raised to the input
        self.y1 = math.exp(self.x1)
    def backpropagate(self):
        # chain rule: d/dx e^x = e^x, scaled by the downstream gradient
        self.dx1 = self.dy1 * math.exp(self.x1)
# Negation Gate
# _____________
# | |
# | |
# x1 | | y1
# --------| - |--------
# dx1 | | dy1
# | |
# |___________|
#
class negate:
    """Single-input gate computing y1 = -x1."""
    def __init__(self, x1):
        # input value
        self.x1 = x1
        # forward output
        self.y1 = 0
        # gradient w.r.t. the input
        self.dx1 = 0
        # gradient flowing in from downstream
        self.dy1 = 0
    def activate(self):
        self.y1 = -self.x1
    def backpropagate(self):
        # negation flips the sign of the downstream gradient
        self.dx1 = -self.dy1
# Addition gate
# _____________
# x1 | |
# --------| |
# dx1 | | y1
# | + |--------
# x2 | | dy1
# --------| |
# dx2 |___________|
#
class add:
    """Two-input gate computing y1 = x1 + x2."""
    def __init__(self, x1, x2):
        # inputs
        self.x1, self.x2 = x1, x2
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input
        self.dx1, self.dx2 = 0, 0
        # gradient flowing in from downstream
        self.dy1 = 0
    def activate(self):
        self.y1 = self.x1 + self.x2
    def backpropagate(self):
        # addition routes the downstream gradient unchanged to both inputs
        self.dx1 = self.dx2 = self.dy1
# Subtraction Gate
# _____________
# x1 | |
# --------| |
# dx1 | | y1
# | - |--------
# x2 | | dy1
# --------| |
# dx2 |___________|
#
class subtract:
    """Two-input gate computing y1 = x1 - x2."""
    def __init__(self, x1, x2):
        # inputs
        self.x1, self.x2 = x1, x2
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input
        self.dx1, self.dx2 = 0, 0
        # gradient flowing in from downstream
        self.dy1 = 0
    def activate(self):
        self.y1 = self.x1 - self.x2
    def backpropagate(self):
        # the minuend gets the gradient as-is, the subtrahend gets it negated
        self.dx1 = self.dy1
        self.dx2 = -self.dy1
# Multiplication Gate
# _____________
# x1 | |
# --------| |
# dx1 | | y1
# | * |--------
# x2 | | dy1
# --------| |
# dx2 |___________|
#
class multiply:
    """Two-input gate computing y1 = x1 * x2."""
    def __init__(self, x1, x2):
        # inputs
        self.x1, self.x2 = x1, x2
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input
        self.dx1, self.dx2 = 0, 0
        # gradient flowing in from downstream
        self.dy1 = 0
    def activate(self):
        self.y1 = self.x1 * self.x2
    def backpropagate(self):
        # product rule: each input's gradient is scaled by the *other* input
        self.dx1 = self.dy1 * self.x2
        self.dx2 = self.dy1 * self.x1
# Division Gate
# _____________
# x1 | |
# --------| |
# dx1 | | y1
# | / |--------
# x2 | | dy1
# --------| |
# dx2 |___________|
#
class divide:
    """Two-input gate computing y1 = x1 / x2 (always true division).

    Backward pass:
        dy1/dx1 = 1 / x2
        dy1/dx2 = -x1 / x2^2
    """
    def __init__(self, x1, x2):
        # inputs
        self.x1 = x1
        self.x2 = x2
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input / incoming gradient
        self.dx1 = 0
        self.dx2 = 0
        self.dy1 = 0
    def activate(self):
        # float() guards against Python 2 integer truncation (e.g. 1/4 == 0);
        # on Python 3 this is a no-op. A ZeroDivisionError on x2 == 0 is
        # deliberately allowed to propagate rather than silently corrupting
        # the downstream gradients.
        self.y1 = float(self.x1) / self.x2
    def backpropagate(self):
        self.dx1 = float(self.dy1) / self.x2
        self.dx2 = -1 * (self.x1 * self.dy1) / float(self.x2 * self.x2)
# Sigmoid Gate
# _____________
# | |
# | |
# x1 | | y1
# --------| sig |--------
# dx1 | | dy1
# | |
# |___________|
#
class sigmoid:
    """Sigmoid gate: y1 = 1 / (1 + e^(-x1)).

    Composed from the primitive gates above so the chain rule can be
    followed one stage at a time.
    """
    def __init__(self, x1):
        # input value
        self.x1 = x1
        # forward output
        self.y1 = 0
        # gradient w.r.t. the input / gradient flowing in from downstream
        self.dx1 = 0
        self.dy1 = 0
    def activate(self):
        # forward pipeline: x -> -x -> e^(-x) -> 1 + e^(-x) -> 1/(1 + e^(-x))
        self.neg_stage = negate(self.x1)
        self.neg_stage.activate()
        self.exp_stage = exponentiate(self.neg_stage.y1)
        self.exp_stage.activate()
        self.sum_stage = add(1, self.exp_stage.y1)
        self.sum_stage.activate()
        self.div_stage = divide(1, self.sum_stage.y1)
        self.div_stage.activate()
        self.y1 = self.div_stage.y1
    def backpropagate(self):
        # walk the forward pipeline in reverse, handing each stage the
        # gradient produced by the stage after it
        self.div_stage.dy1 = self.dy1
        self.div_stage.backpropagate()
        # the sum feeds the divide's denominator (its second operand)
        self.sum_stage.dy1 = self.div_stage.dx2
        self.sum_stage.backpropagate()
        # e^(-x) feeds the sum's second operand
        self.exp_stage.dy1 = self.sum_stage.dx2
        self.exp_stage.backpropagate()
        self.neg_stage.dy1 = self.exp_stage.dx1
        self.neg_stage.backpropagate()
        self.dx1 = self.neg_stage.dx1
# 3 way add gate
# _____________
# | |
# x1 | |
# --------| |
# dx1 | |
# x2 | | y1
# --------| + |--------
# dx2 | | dy1
# x3 | |
# --------| |
# dx3 | |
# |___________|
#
class add_3:
    """Three-input adder: y1 = x1 + x2 + x3, built from two chained add gates."""
    def __init__(self, x1, x2, x3):
        # inputs
        self.x1 = x1
        self.x2 = x2
        self.x3 = x3
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input / incoming gradient
        self.dx1 = 0
        self.dx2 = 0
        self.dx3 = 0
        self.dy1 = 0
    def activate(self):
        # y1 = (x1 + x2) + x3
        self.pair_sum = add(self.x1, self.x2)
        self.pair_sum.activate()
        self.total = add(self.pair_sum.y1, self.x3)
        self.total.activate()
        self.y1 = self.total.y1
    def backpropagate(self):
        # addition fans the downstream gradient out, unchanged, to every input
        self.total.dy1 = self.dy1
        self.total.backpropagate()
        self.pair_sum.dy1 = self.total.dx1
        self.pair_sum.backpropagate()
        self.dx1 = self.pair_sum.dx1
        self.dx2 = self.pair_sum.dx2
        self.dx3 = self.total.dx2
# 4 way add gate
# _____________
# | |
# x1 | |
# --------| |
# dx1 | |
# x2 | |
# --------| |
# dx2 | | y1
# x3 | + |--------
# --------| | dy1
# dx3 | |
# x4 | |
# --------| |
# dx4 | |
# |___________|
#
class add_4:
    """Four-input adder: y1 = x1 + x2 + x3 + x4, via three chained add gates.

    Note: internal gate attributes are named sum_* to avoid shadowing the
    module-level class add_3.
    """
    def __init__(self, x1, x2, x3, x4):
        # inputs
        self.x1 = x1
        self.x2 = x2
        self.x3 = x3
        self.x4 = x4
        # forward output
        self.y1 = 0
        # gradients w.r.t. each input / incoming gradient
        self.dx1 = 0
        self.dx2 = 0
        self.dx3 = 0
        self.dx4 = 0
        self.dy1 = 0
    def activate(self):
        # y1 = ((x1 + x2) + x3) + x4
        self.sum_12 = add(self.x1, self.x2)
        self.sum_12.activate()
        self.sum_123 = add(self.sum_12.y1, self.x3)
        self.sum_123.activate()
        self.sum_1234 = add(self.sum_123.y1, self.x4)
        self.sum_1234.activate()
        self.y1 = self.sum_1234.y1
    def backpropagate(self):
        # fan the downstream gradient back through the chain of adds
        self.sum_1234.dy1 = self.dy1
        self.sum_1234.backpropagate()
        self.sum_123.dy1 = self.sum_1234.dx1
        self.sum_123.backpropagate()
        self.sum_12.dy1 = self.sum_123.dx1
        self.sum_12.backpropagate()
        self.dx1 = self.sum_12.dx1
        self.dx2 = self.sum_12.dx2
        self.dx3 = self.sum_123.dx2
        self.dx4 = self.sum_1234.dx2
#############################################################
# #
# This is the design for our first neural network #
# #
#############################################################
# Neural Network 1 Input layer, 0 Hidden layer, 1 Output layer, see the Power - Point
# Here is the implementation of the Neural Network
# debug: fix the RNG seed so the random initial synapse weights are reproducible
random.seed(1)
class Net:
    """Single-neuron network: out = sigmoid(a*x1 + b*x2 + c*x3).

    Trained by gradient descent: train_once runs one forward/backward pass
    on the current (x1, x2, x3, expected) sample and nudges each synapse
    along its gradient, scaled by step_size.
    """
    def __init__(self):
        # inputs
        self.x1 = 0
        self.x2 = 0
        self.x3 = 0
        # learning rate
        self.step_size = 0.01
        # target output for the current sample
        self.expected = 0
        # synapse weights, randomly seeded
        self.a = random.uniform(-1, 1)
        self.b = random.uniform(-1, 1)
        self.c = random.uniform(-1, 1)
        # gradients for the synapses
        self.da = 0
        self.db = 0
        self.dc = 0
        # network output
        self.out = 0
        # gradient flowing in at the output
        self.dout = 0
    def activate(self):
        # weight each input by its synapse (see the PDF/Powerpoint)
        self.mul_a = multiply(self.x1, self.a)
        self.mul_b = multiply(self.x2, self.b)
        self.mul_c = multiply(self.x3, self.c)
        for gate in (self.mul_a, self.mul_b, self.mul_c):
            gate.activate()
        # sum the weighted inputs, then squash through the sigmoid
        self.sum_gate = add_3(self.mul_a.y1, self.mul_b.y1, self.mul_c.y1)
        self.sum_gate.activate()
        self.squash = sigmoid(self.sum_gate.y1)
        self.squash.activate()
        self.out = self.squash.y1
    def backpropagate(self):
        # run the gates in reverse, threading the gradient through each one
        self.squash.dy1 = self.dout
        self.squash.backpropagate()
        self.sum_gate.dy1 = self.squash.dx1
        self.sum_gate.backpropagate()
        self.mul_a.dy1 = self.sum_gate.dx1
        self.mul_b.dy1 = self.sum_gate.dx2
        self.mul_c.dy1 = self.sum_gate.dx3
        for gate in (self.mul_a, self.mul_b, self.mul_c):
            gate.backpropagate()
        # each multiply's second operand is the synapse weight
        self.da = self.mul_a.dx2
        self.db = self.mul_b.dx2
        self.dc = self.mul_c.dx2
    def train_once(self):
        # forward pass
        self.activate()
        # error signal: how far the output is from the target
        self.dout = self.expected - self.out
        # backward pass
        self.backpropagate()
        # gradient step on each synapse
        self.a += self.step_size * self.da
        self.b += self.step_size * self.db
        self.c += self.step_size * self.dc
##############################################################
# #
# This is the design for our second neural network #
# #
##############################################################
# Neural Network 1 Input layer, 1 Hidden layer, 1 Output layer, see the Power - Point
# Here is the implementation of the Neural Network
# debug: re-seed so Net_2's random initial weights are reproducible as well
random.seed(1)
class Net_2:
    """3-input network with one 4-node hidden layer and a 1-node output layer.

    Forward pass: hidden node n computes sigmoid(an*x1 + bn*x2 + cn*x3) for
    n in 1..4; the output node computes sigmoid over the four hidden
    activations weighted by (a1_2, b1_2, c1_2, d1_2). Training is gradient
    descent driven by the error signal dout = expected - out.
    """
    def __init__(self):
        # inputs
        self.x1 = 0
        self.x2 = 0
        self.x3 = 0
        # stepsize (learning rate used in train_once)
        self.step_size = 1
        # target output for the current training sample
        self.expected = 0
        #######################
        # Hidden Layer 3 -> 4 #
        #######################
        # node one hidden layer: weights (a1, b1, c1) and their gradients
        self.a1 = random.uniform(-1, 1)
        self.b1 = random.uniform(-1, 1)
        self.c1 = random.uniform(-1, 1)
        self.da1 = 0
        self.db1 = 0
        self.dc1 = 0
        # node two hidden layer
        self.a2 = random.uniform(-1, 1)
        self.b2 = random.uniform(-1, 1)
        self.c2 = random.uniform(-1, 1)
        self.da2 = 0
        self.db2 = 0
        self.dc2 = 0
        # node three hidden layer
        self.a3 = random.uniform(-1, 1)
        self.b3 = random.uniform(-1, 1)
        self.c3 = random.uniform(-1, 1)
        self.da3 = 0
        self.db3 = 0
        self.dc3 = 0
        # node four hidden layer
        self.a4 = random.uniform(-1, 1)
        self.b4 = random.uniform(-1, 1)
        self.c4 = random.uniform(-1, 1)
        self.da4 = 0
        self.db4 = 0
        self.dc4 = 0
        # Output Layer 4 -> 1
        # node 1_2: one weight per hidden-node activation, plus gradients
        self.a1_2 = random.uniform(-1, 1)
        self.b1_2 = random.uniform(-1, 1)
        self.c1_2 = random.uniform(-1, 1)
        self.d1_2 = random.uniform(-1, 1)
        self.da1_2 = 0
        self.db1_2 = 0
        self.dc1_2 = 0
        self.dd1_2 = 0
        ##########
        # Output #
        ##########
        # output of the neural network
        self.out = 0
        # gradient flowing in at the output
        self.dout = 0
    def activate(self):
        """Forward pass: build and fire the gate graph, leaving the result in self.out."""
        # node one: weighted sum of the three inputs, squashed by a sigmoid
        self.multiply_xa_1 = multiply(self.x1, self.a1)
        self.multiply_xa_1.activate()
        self.multiply_xb_1 = multiply(self.x2, self.b1)
        self.multiply_xb_1.activate()
        self.multiply_xc_1 = multiply(self.x3, self.c1)
        self.multiply_xc_1.activate()
        self.add_block_n_1 = add_3(self.multiply_xa_1.y1, self.multiply_xb_1.y1, self.multiply_xc_1.y1)
        self.add_block_n_1.activate()
        self.sigmoid_block_n_1 = sigmoid(self.add_block_n_1.y1)
        self.sigmoid_block_n_1.activate()
        # node two
        self.multiply_xa_2 = multiply(self.x1, self.a2)
        self.multiply_xa_2.activate()
        self.multiply_xb_2 = multiply(self.x2, self.b2)
        self.multiply_xb_2.activate()
        self.multiply_xc_2 = multiply(self.x3, self.c2)
        self.multiply_xc_2.activate()
        self.add_block_n_2 = add_3(self.multiply_xa_2.y1, self.multiply_xb_2.y1, self.multiply_xc_2.y1)
        self.add_block_n_2.activate()
        self.sigmoid_block_n_2 = sigmoid(self.add_block_n_2.y1)
        self.sigmoid_block_n_2.activate()
        # node three
        self.multiply_xa_3 = multiply(self.x1, self.a3)
        self.multiply_xa_3.activate()
        self.multiply_xb_3 = multiply(self.x2, self.b3)
        self.multiply_xb_3.activate()
        self.multiply_xc_3 = multiply(self.x3, self.c3)
        self.multiply_xc_3.activate()
        self.add_block_n_3 = add_3(self.multiply_xa_3.y1, self.multiply_xb_3.y1, self.multiply_xc_3.y1)
        self.add_block_n_3.activate()
        self.sigmoid_block_n_3 = sigmoid(self.add_block_n_3.y1)
        self.sigmoid_block_n_3.activate()
        # node four
        self.multiply_xa_4 = multiply(self.x1, self.a4)
        self.multiply_xa_4.activate()
        self.multiply_xb_4 = multiply(self.x2, self.b4)
        self.multiply_xb_4.activate()
        self.multiply_xc_4 = multiply(self.x3, self.c4)
        self.multiply_xc_4.activate()
        self.add_block_n_4 = add_3(self.multiply_xa_4.y1, self.multiply_xb_4.y1, self.multiply_xc_4.y1)
        self.add_block_n_4.activate()
        self.sigmoid_block_n_4 = sigmoid(self.add_block_n_4.y1)
        self.sigmoid_block_n_4.activate()
        #######################
        # second hidden layer #
        #######################
        # output node: weighted sum of the four hidden activations -> sigmoid
        self.multiply_xa_1_2 = multiply(self.sigmoid_block_n_1.y1, self.a1_2)
        self.multiply_xa_1_2.activate()
        self.multiply_xb_1_2 = multiply(self.sigmoid_block_n_2.y1, self.b1_2)
        self.multiply_xb_1_2.activate()
        self.multiply_xc_1_2 = multiply(self.sigmoid_block_n_3.y1, self.c1_2)
        self.multiply_xc_1_2.activate()
        self.multiply_xd_1_2 = multiply(self.sigmoid_block_n_4.y1, self.d1_2)
        self.multiply_xd_1_2.activate()
        self.add_block_n_1_1 = add_4(self.multiply_xa_1_2.y1, self.multiply_xb_1_2.y1, self.multiply_xc_1_2.y1, self.multiply_xd_1_2.y1)
        self.add_block_n_1_1.activate()
        self.sigmoid_block_n_1_1 = sigmoid(self.add_block_n_1_1.y1)
        self.sigmoid_block_n_1_1.activate()
        self.out = self.sigmoid_block_n_1_1.y1
    def backpropagate(self):
        """Backward pass: mirror activate() in reverse, filling in the d* gradients.

        Must be called after activate(), which creates the gate objects.
        """
        # output node, back to front
        self.sigmoid_block_n_1_1.dy1 = self.dout
        self.sigmoid_block_n_1_1.backpropagate()
        self.add_block_n_1_1.dy1 = self.sigmoid_block_n_1_1.dx1
        self.add_block_n_1_1.backpropagate()
        self.multiply_xa_1_2.dy1 = self.add_block_n_1_1.dx1
        self.multiply_xb_1_2.dy1 = self.add_block_n_1_1.dx2
        self.multiply_xc_1_2.dy1 = self.add_block_n_1_1.dx3
        self.multiply_xd_1_2.dy1 = self.add_block_n_1_1.dx4
        self.multiply_xa_1_2.backpropagate()
        self.multiply_xb_1_2.backpropagate()
        self.multiply_xc_1_2.backpropagate()
        self.multiply_xd_1_2.backpropagate()
        # output-layer weight gradients (each multiply's second operand is a weight)
        self.da1_2 = self.multiply_xa_1_2.dx2
        self.db1_2 = self.multiply_xb_1_2.dx2
        self.dc1_2 = self.multiply_xc_1_2.dx2
        self.dd1_2 = self.multiply_xd_1_2.dx2
        # each multiply's first operand is a hidden activation, so dx1 flows
        # back into the corresponding hidden node's sigmoid
        self.sigmoid_block_n_1.dy1 = self.multiply_xa_1_2.dx1
        self.sigmoid_block_n_2.dy1 = self.multiply_xb_1_2.dx1
        self.sigmoid_block_n_3.dy1 = self.multiply_xc_1_2.dx1
        self.sigmoid_block_n_4.dy1 = self.multiply_xd_1_2.dx1
        self.sigmoid_block_n_1.backpropagate()
        self.sigmoid_block_n_2.backpropagate()
        self.sigmoid_block_n_3.backpropagate()
        self.sigmoid_block_n_4.backpropagate()
        self.add_block_n_1.dy1 = self.sigmoid_block_n_1.dx1
        self.add_block_n_2.dy1 = self.sigmoid_block_n_2.dx1
        self.add_block_n_3.dy1 = self.sigmoid_block_n_3.dx1
        self.add_block_n_4.dy1 = self.sigmoid_block_n_4.dx1
        self.add_block_n_1.backpropagate()
        self.add_block_n_2.backpropagate()
        self.add_block_n_3.backpropagate()
        self.add_block_n_4.backpropagate()
        # hidden layer: fan each add gate's gradients back to its multiplies
        self.multiply_xa_1.dy1 = self.add_block_n_1.dx1
        self.multiply_xb_1.dy1 = self.add_block_n_1.dx2
        self.multiply_xc_1.dy1 = self.add_block_n_1.dx3
        self.multiply_xa_2.dy1 = self.add_block_n_2.dx1
        self.multiply_xb_2.dy1 = self.add_block_n_2.dx2
        self.multiply_xc_2.dy1 = self.add_block_n_2.dx3
        self.multiply_xa_3.dy1 = self.add_block_n_3.dx1
        self.multiply_xb_3.dy1 = self.add_block_n_3.dx2
        self.multiply_xc_3.dy1 = self.add_block_n_3.dx3
        self.multiply_xa_4.dy1 = self.add_block_n_4.dx1
        self.multiply_xb_4.dy1 = self.add_block_n_4.dx2
        self.multiply_xc_4.dy1 = self.add_block_n_4.dx3
        self.multiply_xa_1.backpropagate()
        self.multiply_xb_1.backpropagate()
        self.multiply_xc_1.backpropagate()
        self.multiply_xa_2.backpropagate()
        self.multiply_xb_2.backpropagate()
        self.multiply_xc_2.backpropagate()
        self.multiply_xa_3.backpropagate()
        self.multiply_xb_3.backpropagate()
        self.multiply_xc_3.backpropagate()
        self.multiply_xa_4.backpropagate()
        self.multiply_xb_4.backpropagate()
        self.multiply_xc_4.backpropagate()
        # hidden-layer weight gradients
        self.da1 = self.multiply_xa_1.dx2
        self.db1 = self.multiply_xb_1.dx2
        self.dc1 = self.multiply_xc_1.dx2
        self.da2 = self.multiply_xa_2.dx2
        self.db2 = self.multiply_xb_2.dx2
        self.dc2 = self.multiply_xc_2.dx2
        self.da3 = self.multiply_xa_3.dx2
        self.db3 = self.multiply_xb_3.dx2
        self.dc3 = self.multiply_xc_3.dx2
        self.da4 = self.multiply_xa_4.dx2
        self.db4 = self.multiply_xb_4.dx2
        self.dc4 = self.multiply_xc_4.dx2
    def train_once(self):
        """One training step on the current sample: forward, error, backward, update."""
        # activate
        self.activate()
        # calculate error
        self.dout = self.expected - self.out
        # backpropagate
        self.backpropagate()
        # gradient step on every weight, layer 1 first
        self.a1 = self.a1 + self.step_size * self.da1
        self.b1 = self.b1 + self.step_size * self.db1
        self.c1 = self.c1 + self.step_size * self.dc1
        self.a2 = self.a2 + self.step_size * self.da2
        self.b2 = self.b2 + self.step_size * self.db2
        self.c2 = self.c2 + self.step_size * self.dc2
        self.a3 = self.a3 + self.step_size * self.da3
        self.b3 = self.b3 + self.step_size * self.db3
        self.c3 = self.c3 + self.step_size * self.dc3
        self.a4 = self.a4 + self.step_size * self.da4
        self.b4 = self.b4 + self.step_size * self.db4
        self.c4 = self.c4 + self.step_size * self.dc4
        # then the output layer
        self.a1_2 = self.a1_2 + self.step_size * self.da1_2
        self.b1_2 = self.b1_2 + self.step_size * self.db1_2
        self.c1_2 = self.c1_2 + self.step_size * self.dc1_2
        self.d1_2 = self.d1_2 + self.step_size * self.dd1_2
#############################################################
# #
# Here is the second problem the PDF/powerpoint solved #
# #
#############################################################
# TEST SET
# [0, 0, 1] => 0
# [0, 1, 1] => 1
# [1, 0, 1] => 1
# [1, 1, 1] => 0
# Create Neural Network
# initialize with first dataset, we will do jank batch training on the neural net
# (one forward pass here just to build the gate graph before training)
neural_net_2 = Net_2()
neural_net_2.x1 = 0
neural_net_2.x2 = 0
neural_net_2.x3 = 1
neural_net_2.expected = 0
neural_net_2.activate()
# NOTE: a stray empty ''' ''' string literal (a no-op expression statement)
# that followed this setup was removed.
###############################################################################
# You don't need to worry about this part its just for displaying information #
########################### START: DISPLAY INFO ###############################
###############################################################################
# Print Synapses Before Training (output is identical to the original
# statement-by-statement version; the loops are data-driven)
print("Synapse Weights Before Training")
print("Layer One 3 -> 4")
print(" ")
for node in ("1", "2", "3", "4"):
    for weight in ("a", "b", "c"):
        name = weight + node
        print(name + " = " + str(getattr(neural_net_2, name)))
    print(" ")
print(" ")
print("Layer Two 4 -> 1")
print(" ")
for weight in ("a", "b", "c", "d"):
    name = weight + "1_2"
    print(name + " = " + str(getattr(neural_net_2, name)))
print(" ")
# Before training display: run every truth-table row through the untrained net
print("Before Training")
print(" ")
for x1, x2, x3, target in ((0, 0, 1, 0), (0, 1, 1, 1), (1, 0, 1, 1), (1, 1, 1, 0)):
    print("[" + str(x1) + ", " + str(x2) + ", " + str(x3) + "] => " + str(target))
    neural_net_2.x1 = x1
    neural_net_2.x2 = x2
    neural_net_2.x3 = x3
    neural_net_2.expected = target
    neural_net_2.activate()
    print("> " + str(neural_net_2.out))
    print(" ")
print("As you see the values do not Match")
##############################################################################
############################ END: DISPLAY INFO ###############################
##############################################################################
# Actual Training using the set of data.
# NOTE: the original per-row comments here were mislabelled (they tagged the
# [0,1,1] row as "[1,1,1] => 1" and the [1,1,1] row as "[0,1,1] => 0"); the
# training data itself is unchanged.
for x in range(100000):
    # [0, 0, 1] => 0
    neural_net_2.x1 = 0
    neural_net_2.x2 = 0
    neural_net_2.x3 = 1
    neural_net_2.expected = 0
    neural_net_2.train_once()
    # [0, 1, 1] => 1
    neural_net_2.x1 = 0
    neural_net_2.x2 = 1
    neural_net_2.x3 = 1
    neural_net_2.expected = 1
    neural_net_2.train_once()
    # [1, 0, 1] => 1
    neural_net_2.x1 = 1
    neural_net_2.x2 = 0
    neural_net_2.x3 = 1
    neural_net_2.expected = 1
    neural_net_2.train_once()
    # [1, 1, 1] => 0
    neural_net_2.x1 = 1
    neural_net_2.x2 = 1
    neural_net_2.x3 = 1
    neural_net_2.expected = 0
    neural_net_2.train_once()
    # progress indicator: // keeps this an int so Python 3 prints 0, 1, 2, ...
    # instead of 0.0, 1.0, ... (x / 1000 is float division on Python 3)
    if x % 1000 == 0:
        print(str(x // 1000))
###############################################################################
# You don't need to worry about this part its just for displaying information #
########################### START: DISPLAY INFO ###############################
###############################################################################
# Print Synapses After Training (output is identical to the original
# statement-by-statement version; the loops are data-driven)
print("Synapse Weights After Training")
print("Layer One 3 -> 4")
print(" ")
for node in ("1", "2", "3", "4"):
    for weight in ("a", "b", "c"):
        name = weight + node
        print(name + " = " + str(getattr(neural_net_2, name)))
    print(" ")
print(" ")
print("Layer Two 4 -> 1")
print(" ")
for weight in ("a", "b", "c", "d"):
    name = weight + "1_2"
    print(name + " = " + str(getattr(neural_net_2, name)))
print(" ")
# After training display: run every truth-table row through the trained net
print("After Training")
print(" ")
for x1, x2, x3, target in ((0, 0, 1, 0), (0, 1, 1, 1), (1, 0, 1, 1), (1, 1, 1, 0)):
    print("[" + str(x1) + ", " + str(x2) + ", " + str(x3) + "] => " + str(target))
    neural_net_2.x1 = x1
    neural_net_2.x2 = x2
    neural_net_2.x3 = x3
    neural_net_2.expected = target
    neural_net_2.activate()
    print("> " + str(neural_net_2.out))
    print(" ")
print("The values should be very close to the actual values")
##############################################################################
############################ END: DISPLAY INFO ###############################
##############################################################################
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment